Init
This commit is contained in:
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
.venv
|
||||
OxApp.dist
|
||||
8
.idea/.gitignore
generated
vendored
Normal file
8
.idea/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
# Default ignored files
|
||||
/shelf/
|
||||
/workspace.xml
|
||||
# Editor-based HTTP Client requests
|
||||
/httpRequests/
|
||||
# Datasource local storage ignored files
|
||||
/dataSources/
|
||||
/dataSources.local.xml
|
||||
6
.idea/inspectionProfiles/profiles_settings.xml
generated
Normal file
6
.idea/inspectionProfiles/profiles_settings.xml
generated
Normal file
@@ -0,0 +1,6 @@
|
||||
<component name="InspectionProjectProfileManager">
|
||||
<settings>
|
||||
<option name="USE_PROJECT_PROFILE" value="false" />
|
||||
<version value="1.0" />
|
||||
</settings>
|
||||
</component>
|
||||
10
.idea/misc.xml
generated
Normal file
10
.idea/misc.xml
generated
Normal file
@@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="Black">
|
||||
<option name="sdkName" value="Python 3.13 (oxapp25)" />
|
||||
</component>
|
||||
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.13 (oxapp25) (2)" project-jdk-type="Python SDK" />
|
||||
<component name="PythonCompatibilityInspectionAdvertiser">
|
||||
<option name="version" value="3" />
|
||||
</component>
|
||||
</project>
|
||||
8
.idea/modules.xml
generated
Normal file
8
.idea/modules.xml
generated
Normal file
@@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ProjectModuleManager">
|
||||
<modules>
|
||||
<module fileurl="file://$PROJECT_DIR$/.idea/oxapp25.iml" filepath="$PROJECT_DIR$/.idea/oxapp25.iml" />
|
||||
</modules>
|
||||
</component>
|
||||
</project>
|
||||
10
.idea/oxapp25.iml
generated
Normal file
10
.idea/oxapp25.iml
generated
Normal file
@@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="PYTHON_MODULE" version="4">
|
||||
<component name="NewModuleRootManager">
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<excludeFolder url="file://$MODULE_DIR$/.venv" />
|
||||
</content>
|
||||
<orderEntry type="jdk" jdkName="Python 3.13 (oxapp25) (2)" jdkType="Python SDK" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
</component>
|
||||
</module>
|
||||
6
.idea/vcs.xml
generated
Normal file
6
.idea/vcs.xml
generated
Normal file
@@ -0,0 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="VcsDirectoryMappings">
|
||||
<mapping directory="$PROJECT_DIR$" vcs="Git" />
|
||||
</component>
|
||||
</project>
|
||||
53
OxApp.spec
Normal file
53
OxApp.spec
Normal file
@@ -0,0 +1,53 @@
|
||||
# -*- mode: python ; coding: utf-8 -*-
|
||||
|
||||
|
||||
a = Analysis(
|
||||
['main.py'],
|
||||
pathex=[],
|
||||
binaries=[],
|
||||
datas=[],
|
||||
hiddenimports = [
|
||||
'qasync',
|
||||
'asyncio',
|
||||
'aiopath',
|
||||
'aiohttp',
|
||||
'aiofiles',
|
||||
'PySide6.QtWebEngineCore',
|
||||
'PySide6.QtWebEngineWidgets'
|
||||
],
|
||||
hookspath=[],
|
||||
hooksconfig={},
|
||||
runtime_hooks=[],
|
||||
excludes=['tkinter', 'matplotlib', 'scipy', 'numpy', 'pandas',
|
||||
'IPython', 'jupyter', 'PyQt5', 'PyQt6'],
|
||||
noarchive=False,
|
||||
optimize=0,
|
||||
)
|
||||
pyz = PYZ(a.pure)
|
||||
|
||||
exe = EXE(
|
||||
pyz,
|
||||
a.scripts,
|
||||
[],
|
||||
exclude_binaries=True,
|
||||
name='OxApp',
|
||||
debug=False,
|
||||
bootloader_ignore_signals=False,
|
||||
strip=False,
|
||||
upx=False,
|
||||
console=False,
|
||||
disable_windowed_traceback=False,
|
||||
argv_emulation=False,
|
||||
target_arch=None,
|
||||
codesign_identity=None,
|
||||
entitlements_file=None,
|
||||
)
|
||||
coll = COLLECT(
|
||||
exe,
|
||||
a.binaries,
|
||||
a.datas,
|
||||
strip=False,
|
||||
upx=False,
|
||||
upx_exclude=['vcruntime140.dll', 'libcrypto-1_1.dll', 'libssl-1_1.dll'],
|
||||
name='OxApp',
|
||||
)
|
||||
2
README.md
Normal file
2
README.md
Normal file
@@ -0,0 +1,2 @@
|
||||
build `pyinstaller OxApp.spec --noconfirm`
|
||||
build pyside6-deploy `pyside6-deploy`
|
||||
12
build.bat
Normal file
12
build.bat
Normal file
@@ -0,0 +1,12 @@
|
||||
@echo off
REM Build helper: dispatches on the first argument to compile resources
REM or package the application; falls back to printing usage.
if "%1"=="resources" (
    pyside6-rcc resources.qrc -o resources_rc.py
) else if "%1"=="deploy" (
    pyside6-deploy.exe -c pysidedeploy.spec
) else if "%1"=="pyinstaller" (
    pyinstaller OxApp.spec --noconfirm
) else (
    echo Commandes disponibles:
    echo build.bat resources - Compile les ressources
    echo build.bat deploy - Déploie l'application
    echo build.bat pyinstaller - Construit l'application avec PyInstaller
)
|
||||
101
main.py
Normal file
101
main.py
Normal file
@@ -0,0 +1,101 @@
|
||||
|
||||
|
||||
from PySide6.QtCore import QStandardPaths, QDataStream, QByteArray, QIODevice, Signal, Qt
|
||||
from PySide6.QtNetwork import QLocalServer, QLocalSocket
|
||||
from PySide6.QtWidgets import QApplication
|
||||
|
||||
import qasync
|
||||
import sys
|
||||
import asyncio
|
||||
import os
|
||||
import platform
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
|
||||
from src.logs import configure_logging
|
||||
from windows.main_window import MainWindow
|
||||
|
||||
|
||||
class SingleApplication(QApplication):
    """QApplication subclass that enforces a single running instance.

    The first instance opens a :class:`QLocalServer` named ``app_id``;
    any later instance connects to that server, forwards its command-line
    arguments and is expected to exit. Forwarded arguments are re-emitted
    on the primary instance through :attr:`files_received`.
    """

    # Emitted on the primary instance with the list of arguments (file
    # paths) forwarded by a secondary instance.
    files_received = Signal(list)

    def __init__(self, app_id, args):
        super().__init__(args)
        self.app_id = app_id
        self.server = None
        self.is_primary_instance = self.try_connect_to_primary()

        if self.is_primary_instance:
            # First instance: create the local server the secondary
            # instances will connect to.
            self.server = QLocalServer()
            self.server.newConnection.connect(self.handle_new_connection)
            if not self.server.listen(self.app_id):
                # A stale server from a crashed run may still hold the
                # name: remove it and retry once.
                QLocalServer.removeServer(self.app_id)
                if not self.server.listen(self.app_id):
                    # Fix: the original ignored the retry result. Without a
                    # listening server, secondary instances can never reach
                    # us, so at least report the failure.
                    print(
                        f"Local server '{self.app_id}' failed to listen: "
                        f"{self.server.errorString()}",
                        file=sys.stderr,
                    )

    def try_connect_to_primary(self):
        """Try to reach an already-running primary instance.

        Returns:
            bool: True when no primary exists (this process becomes the
            primary), False after forwarding our arguments to the
            existing primary.
        """
        socket = QLocalSocket()
        socket.connectToServer(self.app_id, QIODevice.OpenModeFlag.WriteOnly)

        if socket.waitForConnected(500):
            # A primary is running: forward our CLI arguments to it.
            args = sys.argv[1:] if len(sys.argv) > 1 else []
            stream = QDataStream(socket)
            stream.writeQString(";".join(args))
            socket.flush()
            socket.disconnectFromServer()
            return False  # secondary instance
        return True  # primary instance

    def handle_new_connection(self):
        """Receive the arguments forwarded by a secondary instance."""
        socket = self.server.nextPendingConnection()
        # Fix: nextPendingConnection() can return None if the client
        # already went away; guard before using the socket.
        if socket is None:
            return
        if socket.waitForReadyRead(1000):
            stream = QDataStream(socket)
            args_str = stream.readQString()
            args = args_str.split(";") if args_str else []

            # Tell the rest of the application which files to open.
            if args:
                self.files_received.emit(args)

        socket.disconnectFromServer()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Command-line interface: an optional --dev switch plus any number
    # of file paths to open at startup.
    parser = argparse.ArgumentParser(description='Application PySide6', allow_abbrev=False)
    parser.add_argument('--dev', action='store_true', help='Active le mode développement avec logs')
    parser.add_argument('files', nargs='*', help='Fichiers à ouvrir')
    cli, _unknown = parser.parse_known_args()

    # Logging verbosity follows the --dev switch.
    configure_logging(cli.dev)

    os.environ["QTWEBENGINE_CHROMIUM_FLAGS"] = "--enable-gpu-rasterization --ignore-gpu-blocklist"
    if cli.dev:
        os.environ["QTWEBENGINE_REMOTE_DEBUGGING"] = "4000"

    app = SingleApplication("OxAPP25", sys.argv)

    # Drive the Qt event loop from asyncio via qasync.
    event_loop = qasync.QEventLoop(app)
    asyncio.set_event_loop(event_loop)

    app_close_event = asyncio.Event()
    app.aboutToQuit.connect(app_close_event.set)

    window = MainWindow()
    # Files forwarded by secondary instances are routed to the window.
    app.files_received.connect(window.handle_files)
    if cli.files:
        window.handle_files(cli.files)
    window.show()

    with event_loop:
        event_loop.run_until_complete(app_close_event.wait())
|
||||
BIN
oxpanel.ico
Normal file
BIN
oxpanel.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 115 KiB |
102
pysidedeploy.spec
Normal file
102
pysidedeploy.spec
Normal file
@@ -0,0 +1,102 @@
|
||||
[app]
|
||||
|
||||
# title of your application
|
||||
title = OxApp
|
||||
|
||||
# executable name
|
||||
executable_name = OxApp
|
||||
|
||||
# project directory. the general assumption is that project_dir is the parent directory
|
||||
# of input_file
|
||||
project_dir = D:\Dev\oxapp25
|
||||
|
||||
# source file path
|
||||
input_file = D:\Dev\oxapp25\main.py
|
||||
|
||||
# directory where the executable output is generated
|
||||
exec_directory = .
|
||||
|
||||
# path to .pyproject project file
|
||||
project_file =
|
||||
|
||||
# application icon
|
||||
icon = D:\Dev\oxapp25\oxpanel.ico
|
||||
|
||||
[python]
|
||||
|
||||
# python path
|
||||
python_path = D:\Dev\oxapp25\.venv\Scripts\python.exe
|
||||
|
||||
# python packages to install
|
||||
packages = Nuitka==2.5.1
|
||||
|
||||
# buildozer = for deploying Android application
|
||||
android_packages = buildozer==1.5.0,cython==0.29.33
|
||||
|
||||
[qt]
|
||||
|
||||
# comma separated path to qml files required
|
||||
# normally all the qml files required by the project are added automatically
|
||||
qml_files =
|
||||
|
||||
# excluded qml plugin binaries
|
||||
excluded_qml_plugins =
|
||||
|
||||
# qt modules used. comma separated
|
||||
modules = Gui,Network,Core,Widgets,WebChannel,WebEngineWidgets,WebEngineCore
|
||||
|
||||
# qt plugins used by the application. only relevant for desktop deployment. for qt plugins used
|
||||
# in android application see [android][plugins]
|
||||
plugins = networkinformation,styles,networkaccess,tls
|
||||
|
||||
[android]
|
||||
|
||||
# path to pyside wheel
|
||||
wheel_pyside =
|
||||
|
||||
# path to shiboken wheel
|
||||
wheel_shiboken =
|
||||
|
||||
# plugins to be copied to libs folder of the packaged application. comma separated
|
||||
plugins =
|
||||
|
||||
[nuitka]
|
||||
|
||||
# usage description for permissions requested by the app as found in the info.plist file
|
||||
# of the app bundle
|
||||
# eg = extra_args = --show-modules --follow-stdlib
|
||||
macos.permissions =
|
||||
|
||||
# mode of using nuitka. accepts standalone or onefile. default is onefile.
|
||||
mode = standalone
|
||||
|
||||
# (str) specify any extra nuitka arguments
|
||||
extra_args = --quiet --noinclude-qt-translations --windows-console-mode=disable --output-filename=oxapp
|
||||
|
||||
[buildozer]
|
||||
|
||||
# build mode
|
||||
# possible options = [release, debug]
|
||||
# release creates an aab, while debug creates an apk
|
||||
mode = release
|
||||
|
||||
# contains path to pyside6 and shiboken6 recipe dir
|
||||
recipe_dir =
|
||||
|
||||
# path to extra qt android jars to be loaded by the application
|
||||
jars_dir =
|
||||
|
||||
# if empty uses default ndk path downloaded by buildozer
|
||||
ndk_path =
|
||||
|
||||
# if empty uses default sdk path downloaded by buildozer
|
||||
sdk_path =
|
||||
|
||||
# other libraries to be loaded. comma separated.
|
||||
# loaded at app startup
|
||||
local_libs =
|
||||
|
||||
# architecture of deployed platform
|
||||
# possible values = ["aarch64", "armv7a", "i686", "x86_64"]
|
||||
arch =
|
||||
|
||||
4
requirements.txt
Normal file
4
requirements.txt
Normal file
@@ -0,0 +1,4 @@
|
||||
PySide6<6.9,>=6.8.0
|
||||
qasync>=0.27.1
|
||||
httpx
|
||||
anyio
|
||||
6
resources.qrc
Normal file
6
resources.qrc
Normal file
@@ -0,0 +1,6 @@
|
||||
<!DOCTYPE RCC>
|
||||
<RCC version="1.0">
|
||||
<qresource>
|
||||
<file>oxpanel.ico</file>
|
||||
</qresource>
|
||||
</RCC>
|
||||
1812
resources_rc.py
Normal file
1812
resources_rc.py
Normal file
File diff suppressed because it is too large
Load Diff
61
setup.iss
Normal file
61
setup.iss
Normal file
@@ -0,0 +1,61 @@
|
||||
; Script généré par l'assistant InnoSetup
|
||||
; Ajustez les valeurs selon votre projet
|
||||
|
||||
#define MyAppName "OxApp"
|
||||
#define MyAppVersion "1.0.0"
|
||||
#define MyAppPublisher "Oxpanel"
|
||||
#define MyAppURL "https://oxpanel.com"
|
||||
#define MyAppExeName "OxApp.exe"
|
||||
#define MyAppAssocName MyAppName + " File"
|
||||
#define MyAppAssocExt ".torrent"
|
||||
#define MyAppAssocKey StringChange(MyAppAssocName, " ", "") + MyAppAssocExt
|
||||
|
||||
[Setup]
|
||||
; NOTE: Le AppId identifie uniquement cette application.
|
||||
; Ne pas utiliser le même AppId pour d'autres installations.
|
||||
AppId={{8F52C7E3-D54A-4B9F-B04C-8A78B15C1F34}}
|
||||
AppName={#MyAppName}
|
||||
AppVersion={#MyAppVersion}
|
||||
AppPublisher={#MyAppPublisher}
|
||||
AppPublisherURL={#MyAppURL}
|
||||
AppSupportURL={#MyAppURL}
|
||||
AppUpdatesURL={#MyAppURL}
|
||||
DefaultDirName={autopf}\{#MyAppName}
|
||||
DefaultGroupName={#MyAppName}
|
||||
; Si vous souhaitez que les utilisateurs puissent désactiver les raccourcis du menu Démarrer
|
||||
AllowNoIcons=yes
|
||||
; Recommandé pour les applications Windows
|
||||
OutputDir=.\installer
|
||||
OutputBaseFilename=OxApp_Setup
|
||||
Compression=lzma
|
||||
SolidCompression=yes
|
||||
; Icône d'installation
|
||||
; SetupIconFile=path\to\icon.ico
|
||||
; Interface graphique moderne
|
||||
WizardStyle=modern
|
||||
|
||||
[Languages]
|
||||
Name: "french"; MessagesFile: "compiler:Languages\French.isl"
|
||||
|
||||
[Tasks]
|
||||
Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked
|
||||
Name: "associatetorrent"; Description: "Associer les fichiers .torrent à {#MyAppName}"; GroupDescription: "Association de fichiers:"; Flags: unchecked
|
||||
|
||||
[Files]
|
||||
; Ajoutez tous les fichiers nécessaires à votre application
|
||||
Source: "D:\Dev\oxapp25\dist\OxApp.dist\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs
|
||||
|
||||
[Icons]
|
||||
Name: "{group}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"
|
||||
Name: "{group}\{cm:UninstallProgram,{#MyAppName}}"; Filename: "{uninstallexe}"
|
||||
Name: "{autodesktop}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; Tasks: desktopicon
|
||||
|
||||
[Registry]
|
||||
; Association des fichiers .torrent
|
||||
Root: HKCR; Subkey: "{#MyAppAssocExt}"; ValueType: string; ValueName: ""; ValueData: "{#MyAppAssocKey}"; Flags: uninsdeletevalue; Tasks: associatetorrent
|
||||
Root: HKCR; Subkey: "{#MyAppAssocKey}"; ValueType: string; ValueName: ""; ValueData: "{#MyAppAssocName}"; Flags: uninsdeletekey; Tasks: associatetorrent
|
||||
Root: HKCR; Subkey: "{#MyAppAssocKey}\DefaultIcon"; ValueType: string; ValueName: ""; ValueData: "{app}\{#MyAppExeName},0"; Tasks: associatetorrent
|
||||
Root: HKCR; Subkey: "{#MyAppAssocKey}\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#MyAppExeName}"" ""%1"""; Tasks: associatetorrent
|
||||
|
||||
[Run]
|
||||
Filename: "{app}\{#MyAppExeName}"; Description: "{cm:LaunchProgram,{#StringChange(MyAppName, '&', '&&')}}"; Flags: nowait postinstall skipifsilent
|
||||
65
src/async_file.py
Normal file
65
src/async_file.py
Normal file
@@ -0,0 +1,65 @@
|
||||
import asyncio
|
||||
from functools import partial
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
# Shared executor used by every AsyncFile for its blocking file operations.
_executor = ThreadPoolExecutor()


class AsyncFile:
    """Async wrapper around a regular file object.

    Every blocking file operation is delegated to the shared
    ThreadPoolExecutor via ``loop.run_in_executor`` so the event loop
    never blocks on disk I/O. Usable as an async context manager::

        async with AsyncFile(path, "w") as f:
            await f.write(data)
    """

    def __init__(self, file_path, mode='r', *args, **kwargs):
        self.file_path = file_path
        self.mode = mode
        self.args = args
        self.kwargs = kwargs
        self.file = None
        # Must be constructed from inside a running event loop.
        self._loop = asyncio.get_running_loop()

    async def _run(self, func, *args):
        """Run a blocking callable in the shared executor."""
        return await self._loop.run_in_executor(_executor, func, *args)

    def _require_open(self):
        """Raise if the underlying file has not been opened yet."""
        if not self.file:
            raise ValueError("Le fichier n'est pas ouvert")

    async def __aenter__(self):
        # Open lazily and idempotently: entering an AsyncFile that
        # async_open() already opened must not leak a second handle.
        if self.file is None:
            opener = partial(open, self.file_path, self.mode, *self.args, **self.kwargs)
            self.file = await self._run(opener)
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Close asynchronously; resetting self.file makes double-exit safe.
        if self.file:
            await self._run(self.file.close)
            self.file = None

    async def write(self, data):
        """Write *data* to the file without blocking the event loop."""
        self._require_open()
        await self._run(self.file.write, data)

    async def read(self, size=-1):
        """Read up to *size* units from the file (-1 reads everything)."""
        self._require_open()
        return await self._run(self.file.read, size)

    async def seek(self, offset, whence=0):
        """Move the file cursor, mirroring ``file.seek``."""
        self._require_open()
        return await self._run(self.file.seek, offset, whence)

    async def tell(self):
        """Return the current cursor position."""
        self._require_open()
        return await self._run(self.file.tell)

    async def flush(self):
        """Flush buffered data to disk."""
        self._require_open()
        await self._run(self.file.flush)


# Helper mirroring aiofiles.open: the returned file is ready to use.
async def async_open(file_path, mode='r', *args, **kwargs):
    """Open *file_path* asynchronously and return an open AsyncFile.

    Fix: the original returned an *unopened* AsyncFile, so
    ``f = await async_open(...)`` followed by ``await f.read()`` raised
    ValueError. The file is now opened before being returned;
    ``async with (await async_open(...))`` still works because
    ``__aenter__`` is idempotent.
    """
    afile = AsyncFile(file_path, mode, *args, **kwargs)
    await afile.__aenter__()
    return afile
|
||||
289
src/conf.py
Normal file
289
src/conf.py
Normal file
@@ -0,0 +1,289 @@
|
||||
from PySide6.QtCore import QObject, QStandardPaths, Signal
|
||||
from pathlib import Path
|
||||
import json
|
||||
import os
|
||||
import logging
|
||||
import threading
|
||||
import datetime
|
||||
from copy import deepcopy
|
||||
import pickle
|
||||
|
||||
from src.datatypes import ConfType
|
||||
from src.utils import RestrictedUnpickler
|
||||
|
||||
|
||||
|
||||
class ConfManager(QObject):
    """
    Configuration manager with automatic persistence through a
    restricted (secure) pickle.

    All state and file access is guarded by an RLock so the manager can
    be used from several threads; every change is logged in detail and
    mirrored through Qt signals.
    """
    # Emitted with the whole ConfType after any successful change.
    conf_changed = Signal(ConfType)
    download_location_changed = Signal(str)
    files_changed = Signal(list)
    workers_changed = Signal(int)
    token_changed = Signal(dict)

    def __init__(self, parent=None):
        super().__init__(parent)

        # Verbose logger: DEBUG level so every step can be traced.
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.DEBUG)

        # Attach a stream handler only when none is configured yet, to
        # avoid duplicated log lines.
        if not self.logger.handlers:
            handler = logging.StreamHandler()
            formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            handler.setFormatter(formatter)
            self.logger.addHandler(handler)

        self.logger.info("Initialisation de ConfManager")

        # Re-entrant lock protecting self.conf and file operations.
        self._lock = threading.RLock()
        self.logger.debug("Verrou RLock initialisé")

        # Per-user configuration directory provided by Qt.
        self.app_config_path = Path(QStandardPaths.writableLocation(QStandardPaths.StandardLocation.AppConfigLocation))
        self.logger.debug(f"Répertoire de configuration: {self.app_config_path}")

        # Cache directory for the embedded web view.
        self.web_cache_path = self.app_config_path / "web_cache"

        if not self.app_config_path.exists():
            try:
                self.app_config_path.mkdir(parents=True, exist_ok=True)
                self.logger.info(f"Répertoire de configuration créé: {self.app_config_path}")
            except Exception as e:
                self.logger.error(f"Erreur lors de la création du répertoire de configuration: {e}", exc_info=True)

        # Persistent settings file.
        self.conf_file = self.app_config_path / "config.pickle"
        self.logger.info(f"Fichier de configuration défini: {self.conf_file}")

        # Start from defaults, then overlay whatever is on disk.
        self.conf = ConfType()
        self.logger.debug("Objet ConfType initialisé avec valeurs par défaut")

        if self.load_conf():
            self.logger.info("Configuration chargée avec succès")
        else:
            self.logger.warning("Échec du chargement de la configuration, utilisation des valeurs par défaut")

    def load_conf(self) -> bool:
        """
        Load the configuration from the pickle file using the
        restricted (secure) unpickler.

        Returns:
            bool: True when the configuration was loaded (or created
            with defaults), False otherwise.
        """
        with self._lock:
            self.logger.debug("Début du chargement de la configuration")
            try:
                if self.conf_file.exists():
                    self.logger.debug(f"Le fichier de configuration existe: {self.conf_file}")
                    with open(self.conf_file, "rb") as f:
                        self.logger.debug("Fichier de configuration ouvert pour lecture")
                        unpickler = RestrictedUnpickler(f)
                        self.logger.debug("Désérialisation avec RestrictedUnpickler")
                        try:
                            loaded_conf = unpickler.load()
                            self.logger.debug("Données désérialisées avec succès")
                            # Type check so an arbitrary pickled object can
                            # never replace the configuration.
                            if isinstance(loaded_conf, ConfType):
                                self.conf = loaded_conf
                                self.logger.info("Configuration chargée et vérifiée avec succès")
                                return True
                            self.logger.error(f"Le type chargé n'est pas valide: {type(loaded_conf)}")
                            return False
                        # Fix: a truncated/corrupted pickle can raise more
                        # than UnpicklingError (EOFError, AttributeError,
                        # ...); back the broken file up in those cases too.
                        except (pickle.UnpicklingError, EOFError, AttributeError,
                                ImportError, IndexError) as ue:
                            self.logger.error(f"Erreur de désérialisation restrictive: {ue}", exc_info=True)
                            self._backup_corrupted_config()
                            return False
                else:
                    self.logger.warning(f"Fichier de configuration non trouvé, création par défaut: {self.conf_file}")
                    self.save_conf()
                    return True
            except Exception as e:
                self.logger.error(f"Erreur inattendue lors du chargement de la configuration: {e}", exc_info=True)
                return False

    def save_conf(self) -> bool:
        """
        Persist the current configuration to disk.

        Fix: the data is written to a temporary file that atomically
        replaces the real one (os.replace), so a crash mid-write can no
        longer leave a corrupted config.pickle behind.

        Returns:
            bool: True on success, False otherwise.
        """
        with self._lock:
            self.logger.debug("Début de la sauvegarde de la configuration")
            try:
                # Deep copy so concurrent mutations cannot corrupt the
                # serialization in progress.
                conf_to_save = deepcopy(self.conf)
                self.logger.debug("Copie profonde de la configuration créée pour la sauvegarde")

                tmp_file = self.conf_file.with_suffix(".pickle.tmp")
                with open(tmp_file, 'wb') as f:
                    self.logger.debug("Fichier de configuration ouvert pour écriture")
                    pickle.dump(conf_to_save, f, protocol=pickle.HIGHEST_PROTOCOL)
                    self.logger.debug(f"Sérialisation effectuée avec le protocole {pickle.HIGHEST_PROTOCOL}")
                # Atomic swap (same directory, hence same filesystem).
                os.replace(tmp_file, self.conf_file)

                self.logger.info("Configuration sauvegardée avec succès")
                return True
            except Exception as e:
                self.logger.error(f"Erreur lors de la sauvegarde de la configuration: {e}", exc_info=True)
                return False

    def _backup_corrupted_config(self):
        """
        Save a timestamped copy of the corrupted configuration file so
        it can be inspected later instead of being silently lost.
        """
        if self.conf_file.exists():
            try:
                # Timestamped name, e.g. config_20250101_120000_corrupted.bak
                timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
                backup_path = self.conf_file.with_name(f"{self.conf_file.stem}_{timestamp}_corrupted.bak")
                self.logger.debug(f"Préparation de la sauvegarde du fichier corrompu vers: {backup_path}")

                # Raw byte-for-byte copy of the broken file.
                with open(self.conf_file, 'rb') as src_file:
                    corrupted_data = src_file.read()
                with open(backup_path, 'wb') as backup_file:
                    backup_file.write(corrupted_data)

                self.logger.warning(f"Configuration corrompue sauvegardée dans {backup_path}")
                self.logger.debug(f"Taille du fichier corrompu: {os.path.getsize(backup_path)} octets")
            except Exception as e:
                self.logger.error(f"Échec de la sauvegarde du fichier corrompu: {e}", exc_info=True)

    def get_value(self, key, default=None):
        """
        Return the configuration value stored under *key*.

        Args:
            key: Name of the property to read.
            default: Value returned when the key does not exist.

        Returns:
            The value bound to *key*, or *default* for an unknown key.
        """
        with self._lock:
            self.logger.debug(f"Récupération de la valeur pour la clé: {key}")
            if hasattr(self.conf, key):
                value = getattr(self.conf, key)
                self.logger.debug(f"Valeur récupérée pour {key}: {value}")
                return value
            self.logger.warning(f"Tentative d'accès à une clé inexistante: {key}")
            return default

    def set_value(self, key, value):
        """
        Set a configuration value, emit the matching signals and persist
        the configuration.

        Args:
            key: Name of the property to set.
            value: Value to assign.

        Returns:
            bool: True when the value was set and saved, False otherwise.
        """
        with self._lock:
            self.logger.debug(f"Tentative de définition de la valeur pour la clé: {key}")
            if not hasattr(self.conf, key):
                self.logger.warning(f"Tentative de définition d'une clé inexistante: {key}")
                return False
            try:
                # Keep the old value around for the audit log.
                old_value = getattr(self.conf, key)
                setattr(self.conf, key, value)
                self.logger.info(f"Valeur de {key} modifiée: {old_value} -> {value}")

                # Per-key change notifications.
                if key == "download_location":
                    self.download_location_changed.emit(value)
                    self.logger.debug(f"Signal download_location_changed émis avec {value}")
                elif key == "files":
                    # 'files' is a dict keyed by id; listeners get the values.
                    self.files_changed.emit(list(value.values()))
                    self.logger.debug(f"Signal files_changed émis avec {len(value)} fichiers")
                elif key == "workers":
                    self.workers_changed.emit(value)
                    self.logger.debug(f"Signal workers_changed émis avec {value}")
                elif key == "token":
                    self.token_changed.emit(value)
                    self.logger.debug("Signal token_changed émis")

                # Global notification that something changed.
                self.conf_changed.emit(self.conf)
                self.logger.debug("Signal conf_changed émis")

                # Auto-persist every change.
                self.save_conf()
                return True
            except Exception as e:
                self.logger.error(f"Erreur lors de la définition de {key}: {e}", exc_info=True)
                return False

    def reset_to_defaults(self):
        """
        Reset the configuration to its default values.

        On save failure the previous configuration is restored, so a
        failed reset never loses the user's settings.

        Returns:
            bool: True on success, False otherwise.
        """
        with self._lock:
            try:
                self.logger.info("Début de la réinitialisation de la configuration aux valeurs par défaut")
                old_conf = deepcopy(self.conf)

                # Fresh instance carries all the defaults.
                self.conf = ConfType()
                self.logger.debug("Configuration réinitialisée aux valeurs par défaut")

                # Notify every listener of the new state.
                self.download_location_changed.emit(self.conf.download_location)
                self.files_changed.emit(list(self.conf.files.values()))
                self.workers_changed.emit(self.conf.workers)
                self.token_changed.emit(self.conf.token)
                self.conf_changed.emit(self.conf)
                self.logger.debug("Tous les signaux émis après réinitialisation")

                if self.save_conf():
                    self.logger.info("Réinitialisation terminée avec succès")
                    return True
                # Save failed: roll back to the previous configuration.
                self.conf = old_conf
                self.logger.error(
                    "Échec de la sauvegarde après réinitialisation, restauration de l'ancienne configuration")
                return False
            except Exception as e:
                self.logger.error(f"Erreur lors de la réinitialisation: {e}", exc_info=True)
                return False
|
||||
|
||||
|
||||
124
src/datatypes.py
Normal file
124
src/datatypes.py
Normal file
@@ -0,0 +1,124 @@
|
||||
import inspect
|
||||
from argparse import FileType
|
||||
from pathlib import Path
|
||||
|
||||
from PySide6.QtCore import QStandardPaths
|
||||
|
||||
from dataclasses import dataclass, asdict
|
||||
|
||||
|
||||
def cache_property_names(cls):
    """Class decorator that caches the class's property names on the class."""
    members = inspect.getmembers(cls, lambda member: isinstance(member, property))
    cls._property_names_cache = [prop_name for prop_name, _prop in members]
    return cls
|
||||
|
||||
|
||||
@dataclass
class DataclassWithProperties:
    """Dataclass mixin whose ``to_dict`` also includes computed properties."""

    @classmethod
    def _get_property_names(cls):
        """Return (and lazily cache) the names of this class's properties."""
        if not hasattr(cls, '_property_names_cache'):
            members = inspect.getmembers(cls, lambda member: isinstance(member, property))
            cls._property_names_cache = [prop_name for prop_name, _prop in members]
        return cls._property_names_cache

    def to_dict(self):
        """Return ``asdict(self)`` augmented with every property value."""
        data = asdict(self)
        for prop_name in type(self)._get_property_names():
            data[prop_name] = getattr(self, prop_name)
        return data
|
||||
|
||||
|
||||
@dataclass
class ConfType:
    """Application settings persisted by ConfManager."""
    download_location: str
    # Keyed by file id; values are this module's FileType (the argparse
    # FileType import at the top of the file shadows it, hence the string
    # annotation).
    files: dict[str, "FileType"]
    workers: int
    token: dict

    def __init__(self,
                 # NOTE(review): this default is resolved once, at import
                 # time, not per instance — confirm that is intended.
                 download_location: str = QStandardPaths.writableLocation(QStandardPaths.StandardLocation.DownloadLocation),
                 files: dict | None = None,
                 workers: int = 5,
                 # Fix: the annotation was the invalid `token: {}`.
                 token: dict | None = None
                 ):
        # Mutable defaults are created per instance to avoid shared state.
        if token is None:
            token = {}
        if files is None:
            files = {}
        self.download_location = download_location
        self.files = files
        self.workers = workers
        self.token = token
|
||||
|
||||
|
||||
@dataclass
|
||||
class FileType:
|
||||
id: str
|
||||
torrent_id: str
|
||||
target: str|Path
|
||||
url: str
|
||||
rel_path: str
|
||||
total_size: int
|
||||
downloaded: bool = False
|
||||
error: str = ""
|
||||
|
||||
def __init__(self, id: str, torrent_id: str, target: str|Path, url: str, rel_path: str, total_size: int, downloaded: bool = False, error: str = ""):
|
||||
self.id = id
|
||||
self.torrent_id = torrent_id
|
||||
if isinstance(target, str):
|
||||
target = Path(target)
|
||||
self.target = target
|
||||
self.url = url
|
||||
self.rel_path = rel_path
|
||||
self.total_size = total_size
|
||||
self.downloaded = downloaded
|
||||
self.error = error
|
||||
|
||||
@property
|
||||
def size_downloaded(self):
|
||||
if self.downloaded:
|
||||
return self.total_size
|
||||
|
||||
target = self.target
|
||||
if isinstance(self.target, str):
|
||||
target = Path(self.target)
|
||||
|
||||
return target.stat().st_size if target.exists() else 0
|
||||
|
||||
def __eq__(self, other):
|
||||
"""
|
||||
Deux fichiers sont considérés égaux s'ils ont le même ID.
|
||||
Cela permet de vérifier si un fichier est dans une collection avec l'opérateur 'in'.
|
||||
"""
|
||||
if not isinstance(other, FileType):
|
||||
return False
|
||||
return self.id == other.id
|
||||
|
||||
def __hash__(self):
|
||||
"""
|
||||
Retourne un hash basé sur l'ID du fichier.
|
||||
Nécessaire pour utiliser les objets FileType comme clés de dictionnaire ou éléments d'ensemble.
|
||||
"""
|
||||
return hash(self.id)
|
||||
|
||||
|
||||
@cache_property_names
@dataclass
class FileStatsType(DataclassWithProperties):
    """Live transfer statistics for one file (sizes in bytes, speed in bytes/s)."""
    total_size: int = 0
    downloaded_size: int = 0
    speed: int = 0

    @property
    def percent(self):
        """Completion percentage; 0 when the total size is unknown/zero."""
        if not self.total_size:
            return 0
        return self.downloaded_size / self.total_size * 100

    @property
    def eta(self):
        """Estimated time in seconds; 0 when the speed is zero.

        NOTE(review): this divides ``total_size`` (not the remaining bytes)
        by the speed — confirm whether remaining time was intended.
        """
        if not self.speed:
            return 0
        return self.total_size / self.speed
|
||||
297
src/download.py
Normal file
297
src/download.py
Normal file
@@ -0,0 +1,297 @@
|
||||
from PySide6.QtCore import QObject, Signal, QTimer
|
||||
from PySide6.QtNetwork import QNetworkCookie
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import time
|
||||
from typing import Literal, TYPE_CHECKING
|
||||
from urllib.parse import urljoin
|
||||
from pathlib import Path
|
||||
import anyio
|
||||
|
||||
import httpx
|
||||
|
||||
from src.datatypes import FileType, FileStatsType
|
||||
from src.async_file import async_open
|
||||
if TYPE_CHECKING:
|
||||
from windows.main_window import MainWindow
|
||||
|
||||
|
||||
class DownloadManager(QObject):
    """Qt-side asynchronous download manager built on httpx.

    Keeps a dict of FileType entries (``self.files``), runs up to
    ``max_worker`` concurrent downloads with HTTP Range resume support,
    and publishes progress through Qt signals.
    """

    # Emitted with the aggregate status dict built by update_status().
    status_updated = Signal(dict)
    # Declared but not emitted anywhere in this class.
    stats_updated = Signal(dict)
    # Emitted whenever self.files changes; a slot persists it to conf.
    files_updated = Signal(dict)

    def __init__(self, parent: "MainWindow" = None):
        """Set up state, the persistence slot, a 1 s status timer and logging."""
        super().__init__(parent)

        self.conf = parent.conf
        self.base_url = parent.url

        self.max_worker = 3                  # max concurrent downloads
        self.chunk_size = 2 * 1024 * 1024    # 2 MiB per streamed chunk
        self.pause = True                    # manager starts paused
        self.files: dict[str, FileType] = self.conf.get_value("files", {})
        self.tasks: dict[FileType, asyncio.Task] = {}    # currently running downloads
        self.task_stats: dict[str, FileStatsType] = {}   # live stats keyed by file id
        self.waiter = asyncio.Event()        # wakes loop_queue when state changes
        self.client_session: None|httpx.AsyncClient = None
        self.cookies = []                    # cookies queued before the session exists

        # slots
        # self.status_updated.connect(lambda data: self.conf.set_value("files", self.files))
        self.files_updated.connect(lambda data: self.conf.set_value("files", data))

        # stats timer
        self.timer_dl_stats = QTimer(self)
        self.timer_dl_stats.timeout.connect(self.update_status)
        self.timer_dl_stats.start(1000)

        # Dedicated logger for this class.
        self.logger = logging.getLogger('DownloadManager')
        self.logger.info("Initialisation du gestionnaire de téléchargement")
        self.logger.debug(f"Paramètres: max_worker={self.max_worker}, chunk_size={self.chunk_size}, pause={self.pause}")

        self.status = {}
        self.update_status()

    async def initialize(self):
        """Create the shared httpx client and replay cookies added before init."""
        self.client_session = httpx.AsyncClient(
            timeout=httpx.Timeout(None),  # no timeout: downloads can be long
            follow_redirects=True,
            verify=False,  # NOTE(review): TLS verification disabled — confirm intended
            # http2=True,
        )

        for cookie in self.cookies:
            await self.add_cookie(cookie)
        # NOTE(review): message says "aiohttp" but the session is httpx (stale string).
        self.logger.info("Session aiohttp initialisée")

    async def loop_queue(self):
        """Main scheduling loop: spawn downloads until the worker pool is full.

        Runs forever; blocks on the waiter event whenever the pool is full,
        the manager is paused, or no file is pending.
        """
        if self.client_session is None:
            await self.initialize()

        self.logger.info("Démarrage de la boucle de téléchargement")
        while True:
            if len(self.tasks) >= self.max_worker or self.pause:
                await self.wait()
            else:
                file = await self.next_file()
                if file is None:
                    await self.wait()
                else:
                    self.tasks[file] = asyncio.create_task(self.download_file(file))

    async def wait(self):
        """Block until task_ended() or set_pause() sets the waiter event."""
        self.logger.info("loop queue paused, waiting for tasks to finish...")
        self.waiter.clear()
        await self.waiter.wait()
        self.logger.info("loop queue resumed")


    def set_pause(self, value):
        """Pause (cancel running tasks) or resume (wake the scheduling loop)."""
        if self.pause == value:
            return

        self.pause = value
        if self.pause:
            # Keys of self.tasks are FileType objects (despite the name file_id).
            for file_id, task in self.tasks.items():
                if not task.done():
                    task.cancel()
            self.logger.info("Tous les téléchargements ont été mis en pause")
        else:
            self.pause = False  # redundant: already False at this point
            self.waiter.set()
            self.logger.info("Reprise des téléchargements")
        self.update_status()

    async def next_file(self) -> FileType | None:
        """Return the first file that is neither downloaded nor running, else None."""
        self.logger.debug("Recherche du prochain fichier à télécharger")
        for file in self.files.values():
            if not file.downloaded and file not in self.tasks:
                self.logger.debug(f"picking file {file}")
                return file
        self.logger.debug("No file found to download, waiting for tasks to finish...")
        return None

    async def download_file(self, file: FileType):
        """Download one file with resume support and smoothed speed tracking.

        Sends a Range header when a partial file exists and appends to it.
        Any error is recorded on ``file.error``; cleanup always runs through
        task_ended() in the ``finally`` clause.
        """
        self.logger.info(f"Début du téléchargement: {vars(file)}")

        # Build the stats object and check whether the download is already complete.
        file_path = anyio.Path(file.target)
        stats = FileStatsType()
        stats.total_size = file.total_size
        file_stats = await file_path.stat() if await file_path.exists() else None
        stats.downloaded_size = file_stats.st_size if file_stats else 0
        if stats.downloaded_size >= stats.total_size:
            file.downloaded = True
            await self.task_ended(file)
            return

        await file_path.parent.mkdir(parents=True, exist_ok=True)

        self.task_stats[file.id] = stats

        # Build the request headers (Range to resume a partial file).
        headers = {}
        if stats.downloaded_size > 0:
            headers.update({"Range": f"bytes={stats.downloaded_size}-{stats.total_size}"})

        # Append when resuming, otherwise write from scratch.
        mode: Literal["ab", "wb"] = "ab" if stats.downloaded_size > 0 else "wb"
        try:
            async with self.client_session.stream("GET", file.url, headers=headers) as response:
                async with await anyio.open_file(file_path, mode) as f:
                    response.raise_for_status()

                    last_update_time = time.monotonic()
                    last_downloaded_size = stats.downloaded_size
                    async for chunk in response.aiter_bytes(self.chunk_size):
                        if not chunk:
                            break
                        await f.write(chunk)
                        if self.pause:
                            # Stop cleanly; the partial file allows resuming later.
                            break
                        chunk_size = len(chunk)
                        stats.downloaded_size += chunk_size

                        current_time = time.monotonic()
                        elapsed_time = current_time - last_update_time

                        # Refresh the speed estimate at most once per second.
                        if elapsed_time >= 1.0:
                            bytes_downloaded = stats.downloaded_size - last_downloaded_size
                            current_speed = bytes_downloaded / elapsed_time
                            if stats.speed > 0:
                                # Exponential smoothing: 70 % new value, 30 % previous.
                                stats.speed = round(0.7 * current_speed + 0.3 * stats.speed)
                            else:
                                stats.speed = round(current_speed)

                            last_update_time = current_time
                            last_downloaded_size = stats.downloaded_size
        except httpx.HTTPStatusError as e:
            self.logger.error(f"Erreur HTTP lors du téléchargement de {file.target}: {e.response.status_code} - {e}")
            file.error = f"Erreur HTTP {e.response.status_code}: {str(e)}"

        except httpx.TimeoutException as e:
            self.logger.error(f"Délai d'attente dépassé lors du téléchargement de {file.target}: {str(e)}")
            file.error = f"Délai d'attente dépassé: {str(e)}"

        except httpx.ConnectError as e:
            self.logger.error(f"Erreur de connexion lors du téléchargement de {file.target}: {str(e)}")
            file.error = f"Erreur de connexion: {str(e)}"

        except httpx.NetworkError as e:
            self.logger.error(f"Erreur réseau lors du téléchargement de {file.target}: {str(e)}")
            file.error = f"Erreur réseau: {str(e)}"

        except httpx.RequestError as e:
            self.logger.error(f"Erreur de requête lors du téléchargement de {file.target}: {str(e)}")
            file.error = f"Erreur de requête: {str(e)}"

        except asyncio.CancelledError:
            # Raised when set_pause(True) cancels the task; swallowed so cleanup runs.
            self.logger.warning(f"Téléchargement de {file.target} annulé")
            file.error = "Téléchargement annulé"

        except IOError as e:
            self.logger.error(f"Erreur d'E/S lors de l'écriture du fichier {file.target}: {str(e)}")
            file.error = f"Erreur d'E/S: {str(e)}"

        except Exception as e:
            self.logger.error(f"Erreur inattendue lors du téléchargement de {file.target}: {type(e).__name__} - {str(e)}")
            file.error = f"Erreur inattendue: {str(e)}"

        else:
            file.downloaded = True
            self.logger.info(f"Téléchargement de {file.target} terminé avec succès")

        finally:
            await self.task_ended(file)

    async def task_ended(self, file):
        """Drop bookkeeping for a finished download, wake the loop, persist files."""
        self.logger.debug(f"Fin de la tâche pour le fichier {file.id}")
        self.tasks.pop(file)
        self.logger.debug(f"Tâche supprimée du dictionnaire des tâches actives")

        if file.id in self.task_stats:
            self.logger.debug(f"Suppression des statistiques pour le fichier {file.id}")
            self.task_stats.pop(file.id)
        else:
            self.logger.debug(f"Aucune statistique trouvée pour le fichier {file.id}")

        # self.logger.debug("Mise à jour du statut du gestionnaire de téléchargement")
        # self.update_status()

        self.logger.debug("Notification du waiter pour traiter le prochain fichier")
        self.waiter.set()

        self.files_updated.emit(self.files)

    def add_files(self, files: list[dict]):
        """Register server-provided file dicts; existing ids are left untouched."""
        base_target_path = Path(self.conf.get_value("download_location"))
        for file in files:
            filetype = FileType(
                id=file["id"],
                torrent_id=file["torrent"],
                target=base_target_path / file["rel_name"],
                url=urljoin(self.base_url, file["download_url"]),
                rel_path=file["rel_name"],
                total_size=file["size"],
            )
            # A file already complete on disk counts as downloaded.
            filetype.downloaded = (
                True if filetype.target.exists() and filetype.target.stat().st_size == filetype.total_size
                else False
            )
            self.files.setdefault(filetype.id, filetype)
        self.update_status()
        self.files_updated.emit(self.files)

    def del_files(self, file_ids: list[str]):
        """Remove the given ids from the queue (KeyError on an unknown id)."""
        for file_id in file_ids:
            self.files.pop(file_id)
        self.update_status()
        self.files_updated.emit(self.files)

    def update_status(self):
        """Rebuild the aggregate status dict and emit status_updated."""
        self.status = {
            "pause": self.pause,
            "max_worker": self.max_worker,
            "total_files": len(self.files),
            "downloaded_files": sum(file.downloaded for file in self.files.values() if file.downloaded),
            "downloading": [task.id for task in self.tasks.keys()],
            "total_size": sum(file.total_size for file in self.files.values()),
            "downloaded_size": sum(file.total_size for file in self.files.values() if file.downloaded) + sum((dl_stat.downloaded_size for dl_stat in self.task_stats.values()), 0),
            "speed": sum((dl_stat.speed for dl_stat in self.task_stats.values()), 0),
            "downloader_stats": {key: dl_stat.to_dict() for key, dl_stat in self.task_stats.items()}
        }
        self.status_updated.emit(self.status)

    # def update_dl_stats(self):
    #     old_stats = deepcopy(self.dl_stats)
    #     self.dl_stats = {
    #         "speed": sum((dl_stat.speed for dl_stat in self.task_stats.values()), 0),
    #         "downloaded_size": sum((dl_stat.downloaded_size for dl_stat in self.task_stats.values()), 0),
    #         "downloading_stats": {key: dl_stat.to_dict() for key, dl_stat in self.task_stats.items()},
    #     }
    #     if old_stats != self.dl_stats:
    #         self.stats_updated.emit(self.dl_stats)
    #     return self.dl_stats

    async def add_cookie(self, cookie: QNetworkCookie):
        """
        Add a QNetworkCookie to the httpx session without caring about the domain.

        Args:
            cookie: A PySide6 QNetworkCookie object
        """
        if self.client_session is None:
            # Session not created yet: keep the cookie for initialize() to replay.
            self.cookies.append(cookie)
            return

        # Extract the essential data from the QNetworkCookie.
        name = cookie.name().data().decode()
        value = cookie.value().data().decode()

        # Add the cookie directly, without extra attributes.
        self.client_session.cookies[name] = value

        self.logger.info(f"Cookie ajouté: {name}={value}")
|
||||
326
src/download_old.py
Normal file
326
src/download_old.py
Normal file
@@ -0,0 +1,326 @@
|
||||
from PySide6.QtCore import QObject, Signal, QTimer
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import time
|
||||
from copy import deepcopy
|
||||
from typing import Literal, TYPE_CHECKING
|
||||
from urllib.parse import urljoin
|
||||
from pathlib import Path
|
||||
from http.cookies import SimpleCookie
|
||||
|
||||
import aiofiles
|
||||
import aiopath
|
||||
import aiohttp
|
||||
from PySide6.QtNetwork import QNetworkCookie
|
||||
|
||||
from src.datatypes import FileType, FileStatsType
|
||||
if TYPE_CHECKING:
|
||||
from windows.main_window import MainWindow
|
||||
|
||||
|
||||
class DownloadManager(QObject):
    """aiohttp-based predecessor of the manager in src/download.py; kept for reference."""

    status_updated = Signal(dict)
    stats_updated = Signal(dict)
    files_updated = Signal(dict)

    def __init__(self, parent: "MainWindow" = None):
        """Set up state, the persistence slot, a 2 s status timer and logging."""
        super().__init__(parent)

        self.conf = parent.conf
        self.base_url = parent.url

        self.max_worker = 2
        self.chunk_size = 1024 * 1024   # 1 MiB per streamed chunk
        self.pause = True
        self.files: dict[str, FileType] = self.conf.get_value("files", {})
        self.tasks: dict[FileType, asyncio.Task] = {}
        self.task_stats: dict[str, FileStatsType] = {}
        self.waiter = asyncio.Event()
        self.client_session: None|aiohttp.ClientSession = None
        self.cookies = {}

        # slots
        # self.status_updated.connect(lambda data: self.conf.set_value("files", self.files))
        self.files_updated.connect(lambda data: self.conf.set_value("files", data))

        # stats timer
        self.timer_dl_stats = QTimer(self)
        self.timer_dl_stats.timeout.connect(self.update_status)
        self.timer_dl_stats.start(2000)

        # Dedicated logger for this class.
        self.logger = logging.getLogger('DownloadManager')
        self.logger.info("Initialisation du gestionnaire de téléchargement")
        self.logger.debug(f"Paramètres: max_worker={self.max_worker}, chunk_size={self.chunk_size}, pause={self.pause}")

        self.status = {}
        self.update_status()

    async def initialize(self):
        """Create the shared aiohttp session (must run inside the event loop)."""
        self.client_session = aiohttp.ClientSession()
        self.logger.info("Session aiohttp initialisée")

    async def loop_queue(self):
        """Scheduling loop: start downloads until max_worker is reached; runs forever."""
        if self.client_session is None:
            await self.initialize()

        self.logger.info("Démarrage de la boucle de téléchargement")
        while True:
            if len(self.tasks) >= self.max_worker or self.pause:
                await self.wait()
            else:
                file = await self.next_file()
                if file is None:
                    await self.wait()
                else:
                    self.tasks[file] = asyncio.create_task(self.download_file(file))

    async def wait(self):
        """Block until task_ended() or set_pause() sets the waiter event."""
        self.logger.info("loop queue paused, waiting for tasks to finish...")
        self.waiter.clear()
        await self.waiter.wait()
        self.logger.info("loop queue resumed")


    def set_pause(self, value):
        """Pause (cancel running tasks) or resume (wake the scheduling loop)."""
        if self.pause == value:
            return

        self.pause = value
        if self.pause:
            # Keys of self.tasks are FileType objects (despite the name file_id).
            for file_id, task in self.tasks.items():
                if not task.done():
                    task.cancel()
            self.logger.info("Tous les téléchargements ont été mis en pause")
        else:
            self.pause = False  # redundant: already False at this point
            self.waiter.set()
            self.logger.info("Reprise des téléchargements")
        self.update_status()

    async def next_file(self) -> FileType | None:
        """Return the first file that is neither downloaded nor running, else None."""
        self.logger.debug("Recherche du prochain fichier à télécharger")
        for file in self.files.values():
            if not file.downloaded and file not in self.tasks:
                self.logger.debug(f"picking file {file}")
                return file
        self.logger.debug("No file found to download, waiting for tasks to finish...")
        return None

    async def download_file(self, file: FileType):
        """Download one file (aiohttp variant); errors recorded on file.error."""
        self.logger.info(f"Début du téléchargement: {vars(file)}")

        # Build the stats object and check whether the download is already complete.
        file_path = aiopath.AsyncPath(file.target)
        stats = FileStatsType()
        stats.total_size = file.total_size
        # NOTE(review): this awaits ``.st_size`` of the *coroutine* returned by
        # stat(); it should read ``(await file_path.stat()).st_size`` — confirm.
        stats.downloaded_size = await file_path.stat().st_size if await file_path.exists() else 0
        if stats.downloaded_size >= stats.total_size:
            file.downloaded = True
            await self.task_ended(file)
            return

        await file_path.parent.mkdir(parents=True, exist_ok=True)

        self.task_stats[file.id] = stats

        # Build the request headers (Range to resume a partial file).
        headers = {}
        if stats.downloaded_size > 0:
            headers.update({"Range": f"bytes={stats.downloaded_size}-{stats.total_size}"})

        # Append when resuming, otherwise write from scratch.
        mode: Literal["ab", "wb"] = "ab" if stats.downloaded_size > 0 else "wb"
        try:
            async with aiofiles.open(file_path, mode) as f:
                async with self.client_session.get(file.url, cookies=self.cookies) as response:
                    print("Content-Encoding:", response.headers.get('Content-Encoding'))
                    last_update_time = time.monotonic()
                    last_downloaded_size = stats.downloaded_size
                    async for chunk in response.content.iter_chunked(self.chunk_size):
                        if not chunk:
                            break
                        await f.write(chunk)
                        if self.pause:
                            # Stop cleanly; the partial file allows resuming later.
                            break
                        chunk_size = len(chunk)
                        stats.downloaded_size += chunk_size

                        current_time = time.monotonic()
                        elapsed_time = current_time - last_update_time

                        # Refresh the speed estimate at most twice per second.
                        if elapsed_time >= 0.5:
                            bytes_downloaded = stats.downloaded_size - last_downloaded_size
                            current_speed = bytes_downloaded / elapsed_time
                            if stats.speed > 0:
                                # Exponential smoothing: 70 % new value, 30 % previous.
                                stats.speed = round(0.7 * current_speed + 0.3 * stats.speed)
                            else:
                                stats.speed = round(current_speed)

                            last_update_time = current_time
                            last_downloaded_size = stats.downloaded_size
        except aiohttp.ClientResponseError as e:
            self.logger.error(f"Erreur HTTP lors du téléchargement de {file.target}: {e.status} - {e.message}")
            file.error = f"Erreur HTTP {e.status}: {e.message}"

        except aiohttp.ClientError as e:
            self.logger.error(f"Erreur de connexion lors du téléchargement de {file.target}: {str(e)}")
            file.error = f"Erreur de connexion: {str(e)}"

        except asyncio.CancelledError:
            # Cancellation from set_pause(True); swallowed so cleanup still runs.
            self.logger.warning(f"Téléchargement de {file.target} annulé")
            file.error = "Téléchargement annulé"
            # raise  # Propagate the exception for a clean cancellation

        except IOError as e:
            self.logger.error(f"Erreur d'E/S lors de l'écriture du fichier {file.target}: {str(e)}")
            file.error = f"Erreur d'E/S: {str(e)}"

        except Exception as e:
            self.logger.error(f"Erreur inattendue lors du téléchargement de {file.target}: {type(e).__name__} - {str(e)}")
            file.error = f"Erreur inattendue: {str(e)}"

        else:
            file.downloaded = True
            self.logger.info(f"Téléchargement de {file.target} terminé avec succès")

        finally:
            await self.task_ended(file)

    async def task_ended(self, file):
        """Drop bookkeeping for a finished download, wake the loop, persist files."""
        self.logger.debug(f"Fin de la tâche pour le fichier {file.id}")
        self.tasks.pop(file)
        self.logger.debug(f"Tâche supprimée du dictionnaire des tâches actives")

        if file.id in self.task_stats:
            self.logger.debug(f"Suppression des statistiques pour le fichier {file.id}")
            self.task_stats.pop(file.id)
        else:
            self.logger.debug(f"Aucune statistique trouvée pour le fichier {file.id}")

        # self.logger.debug("Mise à jour du statut du gestionnaire de téléchargement")
        # self.update_status()

        self.logger.debug("Notification du waiter pour traiter le prochain fichier")
        self.waiter.set()

        self.files_updated.emit(self.files)

    def add_files(self, files: list[dict]):
        """Register server-provided file dicts; existing ids are left untouched."""
        base_target_path = Path(self.conf.get_value("download_location"))
        for file in files:
            filetype = FileType(
                id=file["id"],
                torrent_id=file["torrent"],
                target=base_target_path / file["rel_name"],
                url=urljoin(self.base_url, file["download_url"]),
                rel_path=file["rel_name"],
                total_size=file["size"],
            )
            # A file already complete on disk counts as downloaded.
            filetype.downloaded = (
                True if filetype.target.exists() and filetype.target.stat().st_size == filetype.total_size
                else False
            )
            self.files.setdefault(filetype.id, filetype)
        self.update_status()
        self.files_updated.emit(self.files)

    def del_files(self, file_ids: list[str]):
        """Remove the given ids from the queue (KeyError on an unknown id)."""
        for file_id in file_ids:
            self.files.pop(file_id)
        self.update_status()
        self.files_updated.emit(self.files)

    def update_status(self):
        """Rebuild the aggregate status dict and emit status_updated."""
        self.status = {
            "pause": self.pause,
            "max_worker": self.max_worker,
            "total_files": len(self.files),
            "downloaded_files": sum(file.downloaded for file in self.files.values() if file.downloaded),
            "downloading": [task.id for task in self.tasks.keys()],
            "total_size": sum(file.total_size for file in self.files.values()),
            "downloaded_size": sum(file.total_size for file in self.files.values() if file.downloaded) + sum((dl_stat.downloaded_size for dl_stat in self.task_stats.values()), 0),
            "speed": sum((dl_stat.speed for dl_stat in self.task_stats.values()), 0),
            "downloader_stats": {key: dl_stat.to_dict() for key, dl_stat in self.task_stats.items()}
        }
        self.status_updated.emit(self.status)

    # def update_dl_stats(self):
    #     old_stats = deepcopy(self.dl_stats)
    #     self.dl_stats = {
    #         "speed": sum((dl_stat.speed for dl_stat in self.task_stats.values()), 0),
    #         "downloaded_size": sum((dl_stat.downloaded_size for dl_stat in self.task_stats.values()), 0),
    #         "downloading_stats": {key: dl_stat.to_dict() for key, dl_stat in self.task_stats.items()},
    #     }
    #     if old_stats != self.dl_stats:
    #         self.stats_updated.emit(self.dl_stats)
    #     return self.dl_stats

    async def add_cookie(self, cookie):
        """
        Add a QNetworkCookie to the client session.

        Args:
            cookie (QNetworkCookie): the PySide6 cookie to add to the session
        """

        try:
            cookie_name = cookie.name().data().decode() if cookie.name() else "Inconnu"
            self.logger.debug(f"Tentative d'ajout d'un cookie: {cookie_name}")

            if not self.client_session:
                self.logger.warning("Impossible d'ajouter le cookie: la session client n'est pas initialisée")
                return

            # Check that this really is a QNetworkCookie.
            from PySide6.QtNetwork import QNetworkCookie
            if not isinstance(cookie, QNetworkCookie):
                self.logger.error(f"Format de cookie invalide: {type(cookie)}, un QNetworkCookie est attendu")
                return

            # Extract the QNetworkCookie fields.
            cookie_name = cookie.name().data().decode()
            cookie_value = cookie.value().data().decode()
            cookie_domain = cookie.domain()
            cookie_path = cookie.path()

            self.logger.debug(
                f"Informations du cookie - Nom: {cookie_name}, Valeur: {cookie_value[:10]}..., Domaine: {cookie_domain}, Chemin: {cookie_path}")

            # Build the aiohttp-side cookie.

            simple_cookie = SimpleCookie()
            simple_cookie[cookie_name] = cookie_value

            # Copy over cookie attributes.
            # if cookie_domain:
            #     self.logger.debug(f"Définition du domaine du cookie: {cookie_domain}")
            #     simple_cookie[cookie_name]['domain'] = cookie_domain

            if cookie_path:
                simple_cookie[cookie_name]['path'] = cookie_path

            if cookie.isSecure():
                self.logger.debug(f"Le cookie '{cookie_name}' est marqué comme sécurisé")
                simple_cookie[cookie_name]['secure'] = True

            if cookie.isHttpOnly():
                self.logger.debug(f"Le cookie '{cookie_name}' est marqué comme HttpOnly")
                simple_cookie[cookie_name]['httponly'] = True

            # Record the expiration unless this is a session cookie.
            if not cookie.isSessionCookie():
                expiration = cookie.expirationDate().toString("yyyy-MM-dd hh:mm:ss")
                self.logger.debug(f"Le cookie expirera le: {expiration}")
                # Date conversion might need more code here.

            # Add the cookie to the aiohttp session — the correct method.
            self.client_session.cookie_jar.update_cookies(simple_cookie)

            self.logger.info(f"Cookie '{cookie_name}' ajouté avec succès à la session")

        except Exception as e:
            self.logger.exception(f"Erreur lors de l'ajout du cookie: {str(e)}")
|
||||
|
||||
|
||||
|
||||
173
src/download_oldold.py
Normal file
173
src/download_oldold.py
Normal file
@@ -0,0 +1,173 @@
|
||||
import aiopath
|
||||
from PySide6.QtCore import Signal, QObject, QTimer, Slot
|
||||
|
||||
import asyncio
|
||||
import aiohttp
|
||||
import aiofiles
|
||||
import time
|
||||
from typing import TYPE_CHECKING, Dict, Any
|
||||
|
||||
from src.conf import ConfManager
|
||||
from src.datatypes import FileType, FileStatsType
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from windows.main_window import MainWindow
|
||||
|
||||
|
||||
|
||||
class DownloadManager(QObject):
    """Oldest queue-based manager design (superseded); kept for reference."""

    # NOTE(review): all of these are *class* attributes — ``downloading`` in
    # particular is a mutable dict shared by every instance; confirm intended.
    max_worker = 5
    downloading: dict[str, "Downloader"] = dict()
    pause = True
    chunk_size = 128 * 1024  # 128KB
    finished = Signal(str)

    file_update = Signal(dict)
    stats_update = Signal(dict)
    status_updated = Signal(dict)

    client_session: aiohttp.ClientSession = None

    def __init__(self, parent=None):
        """Load the persisted file list and start the (now dead) stats timer."""
        super().__init__(parent)
        self.conf: "ConfManager" = parent.conf

        self.files: dict[str, FileType] = self.conf.get_value("files", {})

        self.waiter = asyncio.Event()
        # self.downloads = downloads

        self.timer_stats = QTimer(self)
        # NOTE(review): update_stats is commented out below — this connect
        # references a missing attribute; verify before reviving this class.
        self.timer_stats.timeout.connect(self.update_stats)
        self.timer_stats.start(500)

    @property
    def status(self):
        """Aggregate queue state for the UI."""
        return {
            "pause": self.pause,
            "max_worker": self.max_worker,
            "len_total": len(self.files),
            "len_downloaded": sum(file.downloaded for file in self.files.values()),
            "downloading": list(self.downloading.keys()),
        }

    async def read_queue(self):
        """Scheduling loop: spawn Downloader workers up to max_worker; runs forever."""
        while True:
            if len(self.downloading) >= self.max_worker or self.pause:
                await self.waiter.wait()
            else:
                file = await self.next_file()
                if file is None:
                    await self.waiter.wait()
                else:
                    downloader = Downloader(file, self)
                    downloader.finished.connect(self.file_update.emit)
                    self.downloading[file.id] = downloader
                    asyncio.create_task(downloader.start())

    async def next_file(self):
        """Return the first pending file, or (implicitly) None when there is none."""
        for file in self.files.values():
            if not file.downloaded and file not in self.downloading:
                return file

    async def file_finished(self, file):
        """Remove a finished download and wake the queue loop."""
        self.downloading.pop(file)
        self.waiter.set()

    async def add_files(self, files: list[dict]):
        """Register new file entries and persist the list."""
        for file in files:
            # Adds the entry only if the key does not exist; otherwise does nothing.
            # NOTE(review): this stores the FileType *class*, not an instance — confirm.
            self.files.setdefault(file["id"], FileType)
        self.conf.set_value("files", self.files)
        self.file_update.emit(self.files.items())

    async def del_files(self, file: str|list[str]):
        """Not implemented in this old design."""
        pass

    # @Slot()
    # def update_stats(self):
    #     if len(self.downloading):
    #         file_stats = {file_id: worker.stats for file_id, worker in self.downloading.items()}
    #         global_stats = {
    #             "speed": sum(stat.speed for stat in file_stats.values()),
    #             "total_size": sum(file.total_size for file in self.files.values()),
    #             "size_downloaded": sum(file.total_size for file in self.files.values() if file.downloaded) + sum(stat.downloaded_size for stat in file_stats.values()),
    #         }
    #
    #         return {
    #             "global": global_stats,
    #             "files": file_stats
    #         }
|
||||
|
||||
|
||||
|
||||
class Downloader(QObject):
|
||||
finished = Signal(FileType)
|
||||
|
||||
stats = FileStatsType()
|
||||
|
||||
def __init__(self, file: FileType, manager: DownloadManager):
|
||||
super().__init__(manager)
|
||||
self.file = file
|
||||
self.manager = manager
|
||||
self.target_path = aiopath.AsyncPath(self.file.target)
|
||||
# max queue 200 MO environ
|
||||
self.download_queue = asyncio.Queue(maxsize=1024 * self.manager.chunk_size)
|
||||
|
||||
# limiter le buffer
|
||||
|
||||
async def start(self):
|
||||
# todo : prendre en charge la reprise si échec
|
||||
self.stats.total_size = self.file.total_size
|
||||
self.stats.downloaded_size = (await self.target_path.stat()).st_size if await self.target_path.exists() else 0
|
||||
if self.stats.total_size >= self.stats.downloaded_size:
|
||||
self.file.downloaded = True
|
||||
self.finished.emit(self.file)
|
||||
else:
|
||||
download_task = asyncio.create_task(self.download_data())
|
||||
write_task = asyncio.create_task(self.write_to_disk())
|
||||
await asyncio.gather(download_task, write_task)
|
||||
self.finished.emit(self.file)
|
||||
|
||||
async def download_data(self):
|
||||
async with self.manager.client_session.get(self.file.url) as response:
|
||||
last_update_time = time.monotonic()
|
||||
last_downloaded_size = self.stats.downloaded_size
|
||||
|
||||
async for chunk in response.content.iter_chunked(self.manager.chunk_size):
|
||||
await self.download_queue.put(chunk)
|
||||
# while True:
|
||||
# chunk = await response.content.read(self.manager.chunk_size)
|
||||
# if not chunk:
|
||||
# await self.download_queue.put(None)
|
||||
# break
|
||||
# await self.download_queue.put(chunk)
|
||||
|
||||
chunk_size = len(chunk)
|
||||
self.stats.downloaded_size += chunk_size
|
||||
|
||||
# Calcul simple de la vitesse actuelle
|
||||
current_time = time.monotonic()
|
||||
elapsed_time = current_time - last_update_time
|
||||
|
||||
# Mettre à jour la vitesse toutes les 0.5 seconde
|
||||
if elapsed_time >= 0.5:
|
||||
bytes_downloaded = self.stats.downloaded_size - last_downloaded_size
|
||||
current_speed = bytes_downloaded / elapsed_time
|
||||
# Lissage simple : 70% nouvelle valeur + 30% ancienne valeur
|
||||
if self.stats.speed > 0:
|
||||
self.stats.speed = round(0.7 * current_speed + 0.3 * self.stats.speed)
|
||||
else:
|
||||
self.stats.speed = round(current_speed)
|
||||
|
||||
last_update_time = current_time
|
||||
last_downloaded_size = self.stats.downloaded_size
|
||||
await self.download_queue.put(None)
|
||||
|
||||
async def write_to_disk(self):
    """Drain the download queue into the target file.

    Consumes chunks produced by ``download_data`` and appends them to
    ``self.target_path`` until the ``None`` sentinel is received.
    """
    async with aiofiles.open(self.target_path, "wb") as output:
        while (chunk := await self.download_queue.get()) is not None:
            await output.write(chunk)
|
||||
|
||||
88
src/downloader_sync.py
Normal file
88
src/downloader_sync.py
Normal file
@@ -0,0 +1,88 @@
|
||||
import logging
|
||||
import threading
|
||||
|
||||
from PySide6.QtCore import QObject, Signal, QTimer
|
||||
|
||||
from src.datatypes import FileType, FileStatsType
|
||||
|
||||
|
||||
class DownloadManager(QObject):
    """Thread-based download manager (work-in-progress skeleton).

    Holds the list of files to download, a bounded pool of worker
    threads and per-file statistics, and exposes Qt signals so the UI
    can follow progress.

    NOTE(review): ``update_status`` (and the ``add_files``/``del_files``
    slots used by ``WebHandler``) are referenced but not defined in this
    file — confirm they exist elsewhere or are still to be written.
    """

    status_updated = Signal(dict)
    stats_updated = Signal(dict)
    files_updated = Signal(dict)

    def __init__(self, parent=None):
        super().__init__(parent)

        # Shared configuration / base URL supplied by the main window.
        self.conf = parent.conf
        self.base_url = parent.url

        self.max_worker = 2
        self.chunk_size = 128 * 1024
        self.pause = True
        self.files: dict[str, FileType] = self.conf.get_value("files", {})
        self.tasks: dict[FileType, threading.Thread] = {}
        self.task_stats: dict[str, FileStatsType] = {}
        self.waiter = threading.Event()
        self.cookies = []
        self.lock = threading.Lock()
        self.stop = False

        # Persist the file list whenever it changes.
        self.files_updated.connect(lambda data: self.conf.set_value("files", data))

        # Periodic status refresh.
        self.timer_dl_stats = QTimer(self)
        self.timer_dl_stats.timeout.connect(self.update_status)
        self.timer_dl_stats.start(2000)

        # Dedicated logger for this class.
        self.logger = logging.getLogger('DownloadManager')
        self.logger.info("Initialisation du gestionnaire de téléchargement")
        self.logger.debug(f"Paramètres: max_worker={self.max_worker}, chunk_size={self.chunk_size}, pause={self.pause}")

        self.status = {}
        self.update_status()

    def loop_queue(self):
        """Dispatch loop: blocks while paused or saturated, otherwise picks the next file."""
        self.logger.info("Démarrage de la boucle de téléchargement")
        while True:
            if len(self.tasks) >= self.max_worker or self.pause:
                self.wait()
            else:
                file = self.next_file()
                if file is None:
                    self.wait()
                else:
                    # todo: start the download task
                    pass

    def wait(self):
        """Block the dispatch loop until another thread calls ``self.waiter.set()``."""
        self.logger.info("loop queue paused, waiting for tasks to finish...")
        self.waiter.clear()
        self.waiter.wait()
        self.logger.info("loop queue resumed")

    def set_pause(self, value):
        """Pause or resume downloading; no-op when the state is unchanged.

        Args:
            value (bool): True to pause, False to resume.
        """
        if self.pause == value:
            return

        # Publish the new state *before* waking the loop thread: in the
        # original ordering the loop could wake, re-check a still-True
        # ``self.pause`` and go straight back to waiting (lost resume).
        self.pause = value

        if value:
            for file_id, task in self.tasks.items():
                # todo: cancel the running tasks
                pass
            self.logger.info("Tous les téléchargements ont été mis en pause")
        else:
            self.waiter.set()
            self.logger.info("Reprise des téléchargements")
        self.update_status()

    def next_file(self):
        """Return the first file that is neither downloaded nor already running, else None."""
        self.logger.debug("Recherche du prochain fichier à télécharger")
        for file in self.files.values():
            if not file.downloaded and file not in self.tasks:
                self.logger.debug(f"picking file {file}")
                return file
        self.logger.debug("No file found to download, waiting for tasks to finish...")
        return None
|
||||
96
src/handler.py
Normal file
96
src/handler.py
Normal file
@@ -0,0 +1,96 @@
|
||||
from PySide6.QtCore import QObject, Slot, Signal, QTimer, Property
|
||||
|
||||
import json
|
||||
|
||||
from PySide6.QtWidgets import QFileDialog
|
||||
|
||||
from src.datatypes import FileType
|
||||
from src.download import DownloadManager
|
||||
|
||||
|
||||
class WebHandler(QObject):
    """Bridge object exposed to the embedded web page through QWebChannel.

    Forwards download-manager signals to the page as JSON-style messages
    and exposes the slots/properties the page's JavaScript can call.
    """

    on_message = Signal(dict)
    # Renamed from ``on_site_ready``: the slot method of the same name
    # below overwrote this class attribute, so ``self.on_site_ready``
    # resolved to the bound method and ``.emit()`` raised AttributeError.
    site_ready_changed = Signal()

    def __init__(self, download_manager, parent=None):
        super().__init__(parent)
        self.download_manager: "DownloadManager" = download_manager
        self.conf = self.download_manager.conf
        # Set to True once the page reports it is initialised.
        self.site_ready = False

        self.download_manager.status_updated.connect(lambda data: self.on_message.emit({
            "context": "status_updated",
            "content": data
        }))

        self.download_manager.files_updated.connect(lambda: self.on_message.emit({
            "context": "files_updated",
            "content": {file_id: vars(file) for file_id, file in self.download_manager.files.items()}
        }))

    @Property(dict)
    def dm_status(self):
        """Current status dictionary of the download manager."""
        return self.download_manager.status

    @Property(dict)
    def dm_files(self):
        """All known files as plain dicts, keyed by file id."""
        return {file_id: vars(file) for file_id, file in self.download_manager.files.items()}

    @Property(str)
    def dm_download_location(self):
        """Configured download directory (empty string when unset)."""
        return self.conf.get_value("download_location", "")

    @Slot()
    def on_site_ready(self):
        """Slot called by the page's JS once it has finished initialising.

        (The original ``@Slot(result=str)`` declared a str result for a
        slot that returns nothing.)
        """
        self.site_ready = True
        self.site_ready_changed.emit()

    @Slot(bool)
    def set_pause(self, value):
        """Pause/resume downloads on behalf of the page."""
        self.download_manager.set_pause(value)

    @Slot(list)
    @Slot(str)
    def add_files(self, files):
        """Add files to download; accepts a list or a JSON string (item or list)."""
        if isinstance(files, str):
            files = json.loads(files)
        if not isinstance(files, list):
            files = [files]
        self.download_manager.add_files(files)

    @Slot(list)
    def del_files(self, file_ids):
        """Remove files by id; accepts a list or a JSON-encoded list."""
        if isinstance(file_ids, str):
            file_ids = json.loads(file_ids)
        self.download_manager.del_files(file_ids)

    @Slot()
    def change_path(self):
        """Let the user pick a new download directory and broadcast the change."""
        new_path = QFileDialog.getExistingDirectory(
            self.parent(),
            "Sélectionner un dossier de destination",
            self.conf.get_value("download_location", "")
        )

        # Only act if the user actually selected a folder (did not cancel).
        if new_path:
            self.conf.set_value("download_location", new_path)
            # Inform the page of the change.
            self.on_message.emit({
                "context": "download_location_updated",
                "content": new_path
            })
|
||||
44
src/logs.py
Normal file
44
src/logs.py
Normal file
@@ -0,0 +1,44 @@
|
||||
import logging
|
||||
import logging.handlers
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from PySide6.QtCore import QStandardPaths
|
||||
|
||||
|
||||
def configure_logging(debug_mode=False):
    """Configure the root logger for the application.

    In debug mode logs go to stdout; otherwise they are written to a
    rotating file under the application's config directory.

    Args:
        debug_mode (bool): when True, log to the console instead of a file.
    """
    root = logging.getLogger()

    # Root captures everything; each handler filters for its destination.
    root.setLevel(logging.DEBUG)

    # Remove existing handlers to avoid duplicates on reconfiguration.
    for handler in root.handlers[:]:
        root.removeHandler(handler)

    # Common formatter for all handlers.
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    if debug_mode:
        # Console output at DEBUG so debug runs actually show debug
        # records (the original INFO level filtered them out, defeating
        # the purpose of debug mode).
        handler = logging.StreamHandler(sys.stdout)
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(formatter)
        root.addHandler(handler)
    else:
        # Production: rotating log file in the per-user app-config dir.
        log_dir = Path(QStandardPaths.writableLocation(QStandardPaths.StandardLocation.AppConfigLocation)) / "logs"
        log_dir.mkdir(parents=True, exist_ok=True)

        # RotatingFileHandler caps the on-disk size of the logs.
        file_handler = logging.handlers.RotatingFileHandler(
            log_dir / "application.log",
            maxBytes=10485760,  # 10 MB per file
            backupCount=5,      # keep 5 backup files
            encoding="utf-8"
        )
        file_handler.setLevel(logging.INFO)  # INFO is enough in production
        file_handler.setFormatter(formatter)
        root.addHandler(file_handler)
|
||||
96
src/utils.py
Normal file
96
src/utils.py
Normal file
@@ -0,0 +1,96 @@
|
||||
from PySide6.QtNetwork import QNetworkCookie
|
||||
|
||||
import time
|
||||
from http.cookiejar import Cookie
|
||||
import pickle
|
||||
import pathlib
|
||||
|
||||
|
||||
def cookie_qt_to_py(cookie: QNetworkCookie):
    """Convert a Qt ``QNetworkCookie`` into a stdlib ``http.cookiejar.Cookie``.

    Session cookies (no expiration date) map to ``expires=None`` with
    ``discard=True``, matching http.cookiejar conventions.
    """
    if cookie.isSessionCookie():
        expires = None
        discard = True
    else:
        # http.cookiejar expects ``expires`` as an *absolute* POSIX
        # timestamp. The original subtracted time.time(), storing a
        # remaining-lifetime delta, which made every cookie look expired
        # (or nearly so) to the cookiejar.
        expires = int(cookie.expirationDate().toSecsSinceEpoch())
        discard = False

    return Cookie(
        version=0,
        name=cookie.name().data().decode(),
        value=cookie.value().data().decode(),
        port=None,
        port_specified=False,
        domain=cookie.domain(),
        domain_specified=bool(cookie.domain()),
        domain_initial_dot=cookie.domain().startswith("."),
        path=cookie.path(),
        path_specified=bool(cookie.path()),
        secure=cookie.isSecure(),
        expires=expires,
        discard=discard,
        comment=None,
        comment_url=None,
        rest={"HttpOnly": cookie.isHttpOnly()},
    )
|
||||
|
||||
|
||||
class RestrictedUnpickler(pickle.Unpickler):
    """Unpickler that only resolves an explicit whitelist of classes.

    Any (module, name) pair outside the whitelist raises
    ``pickle.UnpicklingError`` instead of being imported, blocking the
    classic pickle code-execution gadgets.

    Security fix: in the original, a non-whitelisted name under
    ``builtins`` or ``datatypes`` fell through to the ``else`` branch of
    the ``pathlib`` check and was resolved by ``super().find_class`` —
    e.g. ``builtins.eval`` was loadable. This version raises for every
    name that is not explicitly allowed.
    """

    # Modules from which *some* names may be resolved.
    _ALLOWED_MODULES = {
        'datatypes',        # project dataclasses
        'src.datatypes',    # same module, src-prefixed
        'builtins',         # basic Python types only (see _SAFE_BUILTINS)
        'pathlib',          # Path objects
        'pathlib._local',   # WindowsPath/PosixPath on newer Pythons
        'PySide6.QtCore',   # Qt value types
        'datetime',         # dates/times
        'collections',      # standard collections
        '__main__',         # classes defined in the entry script
    }
    _SAFE_BUILTINS = {
        'dict', 'list', 'set', 'tuple', 'bool', 'int', 'float',
        'str', 'bytes', 'frozenset', 'range', 'complex', 'slice', 'NoneType',
    }
    _ALLOWED_DATATYPES = {'ConfType', 'FileType', 'FileStatsType'}
    _ALLOWED_PATH_CLASSES = {'Path', 'PosixPath', 'WindowsPath'}

    def find_class(self, module, name):
        """Resolve ``module.name`` only when explicitly whitelisted; raise otherwise."""
        if module not in self._ALLOWED_MODULES:
            raise pickle.UnpicklingError(f"Accès refusé à la classe {module}.{name} pour des raisons de sécurité")

        if module == 'builtins':
            if name in self._SAFE_BUILTINS:
                import builtins
                return getattr(builtins, name)
        elif module in ('datatypes', 'src.datatypes'):
            if name in self._ALLOWED_DATATYPES:
                return self._load_datatype(module, name)
        elif module in ('pathlib', 'pathlib._local'):
            # Path classes resolve through the public pathlib namespace.
            if name in self._ALLOWED_PATH_CLASSES:
                return getattr(pathlib, name)
        else:
            # Remaining whitelisted modules: normal resolution.
            return super().find_class(module, name)

        # Whitelisted module but non-whitelisted name: refuse.
        raise pickle.UnpicklingError(f"Accès refusé à la classe {module}.{name} pour des raisons de sécurité")

    @staticmethod
    def _load_datatype(module, name):
        """Import one of the project's datatype classes, trying both module spellings."""
        other = 'src.datatypes' if module == 'datatypes' else 'datatypes'
        for candidate in (module, other):
            try:
                if candidate == 'datatypes':
                    datatypes_module = __import__('datatypes')
                else:
                    datatypes_module = __import__('src.datatypes', fromlist=['datatypes'])
                return getattr(datatypes_module, name)
            except ImportError:
                continue
        raise pickle.UnpicklingError(f"Impossible d'importer le module {module}")
|
||||
|
||||
|
||||
91
windows/main_window.py
Normal file
91
windows/main_window.py
Normal file
@@ -0,0 +1,91 @@
|
||||
import asyncio
|
||||
import sys
|
||||
from pathlib import Path
|
||||
import logging
|
||||
import base64
|
||||
|
||||
from PySide6.QtCore import QTimer
|
||||
from PySide6.QtGui import QIcon
|
||||
from PySide6.QtWidgets import QMainWindow
|
||||
|
||||
from src.conf import ConfManager
|
||||
from src.download import DownloadManager
|
||||
from src.handler import WebHandler
|
||||
from windows.site_window import SiteWindow
|
||||
|
||||
import resources_rc
|
||||
|
||||
|
||||
class MainWindow(QMainWindow):
    """Top-level application window.

    Wires together the configuration manager, the download manager, the
    web handler and the embedded site view, then starts the asynchronous
    download loop once the Qt event loop is running.
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        # Dedicated logger for this class.
        self.logger = logging.getLogger("MainWindow")
        self.logger.info("Initialisation de la fenêtre principale")

        # Basic window setup.
        self.setWindowTitle("Oxpanel")
        self.setMinimumSize(1024, 768)
        self.setWindowIcon(QIcon(":/oxpanel.ico"))
        # Local dev server when launched with --dev, production site otherwise.
        self.url = "http://127.0.0.1:8000/" if "--dev" in sys.argv else "https://oxpanel.com/"
        self.logger.debug(f"URL configurée: {self.url}")

        # Initialise the core modules (order matters: conf is read by the
        # download manager, which is in turn wrapped by the web handler).
        self.logger.debug("Initialisation des modules")
        self.conf = ConfManager(self)
        self.download_manager = DownloadManager(self)
        self.web_handler = WebHandler(self.download_manager, self)

        # Initialise the windows.
        self.logger.debug("Initialisation des fenêtres")
        self.site_window = SiteWindow(self)
        self.setCentralWidget(self.site_window)

        # Forward cookies captured by the web view to the download manager.
        self.site_window.on_cookie_added.connect(lambda cookie: asyncio.ensure_future(self.download_manager.add_cookie(cookie)))
        self.logger.debug("Signaux connectés")

        # Start the download queue once the event loop is running
        # (QTimer.singleShot(0, ...) defers past construction).
        self.dm_loop = None
        QTimer.singleShot(0, self.setup_async_tasks)

        self.logger.info("Fenêtre principale initialisée avec succès")

    def setup_async_tasks(self):
        """Launch the asyncio background tasks once the application is ready."""
        self.logger.debug("Configuration des tâches asynchrones")
        # Keep a reference to the task so it is not garbage-collected.
        self.dm_loop = asyncio.ensure_future(self.download_manager.loop_queue())
        self.logger.debug("File d'attente de téléchargement démarrée")

    def handle_files(self, file_paths):
        """Process files received via "Open with" or from another instance.

        Each existing file is read, base64-encoded and forwarded to the
        web page as an ``upload_torrent`` message. If the site is not
        ready yet, the whole call is retried 100 ms later.

        Args:
            file_paths (list): list of file paths to process.
        """
        if not self.web_handler.site_ready:
            # Site not ready yet: retry shortly.
            QTimer.singleShot(100, lambda: self.handle_files(file_paths))
            self.logger.info("Site pas prêt, report de l'envoie des fichiers")
            return

        self.logger.info(f"Traitement de {len(file_paths)} fichiers reçus")
        for file_path in file_paths:
            file_path = Path(file_path)
            self.logger.debug(f"Traitement du fichier : {file_path}")

            # Only forward files that actually exist on disk.
            if file_path.exists():
                self.logger.debug(f"Le fichier {file_path} existe")
                try:
                    with file_path.open("rb") as file:
                        file_content = file.read()
                        # Binary content is transported as base64 text.
                        encoded_content = base64.b64encode(file_content).decode("utf-8")
                        self.web_handler.on_message.emit({
                            "context": "upload_torrent",
                            "content": encoded_content
                        })
                except Exception as e:
                    self.logger.error(f"Erreur lors de la lecture ou de l'encodage du fichier {file_path}: {e}")
            else:
                self.logger.warning(f"Le fichier {file_path} n'existe pas")
|
||||
157
windows/site_window.py
Normal file
157
windows/site_window.py
Normal file
@@ -0,0 +1,157 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
import sqlite3
|
||||
|
||||
from PySide6.QtWebEngineCore import QWebEngineSettings, QWebEngineProfile, QWebEnginePage, QWebEngineCookieStore
|
||||
from PySide6.QtWidgets import QWidget
|
||||
from PySide6.QtWebEngineWidgets import QWebEngineView
|
||||
from PySide6.QtWebChannel import QWebChannel
|
||||
from PySide6.QtCore import Slot, QFile, QIODevice, Signal, Qt, QStandardPaths, QTimer, QDateTime
|
||||
from PySide6.QtNetwork import QNetworkCookie
|
||||
|
||||
import sys
|
||||
|
||||
from src.conf import ConfManager
|
||||
from src.handler import WebHandler
|
||||
|
||||
|
||||
# https://gitea.devpanel.fr/oxpanel/app/src/branch/master/window/site.py
|
||||
class SiteWindow(QWebEngineView):
    """Embedded browser showing the Oxpanel site with a persistent profile.

    Exposes the WebChannel handler to the page, persists cookies in a
    dedicated profile and re-emits newly added cookies through
    ``on_cookie_added``.
    """

    on_cookie_added = Signal(QNetworkCookie)

    def __init__(self, parent=None):
        super().__init__(parent)

        self.conf = parent.conf
        self.web_handler = parent.web_handler

        # Drop cookies from the default profile; a dedicated persistent
        # profile is configured below.
        self.page().profile().cookieStore().deleteAllCookies()

        storage_path = Path(QStandardPaths.writableLocation(QStandardPaths.StandardLocation.AppDataLocation))
        if not storage_path.exists():
            storage_path.mkdir(parents=True)

        # Persistent profile: cookies survive restarts, HTTP cache stays in memory.
        self.persistent_profile = QWebEngineProfile("OxAppProfile", parent=self)
        self.cookie_store = self.persistent_profile.cookieStore()
        self.cookie_store.cookieAdded.connect(self.on_cookie_added.emit)
        self.cookie_store.cookieAdded.connect(self.test_cookie)
        self.persistent_profile.setHttpCacheType(QWebEngineProfile.HttpCacheType.MemoryHttpCache)
        self.persistent_profile.setPersistentStoragePath(str(self.conf.app_config_path / "web_cache"))
        self.persistent_profile.setPersistentCookiesPolicy(QWebEngineProfile.PersistentCookiesPolicy.AllowPersistentCookies)
        self.persistent_profile.setHttpUserAgent("oxapp25")

        custom_page = QWebEnginePage(self.persistent_profile, parent=self)
        self.setPage(custom_page)

        self.settings().setAttribute(QWebEngineSettings.WebAttribute.Accelerated2dCanvasEnabled, True)
        self.settings().setAttribute(QWebEngineSettings.WebAttribute.ScrollAnimatorEnabled, True)
        self.settings().setAttribute(QWebEngineSettings.WebAttribute.WebGLEnabled, True)
        self.settings().setAttribute(QWebEngineSettings.WebAttribute.ShowScrollBars, True)
        self.page().setBackgroundColor(Qt.GlobalColor.white)

        # Bridge to the page's JavaScript.
        self.web_channel = QWebChannel(self)
        self.web_channel.registerObject("handler", self.web_handler)
        self.page().setWebChannel(self.web_channel)

        self.loadFinished.connect(self.on_load_finished)

        self.load(parent.url)

    def test_cookie(self, *args, **kwargs):
        # Debug hook: trace cookies as they are added to the store.
        print("cook", *args, **kwargs)

    @Slot(bool)
    def on_load_finished(self, is_success):
        """Inject qwebchannel.js and scrollbar-hiding CSS once the page has loaded."""
        print("load finished")
        if is_success:
            # Inject Qt's WebChannel client library from resources.
            api_file = QFile(":/qtwebchannel/qwebchannel.js")
            api_file.open(QIODevice.OpenModeFlag.ReadOnly)
            api_content = api_file.readAll().data().decode("utf-8")
            api_file.close()
            self.page().runJavaScript(api_content)

            # Hide the main scrollbar while keeping the page scrollable.
            css = """
            html::-webkit-scrollbar,
            body::-webkit-scrollbar {
                display: none;
            }
            html, body {
                scrollbar-width: none; /* Pour Firefox */
                -ms-overflow-style: none; /* Pour Internet Explorer et Edge */
                overflow: auto; /* Conserver la capacité de défilement */
            }
            """
            js_code = f"var style = document.createElement('style'); style.textContent = `{css}`; document.head.appendChild(style);"
            self.page().runJavaScript(js_code)

            self.recreate_sessionid_cookie()

    def sessionid_from_cookie_store(self) -> None | sqlite3.Row:
        """Read the 'sessionid' cookie row directly from Chromium's cookie DB.

        Returns the sqlite row, or None if absent or unreadable.

        NOTE(review): this relies on QtWebEngine/Chromium internals; on
        some platforms Chromium stores cookie values encrypted (the
        ``value`` column may be empty) — confirm on the target platforms.
        """
        profile_path = self.persistent_profile.persistentStoragePath()
        cookies_db_path = os.path.join(profile_path, "Cookies")

        if os.path.exists(cookies_db_path):
            try:
                conn = sqlite3.connect(cookies_db_path)
                try:
                    conn.row_factory = sqlite3.Row
                    cursor = conn.cursor()
                    cursor.execute("SELECT * FROM cookies WHERE name='sessionid'")
                    result = cursor.fetchone()
                finally:
                    # Always release the DB handle, even if the query
                    # raises (the original leaked the connection on error).
                    conn.close()
                if result:
                    return result
            except Exception as e:
                print(f"Erreur lors de l'accès au fichier de cookies: {e}")
        return None

    def recreate_sessionid_cookie(self):
        """Rebuild the 'sessionid' cookie from the on-disk DB and push it into the store."""
        cookie_info = self.sessionid_from_cookie_store()
        if cookie_info is None:
            return
        # New cookie with the stored name and value.
        cookie = QNetworkCookie(b"sessionid", cookie_info['value'].encode())

        cookie.setDomain(cookie_info['host_key'])
        cookie.setPath(cookie_info['path'])

        if cookie_info['has_expires']:
            # Chromium stores expiry as microseconds since 1601-01-01 UTC;
            # convert to POSIX seconds (11644473600 s offset between epochs).
            expires = QDateTime.fromSecsSinceEpoch(int(cookie_info['expires_utc'] / 1000000) - 11644473600)
            cookie.setExpirationDate(expires)

        cookie.setSecure(bool(cookie_info['is_secure']))
        cookie.setHttpOnly(bool(cookie_info['is_httponly']))

        # SameSite is only available on PySide6 >= 6.2.0.
        if hasattr(cookie, 'setSameSitePolicy'):
            # Map Chromium's numeric samesite values onto Qt's enum.
            samesite_value = cookie_info['samesite']
            samesite_map = {
                0: QNetworkCookie.SameSite.None_,
                1: QNetworkCookie.SameSite.Lax,
                2: QNetworkCookie.SameSite.Strict
            }
            if samesite_value in samesite_map:
                cookie.setSameSitePolicy(samesite_map[samesite_value])

        # Adding the cookie to the store triggers the cookieAdded signal.
        self.cookie_store.setCookie(cookie)

    def contextMenuEvent(self, event):
        # Ignore the event to suppress the default context menu.
        event.ignore()
|
||||
|
||||
Reference in New Issue
Block a user