Files
Py-Backup/core/backup_manager.py
Désiré Werner Menrath dbaa623b17 fix(backup): Resolve multiple issues in encrypted backup handling
This commit addresses several bugs related to the mounting, unmounting, and deletion of encrypted backups, as well as a crash when listing backups.

The key changes are:
- **Fix Double Mount on View:** Removed redundant mount operation when viewing encrypted backup contents. The mount is now handled by a single, centralized function.
- **Fix Deletion of Encrypted Backups:**
    - The container is no longer re-mounted if already open, preventing a second password prompt.
    - Deletion of encrypted *user* backups is now performed with user-level permissions, removing the need for a third password prompt via pkexec.
- **Fix UI Refresh after Deletion:** The backup list now correctly refreshes after a backup is deleted.
- **Fix Crash on Empty Backup List:** Resolved an `UnboundLocalError` that occurred when listing backups from an empty or non-existent backup directory.
- **Improve Mount Detection:** The `is_mounted` check is now more robust to prevent race conditions or other OS-level inconsistencies.
2025-09-07 19:02:39 +02:00

478 lines
24 KiB
Python

import subprocess
import os
import threading
import re
import signal
import datetime
import math
import shutil
from typing import Optional, List, Dict, Any
from pathlib import Path
from crontab import CronTab
import tempfile
import stat
import shutil
from core.pbp_app_config import AppConfig
from pyimage_ui.password_dialog import PasswordDialog
from core.encryption_manager import EncryptionManager
class BackupManager:
"""
Handles the logic for creating and managing backups using rsync.
"""
def __init__(self, logger, app=None):
    """Set up manager state; *logger* must expose .log(), *app* is the UI app (optional)."""
    self.logger = logger
    self.app = app
    # Handle to the currently running rsync subprocess, if any.
    self.process = None
    # True while a system-level (pkexec) backup is in flight.
    self.is_system_process = False
    # Comment tag identifying Py-Backup entries in the user's crontab.
    self.app_tag = "# Py-Backup Job"
    # Cookie from org.freedesktop.ScreenSaver.Inhibit; None when not held.
    self.inhibit_cookie = None
    self.encryption_manager = EncryptionManager(logger, app)
def _inhibit_screensaver(self):
if not shutil.which("gdbus"):
return
try:
self.logger.log("Attempting to inhibit screensaver and power management.")
command = [
"gdbus", "call", "--session", "--dest", "org.freedesktop.ScreenSaver",
"--object-path", "/org/freedesktop/ScreenSaver",
"--method", "org.freedesktop.ScreenSaver.Inhibit",
"Py-Backup", "Backup in progress"
]
result = subprocess.run(command, capture_output=True, text=True, check=True)
match = re.search(r'uint32\s+(\d+)', result.stdout)
if match:
self.inhibit_cookie = int(match.group(1))
self.logger.log(f"Successfully inhibited screensaver with cookie {self.inhibit_cookie}")
except Exception as e:
self.logger.log(f"An unexpected error occurred while inhibiting screensaver: {e}")
def _uninhibit_screensaver(self):
if self.inhibit_cookie is None: return
if not shutil.which("gdbus"): return
try:
self.logger.log(f"Attempting to uninhibit screensaver with cookie {self.inhibit_cookie}")
command = [
"gdbus", "call", "--session", "--dest", "org.freedesktop.ScreenSaver",
"--object-path", "/org/freedesktop/ScreenSaver",
"--method", "org.freedesktop.ScreenSaver.UnInhibit",
str(self.inhibit_cookie)
]
subprocess.run(command, capture_output=True, text=True, check=True)
self.logger.log("Successfully uninhibited screensaver.")
except Exception as e:
self.logger.log(f"Failed to uninhibit screensaver: {e}")
finally:
self.inhibit_cookie = None
def start_backup(self, queue, source_path: str, dest_path: str, is_system: bool, is_dry_run: bool = False, exclude_files: Optional[List[Path]] = None, source_size: int = 0, is_compressed: bool = False, is_encrypted: bool = False, mode: str = "incremental", use_trash_bin: bool = False, no_trash_bin: bool = False):
    """Kick off a backup run on a background daemon thread.

    For encrypted destinations the container is prepared (and mounted)
    synchronously first; if that fails, an error 'completion' event is
    posted to *queue* and no thread is started.

    Returns the worker thread, or None when preparation failed.
    """
    self.is_system_process = is_system
    self._inhibit_screensaver()
    mount_point = None
    if is_encrypted:
        base_dest_path = os.path.dirname(dest_path)
        mount_point = self.encryption_manager.prepare_encrypted_destination(
            base_dest_path, is_system, source_size, queue)
        if not mount_point:
            self.logger.log("Failed to prepare encrypted destination. Aborting backup.")
            queue.put(('completion', {'status': 'error', 'returncode': -1}))
            self._uninhibit_screensaver()
            return None
    worker = threading.Thread(
        target=self._run_backup_path,
        args=(queue, source_path, dest_path, is_system, is_dry_run,
              exclude_files, source_size, is_compressed, is_encrypted,
              mode, mount_point, use_trash_bin, no_trash_bin),
        daemon=True,
    )
    worker.start()
    return worker
def _run_backup_path(self, queue, source_path: str, dest_path: str, is_system: bool, is_dry_run: bool, exclude_files: Optional[List[Path]], source_size: int, is_compressed: bool, is_encrypted: bool, mode: str, mount_point: Optional[str], use_trash_bin: bool, no_trash_bin: bool):
    """Worker-thread body: build and execute the rsync command for one backup.

    Derives the destination layout (plain ``pybackup`` tree or the mounted
    encrypted container), picks full vs. incremental mode, runs rsync via
    :meth:`_execute_rsync`, writes the sidecar info file on success, and
    posts a ``('completion', {...})`` event to *queue*.
    """
    base_dest_path = os.path.dirname(dest_path)
    try:
        pybackup_dir = os.path.join(base_dest_path, "pybackup")
        backup_name = os.path.basename(dest_path)
        user_source_name = None
        if not is_system:
            # User backup folder names embed the source folder:
            # <DD-MM-YYYY_HH:MM:SS>_user_<source>_<full|incremental>
            match = re.match(r"^(\d{2}-\d{2}-\d{4}_\d{2}:\d{2}:\d{2})_user_(.+?)_(full|incremental)", backup_name)
            if match: user_source_name = match.group(2)
        if is_encrypted:
            if not mount_point: raise Exception("Encrypted backup run without a mount point.")
            rsync_base_dest = mount_point
        else:
            # System backups go directly under pybackup/, user backups under
            # pybackup/user_backups/<source>.
            rsync_base_dest = os.path.join(pybackup_dir, "user_backups") if not is_system else pybackup_dir
        if not is_system:
            if user_source_name: rsync_base_dest = os.path.join(rsync_base_dest, user_source_name)
        rsync_dest = os.path.join(rsync_base_dest, backup_name)
        if not os.path.exists(rsync_base_dest):
            if not is_system:
                os.makedirs(rsync_base_dest, exist_ok=True)
            else:
                # System destination may be root-owned; create it via the root helper.
                self.encryption_manager._execute_as_root(f"mkdir -p \"{rsync_base_dest}\"")
        latest_backup_path = self._find_latest_backup(rsync_base_dest, is_system)
        # For user backups the mode is derived from what already exists,
        # overriding the caller's request: first run full, later runs incremental.
        if not is_system and not latest_backup_path: mode = "full"
        elif not is_system and latest_backup_path: mode = "incremental"
        # System runs need pkexec and preserve ACLs/xattrs/hardlinks (-aAXH).
        command = ['pkexec', 'rsync', '-aAXHvL'] if is_system else ['rsync', '-avL']
        # Incremental: hardlink unchanged files against the previous backup.
        if mode == "incremental" and latest_backup_path and not is_dry_run: command.append(f"--link-dest={latest_backup_path}")
        command.extend(['--info=progress2'])
        if exclude_files: command.extend([f"--exclude-from={f}" for f in exclude_files])
        if AppConfig.MANUAL_EXCLUDE_LIST_PATH.exists(): command.append(f"--exclude-from={AppConfig.MANUAL_EXCLUDE_LIST_PATH}")
        if is_dry_run: command.append('--dry-run')
        if not is_system:
            # Optional trash handling: with --delete, vanished files are either
            # parked in .Trash (--backup-dir) or removed outright.
            trash_bin_path = os.path.join(rsync_base_dest, ".Trash")
            if use_trash_bin: command.extend(['--backup', f'--backup-dir={trash_bin_path}', '--delete'])
            elif no_trash_bin: command.append('--delete')
            if use_trash_bin or no_trash_bin: command.append(f"--exclude={os.path.basename(trash_bin_path)}/")
        command.extend([source_path, rsync_dest])
        self.logger.log(f"Rsync command: {' '.join(command)}")
        transferred_size, total_size, stderr = self._execute_rsync(queue, command)
        return_code = self.process.returncode if self.process else -1
        if self.process:
            # rsync 23/24 = partial transfer (warning); 143/-15/15/-9 = terminated/killed.
            status = 'success' if return_code == 0 else 'warning' if return_code in [23, 24] else 'cancelled' if return_code in [143, -15, 15, -9] else 'error'
            if status in ['success', 'warning'] and not is_dry_run:
                # Incremental runs record only the newly transferred bytes.
                final_size = transferred_size if (mode == 'incremental' and latest_backup_path) else (total_size or source_size)
                self._create_info_file(pybackup_dir, backup_name, final_size, is_encrypted)
            queue.put(('completion', {'status': status, 'returncode': return_code}))
    finally:
        # The container is intentionally left mounted for user convenience.
        # It will be unmounted when the application closes.
        self._uninhibit_screensaver()
        self.process = None
def _prepare_and_get_mounted_path(self, base_dest_path: str, is_system: bool, mount_if_needed: bool) -> Optional[str]:
    """Return the path inside the mounted encrypted container, or None.

    None means: destination is not encrypted, mounting was not requested,
    or the container could not be mounted.
    """
    if not self.encryption_manager.is_encrypted(base_dest_path):
        return None
    if not self.encryption_manager.is_mounted(base_dest_path):
        if not mount_if_needed:
            return None
        prepared = self.encryption_manager.prepare_encrypted_destination(
            base_dest_path, is_system=is_system, source_size=0, queue=self.app.queue)
        if not prepared:
            return None
    # Re-check the mount: preparation above may have just mounted it.
    if self.encryption_manager.is_mounted(base_dest_path):
        return os.path.join(base_dest_path, "pybackup", "encrypted")
    return None
def list_all_backups(self, base_dest_path: str, mount_if_needed: bool = True):
    """Return (system_backups, user_backups), mounting an encrypted store if allowed."""
    mounted = self._prepare_and_get_mounted_path(
        base_dest_path, is_system=False, mount_if_needed=mount_if_needed)
    return self._list_all_backups_from_path(base_dest_path, mounted)
def _list_all_backups_from_path(self, base_dest_path: str, mounted_path: Optional[str] = None):
system_backups = self._list_system_backups_from_path(base_dest_path, mounted_path)
user_backups = self._list_user_backups_from_path(base_dest_path, mounted_path)
return system_backups, user_backups
def list_system_backups(self, base_dest_path: str, mount_if_needed: bool = True) -> Optional[List[Dict[str, str]]]:
    """List system backups; None signals an encrypted store that could not be opened."""
    mounted = self._prepare_and_get_mounted_path(
        base_dest_path, is_system=True, mount_if_needed=mount_if_needed)
    if not mounted and self.encryption_manager.is_encrypted(base_dest_path):
        return None
    return self._list_system_backups_from_path(base_dest_path, mounted)
def _list_backups(self, base_dest_path: str, mounted_path: Optional[str], name_regex: re.Pattern, backup_type_prefix: str) -> List[Dict[str, str]]:
    """Scan sidecar .txt info files in <base>/pybackup and build backup records.

    *name_regex* must yield groups (date, time, ...) whose tail differs per
    kind: system -> (type, compressed-ext, encrypted-suffix); user ->
    (source, type, encrypted-suffix). *mounted_path* points inside an open
    encrypted container and, when given, is used as the data root instead
    of the plain pybackup tree. System backups are returned grouped into
    full+incremental chains (newest chain first); user backups are simply
    newest-first.
    """
    pybackup_dir = os.path.join(base_dest_path, "pybackup")
    # Missing/empty destination: return an empty list instead of failing.
    if not os.path.isdir(pybackup_dir):
        return []
    is_system = backup_type_prefix == "system"
    all_backups = []
    for item in os.listdir(pybackup_dir):
        match = name_regex.match(item)
        if not match:
            continue
        groups = match.groups()
        date_str, time_str = groups[0], groups[1]
        if is_system:
            backup_type_base, comp_ext, enc_suffix = groups[2], groups[3], groups[4]
            source_name = None
            is_compressed = comp_ext is not None
        else:
            source_name, backup_type_base, enc_suffix = groups[2], groups[3], groups[4]
            is_compressed = False
        is_encrypted = enc_suffix is not None
        # Data folder name = info file name minus ".txt" and "_encrypted" marker.
        backup_name = item.replace(".txt", "").replace("_encrypted", "")
        if mounted_path:
            if is_system:
                full_path = os.path.join(mounted_path, backup_name)
            else:
                user_backup_dir = os.path.join(mounted_path, source_name)
                full_path = os.path.join(user_backup_dir, backup_name)
        else:
            if is_system:
                full_path = os.path.join(pybackup_dir, backup_name)
            else:
                user_backups_dir = os.path.join(pybackup_dir, "user_backups", source_name)
                full_path = os.path.join(user_backups_dir, backup_name)
        # Human-readable type label, e.g. "Full (Compressed) (Encrypted)".
        backup_type = backup_type_base.capitalize()
        if is_compressed:
            backup_type += " (Compressed)"
        if is_encrypted:
            backup_type += " (Encrypted)"
        backup_size, comment = "N/A", ""
        info_file_path = os.path.join(pybackup_dir, item)
        if os.path.exists(info_file_path):
            try:
                # Info files are German key/value lines written by _create_info_file.
                with open(info_file_path, 'r') as f:
                    for line in f:
                        if line.strip().lower().startswith("originalgröße:"):
                            backup_size = line.split(":", 1)[1].strip().split('(')[0].strip()
                        elif line.strip().lower().startswith("kommentar:"):
                            comment = line.split(":", 1)[1].strip()
            except Exception as e:
                self.logger.log(f"Could not read info file {info_file_path}: {e}")
        backup_info = {
            "date": date_str, "time": time_str, "type": backup_type, "size": backup_size,
            "folder_name": backup_name, "full_path": full_path, "comment": comment,
            "is_encrypted": is_encrypted, "is_compressed": is_compressed,
            "backup_type_base": backup_type_base.capitalize(),
            "datetime": datetime.datetime.strptime(f"{date_str} {time_str}", '%d-%m-%Y %H:%M:%S')
        }
        if not is_system:
            backup_info["source"] = source_name
        all_backups.append(backup_info)
    if is_system:
        # Group chronologically into chains: each Full starts a new group and
        # its following incrementals join it.
        all_backups.sort(key=lambda x: x['datetime'])
        grouped_backups = []
        current_group = []
        for backup in all_backups:
            if backup['backup_type_base'] == 'Full':
                if current_group:
                    grouped_backups.append(current_group)
                current_group = [backup]
            else:
                # NOTE(review): both branches append — an incremental with no
                # preceding Full simply starts/extends the current group.
                if not current_group:
                    current_group.append(backup)
                else:
                    current_group.append(backup)
        if current_group:
            grouped_backups.append(current_group)
        # Newest chain first, but keep chronological order inside each chain.
        grouped_backups.sort(key=lambda g: g[0]['datetime'], reverse=True)
        return [item for group in grouped_backups for item in group]
    else:
        all_backups.sort(key=lambda x: x['datetime'], reverse=True)
        return all_backups
def _list_system_backups_from_path(self, base_dest_path: str, mounted_path: Optional[str] = None) -> List[Dict[str, str]]:
name_regex = re.compile(r"^(\d{2}-\d{2}-\d{4})_(\d{2}:\d{2}:\d{2})_system_(full|incremental)(\.tar\.gz)?(_encrypted)?\.txt$", re.IGNORECASE)
return self._list_backups(base_dest_path, mounted_path, name_regex, "system")
def _list_user_backups_from_path(self, base_dest_path: str, mounted_path: Optional[str] = None) -> List[Dict[str, str]]:
name_regex = re.compile(r"^(\d{2}-\d{2}-\d{4})_(\d{2}:\d{2}:\d{2})_user_(.+?)_(full|incremental)(_encrypted)?\.txt$", re.IGNORECASE)
return self._list_backups(base_dest_path, mounted_path, name_regex, "user")
def _find_latest_backup(self, base_backup_path: str, is_system: bool) -> Optional[str]:
self.logger.log(f"Searching for latest backup in: {base_backup_path}")
backup_names = []
if os.path.isdir(base_backup_path):
for item in os.listdir(base_backup_path):
if os.path.isdir(os.path.join(base_backup_path, item)):
if is_system:
if "_system_" in item: backup_names.append(item)
else:
backup_names.append(item)
backup_names.sort(reverse=True)
if not backup_names: return None
latest_backup_path = os.path.join(base_backup_path, backup_names[0])
if os.path.isdir(latest_backup_path):
self.logger.log(f"Found latest backup for --link-dest: {latest_backup_path}")
return latest_backup_path
return None
def _create_info_file(self, pybackup_dir: str, backup_name: str, source_size: int, is_encrypted: bool):
try:
info_filename = f"{backup_name}{'_encrypted' if is_encrypted else ''}.txt"
info_file_path = os.path.join(pybackup_dir, info_filename)
if source_size > 0:
power, n = 1024, 0
power_labels = {0: 'B', 1: 'KB', 2: 'MB', 3: 'GB', 4: 'TB'}
display_size = source_size
while display_size >= power and n < len(power_labels) - 1:
display_size /= power
n += 1
size_str = f"{display_size:.2f} {power_labels[n]}"
else:
size_str = "0 B"
date_str = datetime.datetime.now().strftime("%d-%m-%Y %H:%M:%S")
info_content = (f"Backup-Datum: {date_str}\n" f"Originalgröße: {size_str} ({source_size} Bytes)\n")
with open(info_file_path, 'w') as f: f.write(info_content)
self.logger.log(f"Successfully created metadata file: {info_file_path}")
except Exception as e:
self.logger.log(f"Failed to create metadata file for {pybackup_dir}. Error: {e}")
def _execute_rsync(self, queue, command: List[str]):
transferred_size, total_size, stderr_output = 0, 0, ""
try:
env = os.environ.copy()
env['LC_ALL'] = 'C'
self.process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, bufsize=1, preexec_fn=os.setsid, env=env)
if self.process.stdout:
for line in iter(self.process.stdout.readline, ''):
stripped_line = line.strip()
self.logger.log(f"Rsync stdout: {stripped_line}")
if '%' in stripped_line:
match = re.search(r'\s*(\d+)%\s+', stripped_line)
if match: queue.put(('progress', int(match.group(1))))
elif stripped_line and not stripped_line.startswith(('sending', 'sent', 'total')):
queue.put(('file_update', stripped_line))
self.process.wait()
if self.process.stderr:
stderr_output = self.process.stderr.read()
if stderr_output: self.logger.log(f"Rsync Stderr: {stderr_output.strip()}")
except FileNotFoundError:
self.logger.log(f"Error: '{command[0]}' not found.")
queue.put(('error', None))
except Exception as e:
self.logger.log(f"Rsync execution error: {e}")
queue.put(('error', None))
return transferred_size, total_size, stderr_output
def start_restore(self, source_path: str, dest_path: str, is_compressed: bool):
    """Launch a restore on a background daemon thread.

    Completion events go to the app's queue when available; otherwise a
    throwaway queue is used (events are then simply discarded).
    """
    from queue import Queue
    restore_queue = self.app.queue if hasattr(self.app, 'queue') else Queue()
    worker = threading.Thread(
        target=self._run_restore,
        args=(restore_queue, source_path, dest_path, is_compressed),
        daemon=True,
    )
    worker.start()
def _run_restore(self, queue, source_path: str, dest_path: str, is_compressed: bool):
    """Worker-thread body: restore a backup as root via tar (compressed) or rsync."""
    self.logger.log(f"Starting restore from {source_path} to {dest_path}")
    status = 'error'
    try:
        # Trailing slash tells rsync to copy the directory's contents.
        source = source_path.rstrip('/') + '/'
        if is_compressed:
            script_content = f"tar -xzf '{source_path}' -C '{dest_path}'"
        else:
            script_content = f"rsync -aAXHv '{source}' '{dest_path}'"
        # NOTE(review): paths are single-quoted but not shell-escaped; a path
        # containing a quote would break the script — confirm upstream sanitization.
        if self.encryption_manager._execute_as_root(script_content):
            status = 'success'
        else:
            self.logger.log("Restore script failed.")
    except Exception as e:
        self.logger.log(f"An unexpected error occurred during restore: {e}")
    finally:
        queue.put(('completion', {'status': status, 'returncode': 0 if status == 'success' else 1}))
def get_scheduled_jobs(self) -> List[Dict[str, Any]]:
    """Return parsed details of all Py-Backup jobs in the user's crontab."""
    jobs_list: List[Dict[str, Any]] = []
    try:
        for job in CronTab(user=True):
            # Only entries tagged with our marker comment belong to us.
            if self.app_tag not in job.comment:
                continue
            details = self._parse_job_comment(job.comment)
            if details:
                jobs_list.append(details)
    except Exception as e:
        self.logger.log(f"Error loading cron jobs: {e}")
    return jobs_list
def start_delete_backup(self, path_to_delete: str, info_file_path: str, is_encrypted: bool, is_system: bool, base_dest_path: str, queue, password: Optional[str] = None):
thread = threading.Thread(target=self._run_delete, args=(
path_to_delete, info_file_path, is_encrypted, is_system, base_dest_path, queue, password))
thread.daemon = True
thread.start()
def _run_delete(self, path_to_delete: str, info_file_path: str, is_encrypted: bool, is_system: bool, base_dest_path: str, queue, password: Optional[str]):
    """Worker-thread body: delete one backup and its sidecar info file.

    Three paths: (1) encrypted — resolve the path inside the mounted
    container (mounting with *password* only if it is not already open),
    then delete as root for system backups or with user rights otherwise;
    (2) plain system backup — delete via the root helper; (3) plain user
    backup — delete directly. Always posts ('deletion_complete', bool)
    to *queue*.
    """
    try:
        if is_encrypted:
            self.logger.log(f"Starting encrypted deletion for {path_to_delete}")
            mount_point = None
            # Reuse an already-open container to avoid a second password prompt.
            if self.encryption_manager.is_mounted(base_dest_path):
                pybackup_dir = os.path.join(base_dest_path, "pybackup")
                mount_point = os.path.join(pybackup_dir, "encrypted")
            else:
                if password:
                    mount_point = self.encryption_manager.mount_for_deletion(base_dest_path, is_system, password)
                else:
                    self.logger.log("Password not provided for encrypted deletion.")
            if not mount_point:
                self.logger.log("Failed to unlock container for deletion.")
                queue.put(('deletion_complete', False))
                return
            # The caller's path refers to the unencrypted layout; map just the
            # folder name into the mounted container.
            internal_path_to_delete = os.path.join(mount_point, os.path.basename(path_to_delete))
            success = False
            if is_system:
                script_content = f"rm -rf '{internal_path_to_delete}'\nrm -f '{info_file_path}'"
                success = self.encryption_manager._execute_as_root(script_content)
            else: # User backup, no root needed
                try:
                    if os.path.isdir(internal_path_to_delete):
                        shutil.rmtree(internal_path_to_delete)
                    if os.path.exists(info_file_path):
                        os.remove(info_file_path)
                    self.logger.log(f"Successfully deleted {internal_path_to_delete} and {info_file_path}")
                    success = True
                except Exception as e:
                    self.logger.log(f"Failed to delete user backup {internal_path_to_delete}: {e}")
                    success = False
            if success:
                self.logger.log("Encrypted backup deleted successfully.")
                queue.put(('deletion_complete', True))
            else:
                self.logger.log("Failed to delete files within encrypted container.")
                queue.put(('deletion_complete', False))
        elif is_system:
            # Plain system backup: directories are root-owned, delete via helper.
            script_content = f"rm -rf '{path_to_delete}'\nrm -f '{info_file_path}'"
            if self.encryption_manager._execute_as_root(script_content):
                self.logger.log(f"Successfully deleted {path_to_delete} and {info_file_path}")
                queue.put(('deletion_complete', True))
            else:
                self.logger.log(f"Failed to delete {path_to_delete}")
                queue.put(('deletion_complete', False))
        else:
            # Plain user backup: user-level permissions suffice.
            try:
                if os.path.isdir(path_to_delete): shutil.rmtree(path_to_delete)
                if os.path.exists(info_file_path): os.remove(info_file_path)
                self.logger.log(f"Successfully deleted {path_to_delete} and {info_file_path}")
                queue.put(('deletion_complete', True))
            except Exception as e:
                self.logger.log(f"Failed to delete unencrypted user backup {path_to_delete}: {e}")
                queue.put(('deletion_complete', False))
    except Exception as e:
        self.logger.log(f"Error during threaded deletion: {e}")
        queue.put(('deletion_complete', False))
def cancel_and_delete_privileged_backup(self, delete_path: str):
if not self.process or self.process.poll() is not None: return
self.logger.log("Attempting to cancel backup and delete directory with root privileges...")
try:
pgid = os.getpgid(self.process.pid)
script_content = f"""
kill -SIGTERM -- -{pgid} || echo 'Process group not found or already terminated.'
if [ -n "{delete_path}" ] && [ "{delete_path}" != "/" ]; then rm -rf "{delete_path}"; fi
"""
self.encryption_manager._execute_as_root(script_content)
except Exception as e:
self.logger.log(f"An error occurred during privileged cancel and delete: {e}")