Files
Py-Backup/core/backup_manager.py
Désiré Werner Menrath 73e6e42485 Refactor: Encrypted backups to use direct LUKS
Replaced the LVM-on-a-file implementation with a more robust, industry-standard LUKS-on-a-file approach.

This change was motivated by persistent and hard-to-debug errors related to LVM state management and duplicate loop device detection during repeated mount/unmount cycles.

The new implementation provides several key benefits:
- **Robustness:** Eliminates the entire LVM layer, which was the root cause of the mount/unmount failures.
- **Improved UX:** Drastically reduces the number of password prompts for encrypted user backups. By changing ownership of the mountpoint, rsync can run with user privileges.
- **Enhanced Security:** The file transfer process (rsync) for user backups no longer runs with root privileges.
- **Better Usability:** Encrypted containers are now left mounted during the application's lifecycle and are only unmounted on exit, improving workflow for consecutive operations.
2025-09-07 15:58:28 +02:00

455 lines
24 KiB
Python

import subprocess
import os
import threading
import re
import signal
import datetime
import math
import shutil
from typing import Optional, List, Dict, Any
from pathlib import Path
from crontab import CronTab
import tempfile
import stat
import shutil
from core.pbp_app_config import AppConfig
from pyimage_ui.password_dialog import PasswordDialog
from core.encryption_manager import EncryptionManager
class BackupManager:
"""
Handles the logic for creating and managing backups using rsync.
"""
def __init__(self, logger, app=None):
    """Set up the manager with a logger and an optional application back-reference."""
    self.logger = logger              # destination for all log output
    self.app = app                    # main application object (may be None)
    self.process = None               # currently running rsync subprocess, if any
    self.is_system_process = False    # whether the active backup runs with root rights
    self.app_tag = "# Py-Backup Job"  # marker used to recognise our cron jobs
    self.inhibit_cookie = None        # org.freedesktop.ScreenSaver inhibit cookie
    self.encryption_manager = EncryptionManager(logger, app)
def _inhibit_screensaver(self):
    """Ask the desktop (via gdbus) to suppress the screensaver while a backup runs.

    Best-effort: silently does nothing when gdbus is unavailable, and only
    logs on failure. On success the returned cookie is stored for the
    matching UnInhibit call.
    """
    if shutil.which("gdbus") is None:
        return  # gdbus not installed -- skip quietly
    try:
        self.logger.log("Attempting to inhibit screensaver and power management.")
        cmd = [
            "gdbus", "call", "--session",
            "--dest", "org.freedesktop.ScreenSaver",
            "--object-path", "/org/freedesktop/ScreenSaver",
            "--method", "org.freedesktop.ScreenSaver.Inhibit",
            "Py-Backup", "Backup in progress",
        ]
        proc = subprocess.run(cmd, capture_output=True, text=True, check=True)
        # gdbus prints the reply as e.g. "(uint32 1234,)" -- pull out the cookie.
        cookie_match = re.search(r'uint32\s+(\d+)', proc.stdout)
        if cookie_match is not None:
            self.inhibit_cookie = int(cookie_match.group(1))
            self.logger.log(f"Successfully inhibited screensaver with cookie {self.inhibit_cookie}")
    except Exception as e:
        self.logger.log(f"An unexpected error occurred while inhibiting screensaver: {e}")
def _uninhibit_screensaver(self):
if self.inhibit_cookie is None: return
if not shutil.which("gdbus"): return
try:
self.logger.log(f"Attempting to uninhibit screensaver with cookie {self.inhibit_cookie}")
command = [
"gdbus", "call", "--session", "--dest", "org.freedesktop.ScreenSaver",
"--object-path", "/org/freedesktop/ScreenSaver",
"--method", "org.freedesktop.ScreenSaver.UnInhibit",
str(self.inhibit_cookie)
]
subprocess.run(command, capture_output=True, text=True, check=True)
self.logger.log("Successfully uninhibited screensaver.")
except Exception as e:
self.logger.log(f"Failed to uninhibit screensaver: {e}")
finally:
self.inhibit_cookie = None
def start_backup(self, queue, source_path: str, dest_path: str, is_system: bool, is_dry_run: bool = False, exclude_files: Optional[List[Path]] = None, source_size: int = 0, is_compressed: bool = False, is_encrypted: bool = False, mode: str = "incremental", use_trash_bin: bool = False, no_trash_bin: bool = False):
    """Start a backup on a background daemon thread.

    For encrypted destinations the LUKS container is prepared (unlocked and
    mounted) up front; on failure an error completion message is posted and
    None is returned. Otherwise the started Thread object is returned.
    """
    self.is_system_process = is_system
    self._inhibit_screensaver()
    mount_point = None
    if is_encrypted:
        # The container must be mounted before the worker thread can rsync into it.
        base_dest_path = os.path.dirname(dest_path)
        mount_point = self.encryption_manager.prepare_encrypted_destination(
            base_dest_path, is_system, source_size, queue)
        if not mount_point:
            self.logger.log("Failed to prepare encrypted destination. Aborting backup.")
            queue.put(('completion', {'status': 'error', 'returncode': -1}))
            self._uninhibit_screensaver()
            return None
    worker = threading.Thread(
        target=self._run_backup_path,
        args=(queue, source_path, dest_path, is_system, is_dry_run, exclude_files,
              source_size, is_compressed, is_encrypted, mode, mount_point,
              use_trash_bin, no_trash_bin),
        daemon=True,
    )
    worker.start()
    return worker
def _run_backup_path(self, queue, source_path: str, dest_path: str, is_system: bool, is_dry_run: bool, exclude_files: Optional[List[Path]], source_size: int, is_compressed: bool, is_encrypted: bool, mode: str, mount_point: Optional[str], use_trash_bin: bool, no_trash_bin: bool):
    """Worker thread body: resolve the real rsync destination, assemble and run
    the rsync command, then post a ('completion', {...}) message on the queue.

    ``mount_point`` is the already-mounted root of an encrypted container
    (None for plain destinations). ``mode`` may be overridden for user
    backups depending on what already exists on disk.
    """
    base_dest_path = os.path.dirname(dest_path)
    try:
        pybackup_dir = os.path.join(base_dest_path, "pybackup")
        backup_name = os.path.basename(dest_path)
        user_source_name = None
        if not is_system:
            # User backup names look like "<dd-mm-yyyy_HH:MM:SS>_user_<source>_<full|incremental>";
            # extract <source> so backups are grouped per source directory.
            match = re.match(r"^(\d{2}-\d{2}-\d{4}_\d{2}:\d{2}:\d{2})_user_(.+?)_(full|incremental)", backup_name)
            if match: user_source_name = match.group(2)
        if is_encrypted:
            if not mount_point: raise Exception("Encrypted backup run without a mount point.")
            rsync_base_dest = mount_point
        else:
            rsync_base_dest = os.path.join(pybackup_dir, "user_backups") if not is_system else pybackup_dir
        if not is_system:
            if user_source_name: rsync_base_dest = os.path.join(rsync_base_dest, user_source_name)
        rsync_dest = os.path.join(rsync_base_dest, backup_name)
        if not os.path.exists(rsync_base_dest):
            if not is_system:
                os.makedirs(rsync_base_dest, exist_ok=True)
            else:
                # System destinations may only be writable by root; create via the root helper.
                self.encryption_manager._execute_as_root(f"mkdir -p \"{rsync_base_dest}\"")
        latest_backup_path = self._find_latest_backup(rsync_base_dest, is_system)
        # For user backups the effective mode follows the on-disk state:
        # no previous backup -> full, otherwise incremental (hard-linked via --link-dest).
        if not is_system and not latest_backup_path: mode = "full"
        elif not is_system and latest_backup_path: mode = "incremental"
        # System backups need root (pkexec) and preserve ACLs/xattrs/hardlinks (-AXH).
        command = ['pkexec', 'rsync', '-aAXHvL'] if is_system else ['rsync', '-avL']
        if mode == "incremental" and latest_backup_path and not is_dry_run: command.append(f"--link-dest={latest_backup_path}")
        command.extend(['--info=progress2'])
        if exclude_files: command.extend([f"--exclude-from={f}" for f in exclude_files])
        if AppConfig.MANUAL_EXCLUDE_LIST_PATH.exists(): command.append(f"--exclude-from={AppConfig.MANUAL_EXCLUDE_LIST_PATH}")
        if is_dry_run: command.append('--dry-run')
        if not is_system:
            # Optional ".Trash" handling for user backups: either move deleted
            # files into the trash dir (--backup) or delete them outright.
            trash_bin_path = os.path.join(rsync_base_dest, ".Trash")
            if use_trash_bin: command.extend(['--backup', f'--backup-dir={trash_bin_path}', '--delete'])
            elif no_trash_bin: command.append('--delete')
            if use_trash_bin or no_trash_bin: command.append(f"--exclude={os.path.basename(trash_bin_path)}/")
        command.extend([source_path, rsync_dest])
        self.logger.log(f"Rsync command: {' '.join(command)}")
        transferred_size, total_size, stderr = self._execute_rsync(queue, command)
        return_code = self.process.returncode if self.process else -1
        if self.process:
            # rsync exit codes: 23/24 are partial-transfer warnings;
            # 143/-15/15/-9 indicate termination (user cancel).
            status = 'success' if return_code == 0 else 'warning' if return_code in [23, 24] else 'cancelled' if return_code in [143, -15, 15, -9] else 'error'
            if status in ['success', 'warning'] and not is_dry_run:
                # Incremental backups record only what was transferred; full backups
                # record the total (or the caller-supplied source size as fallback).
                final_size = transferred_size if (mode == 'incremental' and latest_backup_path) else (total_size or source_size)
                self._create_info_file(pybackup_dir, backup_name, final_size, is_encrypted)
            queue.put(('completion', {'status': status, 'returncode': return_code}))
    finally:
        # The container is intentionally left mounted for user convenience.
        # It will be unmounted when the application closes.
        self._uninhibit_screensaver()
        self.process = None
def list_all_backups(self, base_dest_path: str):
    """Return (system_backups, user_backups) for a destination.

    For an encrypted destination whose container is not mounted, nothing can
    be enumerated and two empty lists are returned.
    """
    mounted_path = None
    if self.encryption_manager.is_encrypted(base_dest_path):
        if not self.encryption_manager.is_mounted(base_dest_path):
            return [], []  # locked container -- nothing visible
        mounted_path = os.path.join(base_dest_path, "pybackup", "encrypted")
    return self._list_all_backups_from_path(base_dest_path, mounted_path)
def _list_all_backups_from_path(self, base_dest_path: str, mounted_path: Optional[str] = None):
system_backups = self._list_system_backups_from_path(base_dest_path, mounted_path)
user_backups = self._list_user_backups_from_path(base_dest_path, mounted_path)
return system_backups, user_backups
def list_system_backups(self, base_dest_path: str, mount_if_needed: bool = True) -> Optional[List[Dict[str, str]]]:
    """List system backups at a destination, optionally unlocking it first.

    Returns None when the destination is an encrypted container that is
    locked and ``mount_if_needed`` is False; otherwise a (possibly empty)
    list of backup metadata dicts.
    """
    mounted_path = None
    if self.encryption_manager.is_encrypted(base_dest_path):
        if not self.encryption_manager.is_mounted(base_dest_path):
            if not mount_if_needed:
                return None
            mounted_path = self.encryption_manager.prepare_encrypted_destination(base_dest_path, is_system=True, source_size=0, queue=self.app.queue)
        if self.encryption_manager.is_mounted(base_dest_path):
            # Normalise to the canonical mount location inside the destination.
            mounted_path = os.path.join(base_dest_path, "pybackup", "encrypted")
    return self._list_system_backups_from_path(base_dest_path, mounted_path)
def _list_system_backups_from_path(self, base_dest_path: str, mounted_path: Optional[str] = None) -> List[Dict[str, str]]:
pybackup_dir = os.path.join(base_dest_path, "pybackup")
if not os.path.isdir(pybackup_dir): return []
all_backups = []
name_regex = re.compile(r"^(\d{2}-\d{2}-\d{4})_(\d{2}:\d{2}:\d{2})_system_(full|incremental)(\.tar\.gz)?(_encrypted)?\.txt$", re.IGNORECASE)
for item in os.listdir(pybackup_dir):
match = name_regex.match(item)
if not match: continue
date_str, time_str, backup_type_base, comp_ext, enc_suffix = match.groups()
is_encrypted = (enc_suffix is not None)
is_compressed = (comp_ext is not None)
backup_name = item.replace(".txt", "").replace("_encrypted", "")
full_path = os.path.join(mounted_path or pybackup_dir, backup_name)
backup_type = backup_type_base.capitalize()
if is_compressed: backup_type += " (Compressed)"
if is_encrypted: backup_type += " (Encrypted)"
backup_size, comment = "N/A", ""
info_file_path = os.path.join(pybackup_dir, item)
if os.path.exists(info_file_path):
try:
with open(info_file_path, 'r') as f:
for line in f:
if line.strip().lower().startswith("originalgröße:"): backup_size = line.split(":", 1)[1].strip().split('(')[0].strip()
elif line.strip().lower().startswith("kommentar:"): comment = line.split(":", 1)[1].strip()
except Exception as e:
self.logger.log(f"Could not read info file {info_file_path}: {e}")
all_backups.append({
"date": date_str, "time": time_str, "type": backup_type, "size": backup_size,
"folder_name": backup_name, "full_path": full_path, "comment": comment,
"is_compressed": is_compressed, "is_encrypted": is_encrypted,
"backup_type_base": backup_type_base.capitalize(),
"datetime": datetime.datetime.strptime(f"{date_str} {time_str}", '%d-%m-%Y %H:%M:%S')
})
all_backups.sort(key=lambda x: x['datetime'])
grouped_backups = []
current_group = []
for backup in all_backups:
if backup['backup_type_base'] == 'Full':
if current_group: grouped_backups.append(current_group)
current_group = [backup]
else:
if not current_group: current_group.append(backup)
else: current_group.append(backup)
if current_group: grouped_backups.append(current_group)
grouped_backups.sort(key=lambda g: g[0]['datetime'], reverse=True)
return [item for group in grouped_backups for item in group]
def list_user_backups(self, base_dest_path: str, mount_if_needed: bool = True) -> Optional[List[Dict[str, str]]]:
    """List user backups at a destination, optionally unlocking it first.

    Returns None when the destination is an encrypted container that is
    locked and ``mount_if_needed`` is False; otherwise a (possibly empty)
    list of backup metadata dicts.
    """
    mounted_path = None
    if self.encryption_manager.is_encrypted(base_dest_path):
        if not self.encryption_manager.is_mounted(base_dest_path):
            if not mount_if_needed:
                return None
            mounted_path = self.encryption_manager.prepare_encrypted_destination(base_dest_path, is_system=False, source_size=0, queue=self.app.queue)
        if self.encryption_manager.is_mounted(base_dest_path):
            # Normalise to the canonical mount location inside the destination.
            mounted_path = os.path.join(base_dest_path, "pybackup", "encrypted")
    return self._list_user_backups_from_path(base_dest_path, mounted_path)
def _list_user_backups_from_path(self, base_dest_path: str, mounted_path: Optional[str] = None) -> List[Dict[str, str]]:
pybackup_dir = os.path.join(base_dest_path, "pybackup")
if not os.path.isdir(pybackup_dir): return []
user_backups = []
name_regex = re.compile(r"^(\d{2}-\d{2}-\d{4})_(\d{2}:\d{2}:\d{2})_user_(.+?)_(full|incremental)(_encrypted)?\.txt$", re.IGNORECASE)
for item in os.listdir(pybackup_dir):
match = name_regex.match(item)
if not match: continue
date_str, time_str, source_name, backup_type_base, enc_suffix = match.groups()
is_encrypted = (enc_suffix is not None)
backup_name = item.replace(".txt", "").replace("_encrypted", "")
if mounted_path:
user_backup_dir = os.path.join(mounted_path, source_name)
full_path = os.path.join(user_backup_dir, backup_name)
else:
user_backups_dir = os.path.join(pybackup_dir, "user_backups", source_name)
full_path = os.path.join(user_backups_dir, backup_name)
backup_type = backup_type_base.capitalize()
if is_encrypted: backup_type += " (Encrypted)"
backup_size, comment = "N/A", ""
info_file_path = os.path.join(pybackup_dir, item)
if os.path.exists(info_file_path):
try:
with open(info_file_path, 'r') as f:
for line in f:
if line.strip().lower().startswith("originalgröße:"): backup_size = line.split(":", 1)[1].strip().split('(')[0].strip()
elif line.strip().lower().startswith("kommentar:"): comment = line.split(":", 1)[1].strip()
except Exception as e:
self.logger.log(f"Could not read info file {info_file_path}: {e}")
user_backups.append({
"date": date_str, "time": time_str, "type": backup_type, "size": backup_size,
"folder_name": backup_name, "full_path": full_path, "comment": comment,
"is_encrypted": is_encrypted, "source": source_name, "is_compressed": False,
"backup_type_base": backup_type_base.capitalize(),
"datetime": datetime.datetime.strptime(f"{date_str} {time_str}", '%d-%m-%Y %H:%M:%S')
})
user_backups.sort(key=lambda x: x['datetime'], reverse=True)
return user_backups
def _find_latest_backup(self, base_backup_path: str, is_system: bool) -> Optional[str]:
self.logger.log(f"Searching for latest backup in: {base_backup_path}")
backup_names = []
if os.path.isdir(base_backup_path):
for item in os.listdir(base_backup_path):
if os.path.isdir(os.path.join(base_backup_path, item)):
if is_system:
if "_system_" in item: backup_names.append(item)
else:
backup_names.append(item)
backup_names.sort(reverse=True)
if not backup_names: return None
latest_backup_path = os.path.join(base_backup_path, backup_names[0])
if os.path.isdir(latest_backup_path):
self.logger.log(f"Found latest backup for --link-dest: {latest_backup_path}")
return latest_backup_path
return None
def _create_info_file(self, pybackup_dir: str, backup_name: str, source_size: int, is_encrypted: bool):
try:
info_filename = f"{backup_name}{'_encrypted' if is_encrypted else ''}.txt"
info_file_path = os.path.join(pybackup_dir, info_filename)
if source_size > 0:
power, n = 1024, 0
power_labels = {0: 'B', 1: 'KB', 2: 'MB', 3: 'GB', 4: 'TB'}
display_size = source_size
while display_size >= power and n < len(power_labels) - 1:
display_size /= power
n += 1
size_str = f"{display_size:.2f} {power_labels[n]}"
else:
size_str = "0 B"
date_str = datetime.datetime.now().strftime("%d-%m-%Y %H:%M:%S")
info_content = (f"Backup-Datum: {date_str}\n" f"Originalgröße: {size_str} ({source_size} Bytes)\n")
with open(info_file_path, 'w') as f: f.write(info_content)
self.logger.log(f"Successfully created metadata file: {info_file_path}")
except Exception as e:
self.logger.log(f"Failed to create metadata file for {pybackup_dir}. Error: {e}")
def _execute_rsync(self, queue, command: List[str]):
transferred_size, total_size, stderr_output = 0, 0, ""
try:
env = os.environ.copy()
env['LC_ALL'] = 'C'
self.process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, bufsize=1, preexec_fn=os.setsid, env=env)
if self.process.stdout:
for line in iter(self.process.stdout.readline, ''):
stripped_line = line.strip()
self.logger.log(f"Rsync stdout: {stripped_line}")
if '%' in stripped_line:
match = re.search(r'\s*(\d+)%\s+', stripped_line)
if match: queue.put(('progress', int(match.group(1))))
elif stripped_line and not stripped_line.startswith(('sending', 'sent', 'total')):
queue.put(('file_update', stripped_line))
self.process.wait()
if self.process.stderr:
stderr_output = self.process.stderr.read()
if stderr_output: self.logger.log(f"Rsync Stderr: {stderr_output.strip()}")
except FileNotFoundError:
self.logger.log(f"Error: '{command[0]}' not found.")
queue.put(('error', None))
except Exception as e:
self.logger.log(f"Rsync execution error: {e}")
queue.put(('error', None))
return transferred_size, total_size, stderr_output
def start_restore(self, source_path: str, dest_path: str, is_compressed: bool):
    """Run a restore asynchronously; messages go to the app queue (or a throwaway one)."""
    from queue import Queue
    work_queue = self.app.queue if hasattr(self.app, 'queue') else Queue()
    worker = threading.Thread(
        target=self._run_restore,
        args=(work_queue, source_path, dest_path, is_compressed),
        daemon=True,
    )
    worker.start()
def _run_restore(self, queue, source_path: str, dest_path: str, is_compressed: bool):
    """Worker thread body: restore a backup with root rights and report the result.

    Compressed backups are unpacked with tar; plain backups are copied back
    with rsync. A ('completion', {...}) message is always posted, with
    returncode 0 on success and 1 otherwise.
    """
    self.logger.log(f"Starting restore from {source_path} to {dest_path}")
    status = 'error'
    try:
        # Trailing slash makes rsync copy the directory's contents, not the dir itself.
        source = source_path.rstrip('/') + '/'
        script_content = f"tar -xzf '{source_path}' -C '{dest_path}'" if is_compressed else f"rsync -aAXHv '{source}' '{dest_path}'"
        if self.encryption_manager._execute_as_root(script_content):
            status = 'success'
        else:
            self.logger.log("Restore script failed.")
    except Exception as e:
        self.logger.log(f"An unexpected error occurred during restore: {e}")
    finally:
        queue.put(('completion', {'status': status, 'returncode': 0 if status == 'success' else 1}))
def get_scheduled_jobs(self) -> List[Dict[str, Any]]:
    """Collect this app's scheduled cron jobs (tagged with ``self.app_tag``) as detail dicts."""
    jobs_list: List[Dict[str, Any]] = []
    try:
        for job in CronTab(user=True):
            if self.app_tag not in job.comment:
                continue  # foreign cron entry -- not ours
            details = self._parse_job_comment(job.comment)
            if details:
                jobs_list.append(details)
    except Exception as e:
        self.logger.log(f"Error loading cron jobs: {e}")
    return jobs_list
def start_delete_backup(self, path_to_delete: str, info_file_path: str, is_encrypted: bool, is_system: bool, base_dest_path: str, queue, password: Optional[str] = None):
    """Delete a backup in the background; the result arrives as a ('deletion_complete', bool) queue message."""
    worker = threading.Thread(
        target=self._run_delete,
        args=(path_to_delete, info_file_path, is_encrypted, is_system,
              base_dest_path, queue, password),
        daemon=True,
    )
    worker.start()
def _run_delete(self, path_to_delete: str, info_file_path: str, is_encrypted: bool, is_system: bool, base_dest_path: str, queue, password: Optional[str]):
    """Worker thread body: delete a backup folder plus its info file.

    Three cases: inside an encrypted container (unlock first, delete as
    root), unencrypted system backup (delete as root), unencrypted user
    backup (plain user-level filesystem ops). Always posts a
    ('deletion_complete', bool) message on the queue.

    NOTE(review): ``password`` is currently unused in this body; presumably
    kept for interface compatibility -- verify against callers.
    """
    try:
        if is_encrypted:
            self.logger.log(f"Starting encrypted deletion for {path_to_delete}")
            # Make sure the container is unlocked/mounted before touching its contents.
            mount_point = self.encryption_manager.prepare_encrypted_destination(
                base_dest_path, is_system, source_size=0, queue=queue)
            if not mount_point:
                self.logger.log("Failed to unlock container for deletion.")
                queue.put(('deletion_complete', False))
                return
            # Rebuild the target path relative to the mount point actually returned.
            internal_path_to_delete = os.path.join(mount_point, os.path.basename(path_to_delete))
            script_content = f"rm -rf '{internal_path_to_delete}'\nrm -f '{info_file_path}'"
            success = self.encryption_manager._execute_as_root(script_content)
            if success:
                self.logger.log("Encrypted backup deleted successfully.")
                queue.put(('deletion_complete', True))
            else:
                self.logger.log("Failed to delete files within encrypted container.")
                queue.put(('deletion_complete', False))
        elif is_system:
            # System backups are root-owned; remove them via the root helper.
            script_content = f"rm -rf '{path_to_delete}'\nrm -f '{info_file_path}'"
            if self.encryption_manager._execute_as_root(script_content):
                self.logger.log(f"Successfully deleted {path_to_delete} and {info_file_path}")
                queue.put(('deletion_complete', True))
            else:
                self.logger.log(f"Failed to delete {path_to_delete}")
                queue.put(('deletion_complete', False))
        else:
            # Unencrypted user backups belong to the user; plain filesystem ops suffice.
            try:
                if os.path.isdir(path_to_delete): shutil.rmtree(path_to_delete)
                if os.path.exists(info_file_path): os.remove(info_file_path)
                self.logger.log(f"Successfully deleted {path_to_delete} and {info_file_path}")
                queue.put(('deletion_complete', True))
            except Exception as e:
                self.logger.log(f"Failed to delete unencrypted user backup {path_to_delete}: {e}")
                queue.put(('deletion_complete', False))
    except Exception as e:
        self.logger.log(f"Error during threaded deletion: {e}")
        queue.put(('deletion_complete', False))
def cancel_and_delete_privileged_backup(self, delete_path: str):
    """Terminate a running privileged backup and remove its partial destination.

    Sends SIGTERM to rsync's whole process group as root (rsync may have
    spawned children), then deletes ``delete_path`` -- guarded in the shell
    script against empty values and "/".
    """
    if not self.process or self.process.poll() is not None: return
    self.logger.log("Attempting to cancel backup and delete directory with root privileges...")
    try:
        pgid = os.getpgid(self.process.pid)
        script_content = f"""
kill -SIGTERM -- -{pgid} || echo 'Process group not found or already terminated.'
if [ -n \"{delete_path}\" ] && [ \"{delete_path}\" != \"/\" ]; then rm -rf \"{delete_path}\"; fi
"""
        self.encryption_manager._execute_as_root(script_content)
    except Exception as e:
        self.logger.log(f"An error occurred during privileged cancel and delete: {e}")