Py-Backup/core/backup_manager.py
Désiré Werner Menrath 452a56b813 feat: Implement auto-scaling encrypted containers and fix UI workflow
Refactors the encryption mechanism to use a flexible LVM-on-a-loop-device backend instead of a fixed-size file. This resolves issues with containers running out of space.

- Implements auto-resizing of the container when a backup fails due to lack of space.
- Implements transparent inspection of encrypted containers, allowing the UI to display their contents (full/incremental backups) just like unencrypted ones.
- Fixes deletion of encrypted backups by ensuring the container is unlocked before deletion.
- Fixes a bug where deleting unencrypted user backups incorrectly required root privileges.
- Fixes a UI freeze caused by calling a password dialog from a non-UI thread during deletion.
- Simplifies the UI by removing the now-obsolete "Show Encrypted Backups" button.
- Changes the default directory for encrypted user backups to `user_encrypt`.
2025-09-06 12:46:36 +02:00
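
How the new backend hangs together, as a minimal sketch: a sparse file backs a loop device, which carries a LUKS mapping with an LVM volume group inside, so the logical volume and its filesystem can be grown in place when rsync runs out of space. The backing-file name `pybackup_lvm.img` appears in `_run_backup_path` below; the mapper/VG/LV names, mount point, and exact command sequence here are illustrative assumptions, not the EncryptionManager's actual implementation.

import subprocess

def _sh(cmd: str) -> None:
    # Helper: run a shell command, raising on failure.
    subprocess.run(cmd, shell=True, check=True)

def create_container(backing_file: str, size_gb: int, mount_point: str) -> None:
    """Build the stack: sparse file -> loop device -> LUKS -> LVM -> ext4."""
    _sh(f"truncate -s {size_gb}G '{backing_file}'")            # sparse backing file
    loop = subprocess.check_output(
        f"losetup --find --show '{backing_file}'", shell=True, text=True).strip()
    _sh(f"cryptsetup luksFormat --batch-mode '{loop}'")        # asks for the passphrase
    _sh(f"cryptsetup open '{loop}' pybackup_crypt")
    _sh("pvcreate /dev/mapper/pybackup_crypt")                 # LVM lives on the LUKS mapping
    _sh("vgcreate pybackup_vg /dev/mapper/pybackup_crypt")
    _sh("lvcreate -l 100%FREE -n backup pybackup_vg")
    _sh("mkfs.ext4 /dev/pybackup_vg/backup")
    _sh(f"mount /dev/pybackup_vg/backup '{mount_point}'")

def grow_container(backing_file: str, add_gb: int) -> None:
    """Grow the stack bottom-up: file, loop capacity, LUKS, PV, then LV + fs."""
    _sh(f"truncate -s +{add_gb}G '{backing_file}'")
    _sh(f"losetup -c $(losetup -j '{backing_file}' | cut -d: -f1)")  # re-read loop size
    _sh("cryptsetup resize pybackup_crypt")                    # may prompt for the passphrase
    _sh("pvresize /dev/mapper/pybackup_crypt")
    _sh("lvextend -r -l +100%FREE /dev/pybackup_vg/backup")    # -r also grows the ext4 fs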


import subprocess
import os
import threading
import re
import signal
import datetime
from typing import Optional, List, Dict, Any
from pathlib import Path
from crontab import CronTab
import tempfile
import stat
import shutil
from core.pbp_app_config import AppConfig
from pyimage_ui.password_dialog import PasswordDialog
from core.encryption_manager import EncryptionManager
class BackupManager:
"""
Handles the logic for creating and managing backups using rsync.
"""
def __init__(self, logger, app=None):
self.logger = logger
self.process = None
self.app_tag = "# Py-Backup Job"
self.is_system_process = False
self.app = app
self.encryption_manager = EncryptionManager(logger, app)
def cancel_and_delete_privileged_backup(self, delete_path: str):
"""Cancels a running system backup and deletes the target directory in one atomic pkexec call."""
if not self.process or self.process.poll() is not None:
self.logger.log("No active backup process to cancel.")
return
self.logger.log(
"Attempting to cancel backup and delete directory with root privileges...")
try:
pgid = os.getpgid(self.process.pid)
script_parts = [
f"echo 'Attempting to terminate process group {pgid}'",
f"kill -SIGTERM -- -{pgid} || echo 'Process group {pgid} not found or already terminated.'",
f"echo 'Attempting to delete directory {delete_path}'",
f'if [ -n "{delete_path}" ] && [ "{delete_path}" != "/" ]; then',
f' rm -rf "{delete_path}"',
f'fi'
]
script_content = "\n".join(script_parts)
if self.encryption_manager._execute_as_root(script_content):
self.logger.log(
"Backup cancellation and deletion script succeeded.")
else:
self.logger.log(
"Backup cancellation and deletion script failed.")
except ProcessLookupError:
self.logger.log("Backup process already terminated before action.")
self.delete_privileged_path(delete_path)
except Exception as e:
self.logger.log(
f"An error occurred during privileged cancel and delete: {e}")
def delete_privileged_path(self, path: str):
"""Deletes a given path using root privileges."""
self.logger.log(f"Requesting privileged deletion of: {path}")
if not path or path == "/":
self.logger.log("Invalid path for deletion provided.")
return
script_content = f'rm -rf "{path}"'
if self.encryption_manager._execute_as_root(script_content):
self.logger.log(f"Successfully deleted path: {path}")
else:
self.logger.log(f"Failed to delete path: {path}")
def start_delete_backup(self, path_to_delete: str, info_file_path: str, is_encrypted: bool, is_system: bool, base_dest_path: str, queue, password: Optional[str] = None):
"""Starts a threaded backup deletion."""
thread = threading.Thread(target=self._run_delete, args=(
path_to_delete, info_file_path, is_encrypted, is_system, base_dest_path, queue, password))
thread.daemon = True
thread.start()
def _run_delete(self, path_to_delete: str, info_file_path: str, is_encrypted: bool, is_system: bool, base_dest_path: str, queue, password: Optional[str]):
"""Runs the deletion and puts a message on the queue when done."""
try:
if is_encrypted:
self.logger.log(f"Starting encrypted deletion for {path_to_delete}")
if not password:
self.logger.log("Password not provided for encrypted deletion.")
queue.put(('deletion_complete', False))
return
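                # size_gb=0 is assumed to mean "open the existing container
                # as-is"; the deletion path only needs it unlocked and mounted.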
mount_point = self.encryption_manager.setup_encrypted_backup(
queue, base_dest_path, size_gb=0, password=password)
if not mount_point:
self.logger.log("Failed to unlock container for deletion.")
queue.put(('deletion_complete', False))
return
self.logger.log(f"Container unlocked. Deleting {path_to_delete} and {info_file_path}")
script_content = f"""
rm -rf '{path_to_delete}'
rm -f '{info_file_path}'
"""
success = self.encryption_manager._execute_as_root(script_content)
self.encryption_manager.cleanup_encrypted_backup(base_dest_path)
if success:
self.logger.log("Encrypted backup deleted successfully.")
queue.put(('deletion_complete', True))
else:
self.logger.log("Failed to delete files within encrypted container.")
queue.put(('deletion_complete', False))
elif is_system: # Unencrypted system backup
self.logger.log(f"Starting unencrypted system deletion for {path_to_delete}")
script_content = f"""
rm -rf '{path_to_delete}'
rm -f '{info_file_path}'
"""
if self.encryption_manager._execute_as_root(script_content):
self.logger.log(f"Successfully deleted {path_to_delete} and {info_file_path}")
queue.put(('deletion_complete', True))
else:
self.logger.log(f"Failed to delete {path_to_delete}")
queue.put(('deletion_complete', False))
else: # Unencrypted user backup
self.logger.log(f"Starting unencrypted user deletion for {path_to_delete}")
try:
if os.path.isdir(path_to_delete):
shutil.rmtree(path_to_delete)
if os.path.exists(info_file_path):
os.remove(info_file_path)
self.logger.log(f"Successfully deleted {path_to_delete} and {info_file_path}")
queue.put(('deletion_complete', True))
except Exception as e:
self.logger.log(f"Failed to delete unencrypted user backup {path_to_delete}: {e}")
queue.put(('deletion_complete', False))
except Exception as e:
self.logger.log(f"Error during threaded deletion: {e}")
queue.put(('deletion_complete', False))
def cancel_backup(self):
if self.process and self.process.poll() is None:
self.logger.log("Attempting to cancel backup...")
try:
pgid = os.getpgid(self.process.pid)
if self.is_system_process:
self.logger.log(
f"Cancelling system process with pgid {pgid} via privileged script.")
script_content = f"kill -SIGTERM -- -{pgid}"
self.encryption_manager._execute_as_root(script_content)
else:
os.killpg(pgid, signal.SIGTERM)
self.logger.log("Backup process terminated.")
except ProcessLookupError:
self.logger.log(
"Backup process already terminated or not found.")
except Exception as e:
self.logger.log(f"Failed to terminate backup process: {e}")
else:
self.logger.log("No active backup process to cancel.")
def start_backup(self, queue, source_path: str, dest_path: str, is_system: bool, is_dry_run: bool = False, exclude_files: Optional[List[Path]] = None, source_size: int = 0, is_compressed: bool = False, is_encrypted: bool = False, mode: str = "incremental", password: Optional[str] = None, key_file: Optional[str] = None):
self.is_system_process = is_system
thread = threading.Thread(target=self._run_backup_path, args=(
queue, source_path, dest_path, is_system, is_dry_run, exclude_files, source_size, is_compressed, is_encrypted, mode, password, key_file))
thread.daemon = True
thread.start()
return thread
def _find_latest_backup(self, base_backup_path: str) -> Optional[str]:
"""Finds the most recent backup directory in a given path."""
self.logger.log(f"Searching for latest backup in: {base_backup_path}")
backup_names = self.list_backups(base_backup_path)
if not backup_names:
self.logger.log("No previous backups found to link against.")
return None
latest_backup_name = backup_names[0]
latest_backup_path = os.path.join(base_backup_path, latest_backup_name)
if os.path.isdir(latest_backup_path):
self.logger.log(f"Found latest backup for --link-dest: {latest_backup_path}")
return latest_backup_path
self.logger.log(f"Latest backup entry '{latest_backup_name}' was not a directory. No link will be used.")
return None
    def _compress_and_cleanup(self, dest_path: str, is_system: bool, is_encrypted: bool = False) -> bool:
"""Compresses the backup directory and cleans up the original."""
self.logger.log(f"Starting compression for: {dest_path}")
parent_dir = os.path.dirname(dest_path)
archive_name = os.path.basename(dest_path) + ".tar.gz"
archive_path = os.path.join(parent_dir, archive_name)
tar_command = f"tar -czf '{archive_path}' -C '{parent_dir}' '{os.path.basename(dest_path)}'"
rm_command = f"rm -rf '{dest_path}'"
script_content = f"""
#!/bin/bash
set -e
{tar_command}
echo \"tar command finished with exit code $?.\"
{rm_command}
echo \"rm command finished with exit code $?.\"
"""
if is_system or is_encrypted:
self.logger.log("Executing compression and cleanup as root.")
if self.encryption_manager._execute_as_root(script_content):
self.logger.log("Compression and cleanup script executed successfully.")
return True
else:
self.logger.log("Compression and cleanup script failed.")
return False
else:
try:
self.logger.log(f"Executing local command: {tar_command}")
tar_result = subprocess.run(tar_command, shell=True, capture_output=True, text=True, check=True)
self.logger.log(f"tar command successful. Output: {tar_result.stdout}")
self.logger.log(f"Executing local command: {rm_command}")
rm_result = subprocess.run(rm_command, shell=True, capture_output=True, text=True, check=True)
self.logger.log(f"rm command successful. Output: {rm_result.stdout}")
return True
except subprocess.CalledProcessError as e:
self.logger.log(f"A command failed during local compression/cleanup. Return code: {e.returncode}")
self.logger.log(f"Stdout: {e.stdout}")
self.logger.log(f"Stderr: {e.stderr}")
return False
except Exception as e:
self.logger.log(f"An unexpected error occurred during local compression/cleanup: {e}")
return False
def _run_backup_path(self, queue, source_path: str, dest_path: str, is_system: bool, is_dry_run: bool, exclude_files: Optional[List[Path]], source_size: int, is_compressed: bool, is_encrypted: bool, mode: str, password: Optional[str], key_file: Optional[str]):
try:
base_dest_path = os.path.dirname(dest_path)
pybackup_dir = os.path.join(base_dest_path, "pybackup")
backup_name = os.path.basename(dest_path)
os.makedirs(pybackup_dir, exist_ok=True)
mount_point = None
if is_encrypted:
# Initial size is 110% of source size + 1GB
size_gb = int(source_size / (1024**3) * 1.1) + 1
mount_point = self.encryption_manager.setup_encrypted_backup(
queue, base_dest_path, size_gb, password=password, key_file=key_file)
if not mount_point:
queue.put(('completion', {'status': 'error', 'returncode': -1}))
return
rsync_base_dest = mount_point
if not is_system:
user_backup_dir = os.path.join(mount_point, "user_encrypt")
if not self.encryption_manager._execute_as_root(f"mkdir -p {user_backup_dir}"):
self.logger.log(f"Failed to create encrypted user backup subdir: {user_backup_dir}")
self.encryption_manager.cleanup_encrypted_backup(base_dest_path)
queue.put(('completion', {'status': 'error', 'returncode': -1}))
return
rsync_base_dest = user_backup_dir
rsync_dest = os.path.join(rsync_base_dest, backup_name)
else: # Not encrypted
rsync_base_dest = pybackup_dir
if not is_system:
rsync_base_dest = os.path.join(pybackup_dir, "user_backups")
os.makedirs(rsync_base_dest, exist_ok=True)
rsync_dest = os.path.join(rsync_base_dest, backup_name)
self.logger.log(f"Starting backup from '{source_path}' to '{rsync_dest}'...")
if os.path.isdir(source_path) and not source_path.endswith('/'):
source_path += '/'
if not os.path.exists(rsync_base_dest):
# For encrypted, this is created by the mount. For non-encrypted, create it here.
if not is_encrypted:
os.makedirs(rsync_base_dest, exist_ok=True)
latest_backup_path = self._find_latest_backup(rsync_base_dest)
command = []
if is_system or is_encrypted:
command.extend(['pkexec', 'rsync', '-aAXHv'])
else:
command.extend(['rsync', '-av'])
if mode == "incremental" and latest_backup_path and not is_dry_run:
command.append(f"--link-dest={latest_backup_path}")
command.extend(['--info=progress2'])
if exclude_files:
command.extend([f"--exclude-from={f}" for f in exclude_files])
if AppConfig.MANUAL_EXCLUDE_LIST_PATH.exists():
command.append(f"--exclude-from={AppConfig.MANUAL_EXCLUDE_LIST_PATH}")
if is_dry_run:
command.append('--dry-run')
command.extend([source_path, rsync_dest])
self.logger.log(f"Rsync command: {' '.join(command)}")
# Initial rsync execution
transferred_size, total_size, stderr = self._execute_rsync(queue, command)
return_code = self.process.returncode if self.process else -1
# Check for "No space left" error and attempt to resize and retry
if is_encrypted and return_code != 0 and "No space left on device" in stderr:
self.logger.log("Rsync failed due to lack of space. Attempting to resize container.")
                queue.put(('status_update', 'Container voll. Vergrößere automatisch...'))  # "Container full. Enlarging automatically..."
queue.put(('progress_mode', 'indeterminate'))
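                # Backing file of the loop device that hosts the encrypted LVM
                # container (presumably created by EncryptionManager).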
container_path = os.path.join(pybackup_dir, "pybackup_lvm.img")
current_size_bytes = os.path.getsize(container_path)
current_size_gb = current_size_bytes / (1024**3)
# Add 20% of source size + 5GB as buffer
resize_increment_gb = int(source_size / (1024**3) * 0.2) + 5
new_size_gb = int(current_size_gb + resize_increment_gb)
self.logger.log(f"Current container size: {current_size_gb:.2f}GB. Attempting resize to {new_size_gb}GB.")
if self.encryption_manager.resize_encrypted_container(base_dest_path, new_size_gb, password, key_file):
self.logger.log("Container resized successfully. Retrying rsync.")
                    queue.put(('status_update', 'Vergrößerung erfolgreich. Setze Backup fort...'))  # "Resize successful. Resuming backup..."
queue.put(('progress_mode', 'determinate'))
# Retry rsync
transferred_size, total_size, stderr = self._execute_rsync(queue, command)
return_code = self.process.returncode if self.process else -1
else:
self.logger.log("Failed to resize container. Aborting backup.")
queue.put(('error', "Container-Vergrößerung fehlgeschlagen. Backup abgebrochen."))
# No need to set status, completion will be handled as error
self.logger.log(f"_execute_rsync returned: transferred_size={transferred_size}, total_size={total_size}")
if self.process:
self.logger.log(f"Rsync process finished with return code: {return_code}")
status = 'error'
if return_code == 0: status = 'success'
elif return_code in [23, 24]: status = 'warning'
elif return_code in [143, -15, 15, -9]: status = 'cancelled'
if status in ['success', 'warning'] and not is_dry_run:
info_filename_base = backup_name
if is_compressed:
# ... (compression logic remains the same)
pass
final_size = transferred_size if (mode == 'incremental' and latest_backup_path) else (total_size or source_size)
self._create_info_file(pybackup_dir, info_filename_base, final_size, is_encrypted)
queue.put(('completion', {'status': status, 'returncode': return_code}))
else:
self.logger.log("Rsync process did not start or self.process is None.")
queue.put(('completion', {'status': 'error', 'returncode': -1}))
self.logger.log(f"Backup to '{rsync_dest}' completed.")
finally:
if is_encrypted and mount_point:
self.encryption_manager.cleanup_encrypted_backup(base_dest_path)
self.process = None
def _create_info_file(self, pybackup_dir: str, backup_name: str, source_size: int, is_encrypted: bool):
try:
info_filename = f"{backup_name}_encrypted.txt" if is_encrypted else f"{backup_name}.txt"
info_file_path = os.path.join(pybackup_dir, info_filename)
original_bytes = source_size
if source_size > 0:
power = 1024
n = 0
power_labels = {0: 'B', 1: 'KB', 2: 'MB', 3: 'GB', 4: 'TB'}
display_size = original_bytes
while display_size >= power and n < len(power_labels) - 1:
display_size /= power
n += 1
size_str = f"{display_size:.2f} {power_labels[n]}"
else:
size_str = "0 B"
date_str = datetime.datetime.now().strftime("%d-%m-%Y %H:%M:%S")
info_content = (
f"Backup-Datum: {date_str}\n"
f"Originalgröße: {size_str} ({original_bytes} Bytes)\n"
)
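            # The German labels are load-bearing: list_system_backups,
            # list_user_backups and get_comment match "Originalgröße:"
            # (original size) and "Kommentar:" (comment) when parsing
            # these info files.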
self.logger.log(
f"Attempting to write info file to {info_file_path} as current user.")
with open(info_file_path, 'w') as f:
f.write(info_content)
self.logger.log(
f"Successfully created metadata file: {info_file_path}")
except Exception as e:
self.logger.log(
f"Failed to create metadata file. Please check permissions for {pybackup_dir}. Error: {e}")
def _execute_rsync(self, queue, command: List[str]):
transferred_size = 0
total_size = 0
stderr_output = ""
try:
try:
env = os.environ.copy()
env['LC_ALL'] = 'C'
self.process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, bufsize=1, preexec_fn=os.setsid, env=env)
except FileNotFoundError:
self.logger.log(
"Error: 'pkexec' or 'rsync' command not found in PATH during Popen call.")
queue.put(('error', None))
return 0, 0, ""
except Exception as e:
self.logger.log(
f"Error starting rsync process with Popen: {e}")
queue.put(('error', None))
return 0, 0, ""
if self.process is None:
self.logger.log(
"Error: subprocess.Popen returned None for rsync process (after exception handling).")
queue.put(('error', None))
return 0, 0, ""
progress_regex = re.compile(r'\s*(\d+)%\s+')
output_lines = []
if self.process.stdout:
full_stdout = []
for line in iter(self.process.stdout.readline, ''):
stripped_line = line.strip()
self.logger.log(f"Rsync stdout line: {stripped_line}")
full_stdout.append(stripped_line)
match = progress_regex.search(stripped_line)
if match:
percentage = int(match.group(1))
queue.put(('progress', percentage))
else:
if stripped_line and not stripped_line.startswith(('sending incremental file list', 'sent', 'total size')):
queue.put(('file_update', stripped_line))
self.process.wait()
if self.process.stderr:
stderr_output = self.process.stderr.read()
if stderr_output:
self.logger.log(f"Rsync Stderr: {stderr_output.strip()}")
full_stdout.extend(stderr_output.strip().split('\n'))
output_lines = full_stdout
transferred_size = 0
total_size = 0
summary_regex = re.compile(r"sent ([\d,.]+) bytes\s+received ([\d,.]+) bytes")
            total_size_regex = re.compile(r"total size is ([\d,.]+)\s+speedup")
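            # These match rsync's closing summary lines, e.g.:
            #     sent 1,234 bytes  received 56 bytes  860.00 bytes/sec
            #     total size is 9,876  speedup is 7.66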
for line in reversed(output_lines):
match = summary_regex.search(line)
if match and transferred_size == 0:
try:
sent_str = match.group(1).replace(',', '').replace('.', '')
received_str = match.group(2).replace(',', '').replace('.', '')
bytes_sent = int(sent_str)
bytes_received = int(received_str)
transferred_size = bytes_sent + bytes_received
self.logger.log(
f"Detected total bytes transferred from summary: {transferred_size} bytes")
except (ValueError, IndexError) as e:
self.logger.log(
f"Could not parse sent/received bytes from line: '{line}'. Error: {e}")
total_match = total_size_regex.search(line)
if total_match and total_size == 0:
try:
total_size_str = total_match.group(1).replace(',', '').replace('.', '')
total_size = int(total_size_str)
self.logger.log(f"Detected total size from summary: {total_size} bytes")
except(ValueError, IndexError) as e:
self.logger.log(f"Could not parse total size from line: '{line}'. Error: {e}")
self.logger.log(f"_execute_rsync final parsed values: transferred_size={transferred_size}, total_size={total_size}")
if transferred_size == 0:
bytes_sent = 0
bytes_received = 0
for line in output_lines:
if line.strip().startswith('Total bytes sent:'):
try:
size_str = line.split(':')[1].strip()
bytes_sent = int(size_str.replace(',', '').replace('.', ''))
except (ValueError, IndexError):
self.logger.log(f"Could not parse bytes sent from line: {line}")
elif line.strip().startswith('Total bytes received:'):
try:
size_str = line.split(':')[1].strip()
bytes_received = int(size_str.replace(',', '').replace('.', ''))
except (ValueError, IndexError):
self.logger.log(f"Could not parse bytes received from line: {line}")
if bytes_sent > 0 or bytes_received > 0:
transferred_size = bytes_sent + bytes_received
self.logger.log(
f"Detected total bytes transferred from --stats: {transferred_size} bytes")
else:
self.logger.log(
"Could not determine transferred size from rsync output. Size will be 0.")
except FileNotFoundError:
self.logger.log(
"Error: 'rsync' command not found. Please ensure it is installed and in your PATH.")
queue.put(('error', None))
except Exception as e:
self.logger.log(f"An unexpected error occurred: {e}")
queue.put(('error', None))
return transferred_size, total_size, stderr_output
def start_restore(self, source_path: str, dest_path: str, is_compressed: bool):
"""Starts a restore process in a separate thread."""
try:
queue = self.app.queue
except AttributeError:
self.logger.log("Could not get queue from app instance. Restore progress will not be reported.")
from queue import Queue
queue = Queue()
thread = threading.Thread(target=self._run_restore, args=(
queue, source_path, dest_path, is_compressed))
thread.daemon = True
thread.start()
def _run_restore(self, queue, source_path: str, dest_path: str, is_compressed: bool):
"""Executes the restore logic for a system backup."""
self.logger.log(f"Starting restore from {source_path} to {dest_path}")
status = 'error'
try:
if is_compressed:
script_content = f"tar -xzf '{source_path}' -C '{dest_path}'"
else:
source = source_path.rstrip('/') + '/'
script_content = f"rsync -aAXHv '{source}' '{dest_path}'"
if self.encryption_manager._execute_as_root(script_content):
self.logger.log("Restore script executed successfully.")
status = 'success'
else:
self.logger.log("Restore script failed.")
status = 'error'
except Exception as e:
self.logger.log(f"An unexpected error occurred during restore: {e}")
status = 'error'
finally:
queue.put(('completion', {'status': status, 'returncode': 0 if status == 'success' else 1}))
def get_scheduled_jobs(self) -> List[Dict[str, Any]]:
jobs_list = []
try:
user_cron = CronTab(user=True)
for job in user_cron:
if self.app_tag in job.comment:
details = self._parse_job_comment(job.comment)
if details:
jobs_list.append({
"id": job.comment,
"active": job.is_enabled(),
"type": details.get("type", "N/A"),
"frequency": details.get("freq", "N/A"),
"destination": details.get("dest", "N/A"),
"sources": details.get("sources", []),
"command": job.command
})
except Exception as e:
self.logger.log(f"Error loading cron jobs: {e}")
return jobs_list
def add_scheduled_job(self, job_details: Dict[str, Any]):
try:
user_cron = CronTab(user=True)
job = user_cron.new(
command=job_details["command"], comment=job_details["comment"])
if job_details["frequency"] == "daily":
job.day.every(1)
elif job_details["frequency"] == "weekly":
job.dow.every(1)
elif job_details["frequency"] == "monthly":
job.dom.every(1)
job.enable()
user_cron.write()
self.logger.log(
f"Job successfully added: {job_details['comment']}")
except Exception as e:
self.logger.log(f"Error adding cron job: {e}")
def remove_scheduled_job(self, job_id: str):
try:
user_cron = CronTab(user=True)
user_cron.remove_all(comment=job_id)
user_cron.write()
self.logger.log(f"Job successfully removed: {job_id}")
except Exception as e:
self.logger.log(f"Error removing cron job: {e}")
def _parse_job_comment(self, comment: str) -> Dict[str, Any]:
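        """Parses a job comment of the (assumed) form
        '# Py-Backup Job; type:system; freq:daily; dest:/path; sources:/a,/b'
        into a dict; the key names match those read in get_scheduled_jobs."""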
details = {}
parts = comment.split("; ")
for part in parts:
if ":" in part:
key, value = part.split(":", 1)
if key.strip() == "sources":
details[key.strip()] = [s.strip()
for s in value.split(",")]
else:
details[key.strip()] = value.strip()
return details
def has_encrypted_backups(self, base_backup_path: str) -> bool:
"""Checks if any encrypted backups (system or user) exist in the destination."""
system_backups = self.list_system_backups(base_backup_path)
for backup in system_backups:
if backup.get('is_encrypted'):
return True
user_backups = self.list_user_backups(base_backup_path)
for backup in user_backups:
if backup.get('is_encrypted'):
return True
return False
def list_backups(self, base_backup_path: str) -> List[str]:
backups = []
if os.path.isdir(base_backup_path):
for item in os.listdir(base_backup_path):
full_path = os.path.join(base_backup_path, item)
if os.path.isdir(full_path):
backups.append(item)
return sorted(backups, reverse=True)
def list_system_backups(self, base_dest_path: str) -> List[Dict[str, str]]:
"""Lists all system backups, looking inside encrypted containers if necessary."""
return self.encryption_manager.inspect_container(base_dest_path, self._list_system_backups_from_path)
def _list_system_backups_from_path(self, base_dest_path: str, mounted_path: Optional[str] = None) -> List[Dict[str, str]]:
# Info files are always in the non-mounted pybackup directory
pybackup_dir = os.path.join(base_dest_path, "pybackup")
if not os.path.isdir(pybackup_dir):
return []
all_backups = []
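        # Info files are named like (per the regex below; dates are dd-mm-yyyy):
        #   "01-01-2025_12:00:00_system_full.txt"
        #   "01-01-2025_12:00:00_system_incremental.tar.gz_encrypted.txt"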
name_regex = re.compile(
r"^(\d{2}-\d{2}-\d{4})_(\d{2}:\d{2}:\d{2})_system_(full|incremental)(\.tar\.gz)?(_encrypted)?\.txt$", re.IGNORECASE)
for item in os.listdir(pybackup_dir):
match = name_regex.match(item)
if not match:
continue
date_str, time_str, backup_type_base, comp_ext, enc_suffix = match.groups()
is_encrypted = (enc_suffix is not None)
is_compressed = (comp_ext is not None)
backup_name = item.replace(".txt", "").replace("_encrypted", "")
# The full_path to the backup data is inside the mounted path if it exists
if mounted_path:
full_path = os.path.join(mounted_path, backup_name)
else: # Unencrypted backups are in a subdir of pybackup_dir
full_path = os.path.join(pybackup_dir, backup_name)
backup_type = backup_type_base.capitalize()
if is_compressed: backup_type += " (Compressed)"
if is_encrypted: backup_type += " (Encrypted)"
backup_size = "N/A"
comment = ""
info_file_path = os.path.join(pybackup_dir, item)
if os.path.exists(info_file_path):
try:
with open(info_file_path, 'r') as f:
for line in f:
if line.strip().lower().startswith("originalgröße:"):
backup_size = line.split(":", 1)[1].strip().split('(')[0].strip()
elif line.strip().lower().startswith("kommentar:"):
comment = line.split(":", 1)[1].strip()
except Exception as e:
self.logger.log(f"Could not read info file {info_file_path}: {e}")
all_backups.append({
"date": date_str, "time": time_str, "type": backup_type,
"size": backup_size, "folder_name": backup_name, "full_path": full_path,
"comment": comment, "is_compressed": is_compressed, "is_encrypted": is_encrypted,
"backup_type_base": backup_type_base.capitalize(),
"datetime": datetime.datetime.strptime(f"{date_str} {time_str}", '%d-%m-%Y %H:%M:%S')
})
all_backups.sort(key=lambda x: x['datetime'])
grouped_backups = []
current_group = []
for backup in all_backups:
if backup['backup_type_base'] == 'Full':
if current_group: grouped_backups.append(current_group)
current_group = [backup]
            else:
                current_group.append(backup)
if current_group: grouped_backups.append(current_group)
grouped_backups.sort(key=lambda g: g[0]['datetime'], reverse=True)
final_sorted_list = [item for group in grouped_backups for item in group]
return final_sorted_list
def list_user_backups(self, base_dest_path: str) -> List[Dict[str, str]]:
"""Lists all user backups, looking inside encrypted containers if necessary."""
return self.encryption_manager.inspect_container(base_dest_path, self._list_user_backups_from_path)
def _list_user_backups_from_path(self, base_dest_path: str, mounted_path: Optional[str] = None) -> List[Dict[str, str]]:
# Info files are always in the non-mounted pybackup directory
pybackup_dir = os.path.join(base_dest_path, "pybackup")
if not os.path.isdir(pybackup_dir):
return []
user_backups = []
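        # User info files are named like (per the regex below):
        #   "01-01-2025_12:00:00_user_Documents.txt"  (or "..._encrypted.txt")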
name_regex = re.compile(
r"^(\d{2}-\d{2}-\d{4})_(\d{2}:\d{2}:\d{2})_user_(.+?)(_encrypted)?\.txt$", re.IGNORECASE)
for item in os.listdir(pybackup_dir):
match = name_regex.match(item)
if not match:
continue
date_str, time_str, source_name, enc_suffix = match.groups()
is_encrypted = (enc_suffix is not None)
backup_name = item.replace(".txt", "").replace("_encrypted", "")
# The full_path to the backup data is inside the mounted path if it exists
if mounted_path:
# User backups are in a subdir within the encrypted mount
user_backup_dir = os.path.join(mounted_path, "user_encrypt")
full_path = os.path.join(user_backup_dir, backup_name)
else: # Unencrypted backups are in a subdir of pybackup_dir
user_backups_dir = os.path.join(pybackup_dir, "user_backups")
full_path = os.path.join(user_backups_dir, backup_name)
backup_size = "N/A"
comment = ""
info_file_path = os.path.join(pybackup_dir, item)
if os.path.exists(info_file_path):
try:
with open(info_file_path, 'r') as f:
for line in f:
if line.strip().lower().startswith("originalgröße:"):
backup_size = line.split(":", 1)[1].strip().split('(')[0].strip()
elif line.strip().lower().startswith("kommentar:"):
comment = line.split(":", 1)[1].strip()
except Exception as e:
self.logger.log(f"Could not read info file {info_file_path}: {e}")
user_backups.append({
"date": date_str, "time": time_str, "size": backup_size,
"folder_name": backup_name, "full_path": full_path, "comment": comment,
"is_encrypted": is_encrypted, "source": source_name
})
        user_backups.sort(key=lambda x: datetime.datetime.strptime(f"{x['date']} {x['time']}", '%d-%m-%Y %H:%M:%S'), reverse=True)
return user_backups
def get_comment(self, info_file_path: str) -> str:
"""Reads an info file and returns the comment, if it exists."""
if not os.path.exists(info_file_path):
return ""
try:
with open(info_file_path, 'r') as f:
for line in f:
if line.strip().lower().startswith("kommentar:"):
return line.split(":", 1)[1].strip()
except Exception as e:
self.logger.log(f"Error reading comment from {info_file_path}: {e}")
return ""
def update_comment(self, info_file_path: str, new_comment: str):
"""Updates the comment in a given info file."""
try:
lines = []
comment_found = False
if os.path.exists(info_file_path):
with open(info_file_path, 'r') as f:
lines = f.readlines()
new_lines = []
for line in lines:
if line.strip().lower().startswith("kommentar:"):
if new_comment:
new_lines.append(f"Kommentar: {new_comment}\n")
comment_found = True
else:
new_lines.append(line)
if not comment_found and new_comment:
new_lines.append(f"Kommentar: {new_comment}\n")
with open(info_file_path, 'w') as f:
f.writelines(new_lines)
self.logger.log(f"Successfully updated comment in {info_file_path}")
except Exception as e:
self.logger.log(f"Error updating comment in {info_file_path}: {e}")
def test_pkexec_rsync(self, source_path: str, dest_path: str):
self.logger.log(f"Testing pkexec rsync command...")
command = ['pkexec', 'rsync', '-aAXHv', source_path, dest_path]
try:
result = subprocess.run(
command, capture_output=True, text=True, check=False)
self.logger.log(f"pkexec rsync return code: {result.returncode}")
self.logger.log(f"pkexec rsync stdout: {result.stdout.strip()}")
self.logger.log(f"pkexec rsync stderr: {result.stderr.strip()}")
except FileNotFoundError:
self.logger.log("Error: 'pkexec' or 'rsync' command not found.")
except Exception as e:
self.logger.log(
f"An unexpected error occurred during pkexec rsync test: {e}")