diff --git a/core/backup_manager.py b/core/backup_manager.py
index 1fb5640..de44a44 100644
--- a/core/backup_manager.py
+++ b/core/backup_manager.py
@@ -164,18 +164,19 @@ class BackupManager:
         queue.put(('current_path', rsync_dest))
 
-        rsync_command_parts = ['rsync', '-aAXHv'] if is_system else ['rsync', '-aL']
+        rsync_command_parts = [
+            'rsync', '-aAXHv'] if is_system else ['rsync', '-aLv']
 
         if mode == "incremental" and latest_full_backup_path and not is_dry_run:
             rsync_command_parts.append(
-                f"--link-dest='{latest_full_backup_path}'")
+                f"--link-dest={latest_full_backup_path}")
 
         rsync_command_parts.extend(['--info=progress2'])
 
         if exclude_files:
             rsync_command_parts.extend(
-                [f"--exclude-from='{f}'" for f in exclude_files])
+                [f"--exclude-from={f}" for f in exclude_files])
         if AppConfig.MANUAL_EXCLUDE_LIST_PATH.exists():
             rsync_command_parts.append(
-                f"--exclude-from='{AppConfig.MANUAL_EXCLUDE_LIST_PATH}'")
+                f"--exclude-from={AppConfig.MANUAL_EXCLUDE_LIST_PATH}")
 
         if is_dry_run:
             rsync_command_parts.append('--dry-run')
@@ -192,13 +193,12 @@ class BackupManager:
                 f"--exclude='{os.path.basename(trash_bin_path)}/'")
 
         if is_system:
-            # For system backup, build a shell command string with quoted paths.
-            rsync_command_parts.extend([f"'{source_path}'", f"'{rsync_dest}'"])
+            rsync_command_parts.extend(
+                [f"'{source_path}'", f"'{rsync_dest}'"])
             rsync_cmd_str = ' '.join(rsync_command_parts)
             full_system_cmd = f"mkdir -p '{rsync_dest}' && {rsync_cmd_str}"
             command = ['pkexec', 'bash', '-c', full_system_cmd]
         else:
-            # For user backup, pass a list of args to Popen without extra quotes.
            rsync_command_parts.extend([source_path, rsync_dest])
             os.makedirs(rsync_dest, exist_ok=True)
             command = rsync_command_parts
@@ -213,7 +213,15 @@ class BackupManager:
         status = 'success' if return_code == 0 else 'warning' if return_code in [
             23, 24] else 'cancelled' if return_code in [143, -15, 15, -9] else 'error'
         if status in ['success', 'warning'] and not is_dry_run:
-            final_size = self._get_directory_size(rsync_dest)
+            if mode == 'incremental' and latest_full_backup_path:
+                if is_system:
+                    final_size = self._get_incremental_size_system(
+                        rsync_dest)
+                else:
+                    final_size = self._get_incremental_size_user(
+                        rsync_dest, latest_full_backup_path)
+            else:
+                final_size = self._get_directory_size(rsync_dest)
             self._create_info_json(
                 base_dest_path=base_dest_path,
                 backup_dir_name=backup_dir_name,
@@ -236,6 +244,81 @@ class BackupManager:
             self._uninhibit_screensaver()
             self.process = None
 
+    def estimate_incremental_size(self, source_path: str, is_system: bool, source_name: str, base_dest_path: str, is_encrypted: bool, exclude_files: list) -> int:
+        """
+        Calculates the approximate size of an incremental backup using rsync's
+        dry-run feature.
+        """
+        self.logger.log(
+            f"Estimating incremental backup size for source: {source_path}")
+
+        if not base_dest_path:
+            self.logger.log(
+                "No destination path provided, cannot estimate incremental size.")
+            return 0
+
+        profile_path = self._get_profile_path(
+            base_dest_path, is_system, source_name, is_encrypted)
+        latest_backup_path = self._find_latest_backup(
+            profile_path, source_name)
+
+        if not latest_backup_path:
+            self.logger.log(
+                "No previous full backup found. Accurate incremental size cannot be estimated, returning 0.")
+            return 0
+
+        command = []
+        if is_system:
+            command.extend(['pkexec', 'rsync', '-aAXHvn', '--stats'])
+        else:
+            command.extend(['rsync', '-avn', '--stats'])
+
+        command.append(f"--link-dest={latest_backup_path}")
+
+        if exclude_files:
+            for exclude_file in exclude_files:
+                command.append(f"--exclude-from={exclude_file}")
+
+        try:
+            with tempfile.TemporaryDirectory() as dummy_dest:
+                command.extend([source_path, dummy_dest])
+
+                self.logger.log(
+                    f"Executing rsync dry-run command: {' '.join(command)}")
+
+                result = subprocess.run(
+                    command, capture_output=True, text=True, check=False)
+
+                # rsync exit code 24 means some files vanished during transfer, which is okay for a dry-run estimate.
+                if result.returncode != 0 and result.returncode != 24:
+                    self.logger.log(
+                        f"Rsync dry-run failed with code {result.returncode}: {result.stderr}")
+                    return 0
+
+                output = result.stdout + "\n" + result.stderr
+                match = re.search(
+                    r"Total transferred file size: ([\d,.]+) bytes", output)
+                if match:
+                    size_str = match.group(1).replace(',', '').replace('.', '')
+                    size_bytes = int(size_str)
+                    self.logger.log(
+                        f"Estimated incremental backup size: {size_bytes} bytes")
+                    return size_bytes
+                else:
+                    self.logger.log(
+                        "Could not find 'Total transferred file size' in rsync output.")
+                    self.logger.log(
+                        f"Full rsync output for debugging:\n{output}")
+                    return 0
+
+        except FileNotFoundError:
+            self.logger.log("Error: 'rsync' or 'pkexec' command not found.")
+            return 0
+        except Exception as e:
+            self.logger.log(
+                f"An unexpected error occurred during incremental size estimation: {e}")
+            return 0
+
     def _get_directory_size(self, path: str) -> int:
         if not os.path.isdir(path):
             return 0
@@ -260,6 +343,42 @@ class BackupManager:
                     f"Fallback size calculation also failed for {path}: {fallback_e}")
                 return 0
 
+    def _get_incremental_size_user(self, inc_path: str, full_path: str) -> int:
+        total_size = 0
+        for dirpath, _, filenames in os.walk(inc_path):
+            for filename in filenames:
+                inc_file_path = os.path.join(dirpath, filename)
+                relative_path = os.path.relpath(inc_file_path, inc_path)
+                full_file_path = os.path.join(full_path, relative_path)
+                try:
+                    inc_stat = os.stat(inc_file_path)
+                    if os.path.exists(full_file_path):
+                        full_stat = os.stat(full_file_path)
+                        if inc_stat.st_ino == full_stat.st_ino:
+                            continue
+                    total_size += inc_stat.st_size
+                except FileNotFoundError:
+                    continue
+        return total_size
+
+    def _get_incremental_size_system(self, inc_path: str) -> int:
+        self.logger.log(
+            f"Calculating incremental size for system backup: {inc_path}")
+        command_str = f"find '{inc_path}' -type f -links 1 -print0 | xargs -0 stat -c %s | awk '{{s+=$1}} END {{print s}}'"
+        try:
+            full_command = ['pkexec', 'bash', '-c', command_str]
+            result = subprocess.run(
+                full_command, capture_output=True, text=True, check=True)
+            output = result.stdout.strip()
+            if output:
+                return int(output)
+            else:
+                return 0
+        except (subprocess.CalledProcessError, FileNotFoundError, ValueError) as e:
+            self.logger.log(
+                f"Failed to calculate incremental system backup size: {e}")
+            return self._get_directory_size(inc_path)
+
     def list_all_backups(self, base_dest_path: str, mount_if_needed: bool = True):
         pybackup_dir = os.path.join(base_dest_path, "pybackup")
         metadata_dir = os.path.join(pybackup_dir, "metadata")
@@ -627,4 +746,4 @@ if [ -n "{delete_path}" ] && [ "{delete_path}" != "/" ]; then rm -rf "{delete_pa
         except ProcessLookupError:
             self.logger.log("Process already finished.")
         except Exception as e:
-            self.logger.log(f"Error cancelling process: {e}")
\ No newline at end of file
+            self.logger.log(f"Error cancelling process: {e}")
diff --git a/core/data_processing.py b/core/data_processing.py
index 5b20cbc..84bfe83 100644
--- a/core/data_processing.py
+++ b/core/data_processing.py
@@ -66,7 +66,6 @@ class DataProcessing:
         if exclude_patterns is None:
             exclude_patterns = []
 
-        # Compile exclude patterns into a single regex for performance
         if exclude_patterns:
             exclude_regex = re.compile(
                 '|'.join(fnmatch.translate(p) for p in exclude_patterns))
@@ -75,7 +74,7 @@ class DataProcessing:
 
         for dirpath, dirnames, filenames in os.walk(path, topdown=True):
             if stop_event.is_set():
-                return  # Stop the calculation
+                return
 
             if exclude_regex:
                 dirnames[:] = [d for d in dirnames if not exclude_regex.match(
@@ -83,7 +82,7 @@ class DataProcessing:
 
             for f in filenames:
                 if stop_event.is_set():
-                    return  # Stop the calculation
+                    return
 
                 fp = os.path.join(dirpath, f)
                 if not exclude_regex or not exclude_regex.match(fp):
@@ -101,11 +100,11 @@ class DataProcessing:
         total_size = 0
         for dirpath, dirnames, filenames in os.walk(path, topdown=True):
             if stop_event.is_set():
-                return  # Stop the calculation
+                return
 
             for f in filenames:
                 if stop_event.is_set():
-                    return  # Stop the calculation
+                    return
 
                 fp = os.path.join(dirpath, f)
                 if not os.path.islink(fp):
@@ -115,92 +114,4 @@ class DataProcessing:
                     pass
 
         if not stop_event.is_set():
-            self.app.queue.put((button_text, total_size, mode))
-
-    def get_incremental_backup_size(self, source_path: str, dest_path: str, is_system: bool, exclude_files: list = None) -> int:
-        """
-        Calculates the approximate size of an incremental backup using rsync's dry-run feature.
-        This is much faster than walking the entire file tree.
-        """
-        app_logger.log(f"Calculating incremental backup size for source: {source_path}")
-
-        parent_dest = os.path.dirname(dest_path)
-        if not os.path.exists(parent_dest):
-            # If the parent destination doesn't exist, there are no previous backups to link to.
-            # In this case, the incremental size is the full size of the source.
-            # We can use the existing full-size calculation method.
-            # This is a simplified approach for the estimation.
-            # A more accurate one would run rsync without --link-dest.
-            app_logger.log("Destination parent does not exist, cannot calculate incremental size. Returning 0.")
-            return 0
-
-        # Find the latest backup to link against
-        try:
-            backups = sorted([d for d in os.listdir(parent_dest) if os.path.isdir(os.path.join(parent_dest, d))], reverse=True)
-            if not backups:
-                app_logger.log("No previous backups found. Incremental size is full size.")
-                return 0  # Or trigger a full size calculation
-            latest_backup_path = os.path.join(parent_dest, backups[0])
-        except FileNotFoundError:
-            app_logger.log("Could not list backups, assuming no prior backups exist.")
-            return 0
-
-
-        command = []
-        if is_system:
-            command.extend(['pkexec', 'rsync', '-aAXHvn', '--stats'])
-        else:
-            command.extend(['rsync', '-avn', '--stats'])
-
-        command.append(f"--link-dest={latest_backup_path}")
-
-        if exclude_files:
-            for exclude_file in exclude_files:
-                command.append(f"--exclude-from={exclude_file}")
-
-        if AppConfig.MANUAL_EXCLUDE_LIST_PATH.exists():
-            command.append(f"--exclude-from={AppConfig.MANUAL_EXCLUDE_LIST_PATH}")
-
-        # The destination for a dry run can be a dummy path, but it must exist.
-        # Let's use a temporary directory.
-        dummy_dest = os.path.join(parent_dest, "dry_run_dest")
-        os.makedirs(dummy_dest, exist_ok=True)
-
-        command.extend([source_path, dummy_dest])
-
-        app_logger.log(f"Executing rsync dry-run command: {' '.join(command)}")
-
-        try:
-            result = subprocess.run(command, capture_output=True, text=True, check=False)
-
-            # Clean up the dummy directory
-            shutil.rmtree(dummy_dest)
-
-            if result.returncode != 0:
-                app_logger.log(f"Rsync dry-run failed with code {result.returncode}: {result.stderr}")
-                return 0
-
-            output = result.stdout + "\n" + result.stderr
-            # The regex now accepts dots as thousands separators (e.g., 1.234.567).
-            match = re.search(r"Total transferred file size: ([\d,.]+) bytes", output)
-            if match:
-                # Remove both dots and commas before converting to an integer.
-                size_str = match.group(1).replace(',', '').replace('.', '')
-                size_bytes = int(size_str)
-                app_logger.log(f"Estimated incremental backup size: {size_bytes} bytes")
-                return size_bytes
-            else:
-                app_logger.log("Could not find 'Total transferred file size' in rsync output.")
-                # Log the output just in case something changes in the future
-                app_logger.log(f"Full rsync output for debugging:\n{output}")
-                return 0
-
-        except FileNotFoundError:
-            app_logger.log("Error: 'rsync' or 'pkexec' command not found.")
-            return 0
-        except Exception as e:
-            app_logger.log(f"An unexpected error occurred during incremental size calculation: {e}")
-            return 0
-
-    # The queue processing logic has been moved to main_app.py
-    # to fix a race condition and ensure all queue messages are handled correctly.
+            self.app.queue.put((button_text, total_size, mode))
\ No newline at end of file
diff --git a/pyimage_ui/actions.py b/pyimage_ui/actions.py
index 32323bf..b38434b 100644
--- a/pyimage_ui/actions.py
+++ b/pyimage_ui/actions.py
@@ -30,7 +30,6 @@ class Actions:
         source_name = self.app.left_canvas_data.get('folder')
         is_system_backup = (source_name == "Computer")
 
-        # Re-enable controls for user backups, disable for system unless conditions are met
         if not is_system_backup:
             self.app.full_backup_cb.config(state='normal')
             self.app.incremental_cb.config(state='normal')
@@ -38,7 +37,6 @@ class Actions:
             self.app.full_backup_cb.config(state='normal')
             self.app.incremental_cb.config(state='normal')
 
-        # Handle forced settings from advanced config, which have top priority
         if self.app.config_manager.get_setting("force_full_backup", False):
             self._set_backup_type("full")
             self.app.full_backup_cb.config(state='disabled')
@@ -50,21 +48,17 @@ class Actions:
             self.app.incremental_cb.config(state='disabled')
             return
 
-        # Default to Full if no destination is set
         if not self.app.destination_path or not os.path.isdir(self.app.destination_path):
             self._set_backup_type("full")
             return
 
         is_encrypted = self.app.encrypted_var.get()
 
-        # Use the new detection logic for both user and system backups
-        # Note: For system backups, source_name is "Computer". We might need a more specific profile name.
-        # For now, we adapt it to the check_for_full_backup function's expectation.
profile_name = "system" if is_system_backup else source_name full_backup_exists = self.app.backup_manager.check_for_full_backup( dest_path=self.app.destination_path, - source_name=profile_name, # Using a profile name now + source_name=profile_name, is_encrypted=is_encrypted ) @@ -74,14 +68,12 @@ class Actions: self._set_backup_type("full") def _refresh_backup_options_ui(self): - # Reset enabled/disabled state for all potentially affected controls self.app.full_backup_cb.config(state="normal") self.app.incremental_cb.config(state="normal") self.app.compressed_cb.config(state="normal") self.app.encrypted_cb.config(state="normal") self.app.accurate_size_cb.config(state="normal") - # Apply logic: Encryption and Compression are mutually exclusive if self.app.encrypted_var.get(): self.app.compressed_var.set(False) self.app.compressed_cb.config(state="disabled") @@ -89,7 +81,6 @@ class Actions: if self.app.compressed_var.get(): self.app.encrypted_var.set(False) self.app.encrypted_cb.config(state="disabled") - # Compression forces full backup self.app.vollbackup_var.set(True) self.app.inkrementell_var.set(False) self.app.full_backup_cb.config(state="disabled") @@ -97,7 +88,6 @@ class Actions: self.app.accurate_size_cb.config(state="disabled") self.app.genaue_berechnung_var.set(False) - # After setting the states, determine the final full/incremental choice self._update_backup_type_controls() def handle_backup_type_change(self, changed_var_name): @@ -156,28 +146,27 @@ class Actions: status = 'failure' size = 0 try: - exclude_file_paths = [] + is_system = (button_text == "Computer") + source_name = "system" if is_system else button_text + is_encrypted = self.app.encrypted_var.get() + + exclude_files = [] if AppConfig.GENERATED_EXCLUDE_LIST_PATH.exists(): - exclude_file_paths.append( - AppConfig.GENERATED_EXCLUDE_LIST_PATH) + exclude_files.append(AppConfig.GENERATED_EXCLUDE_LIST_PATH) if AppConfig.USER_EXCLUDE_LIST_PATH.exists(): - exclude_file_paths.append(AppConfig.USER_EXCLUDE_LIST_PATH) + exclude_files.append(AppConfig.USER_EXCLUDE_LIST_PATH) if AppConfig.MANUAL_EXCLUDE_LIST_PATH.exists(): - exclude_file_paths.append( - AppConfig.MANUAL_EXCLUDE_LIST_PATH) + exclude_files.append(AppConfig.MANUAL_EXCLUDE_LIST_PATH) - base_dest = self.app.destination_path - correct_parent_dir = os.path.join(base_dest, "pybackup") - dummy_dest_for_calc = os.path.join( - correct_parent_dir, "dummy_name") - - size = self.app.data_processing.get_incremental_backup_size( + size = self.app.backup_manager.estimate_incremental_size( source_path=folder_path, - dest_path=dummy_dest_for_calc, - is_system=True, - exclude_files=exclude_file_paths + is_system=is_system, + source_name=source_name, + base_dest_path=self.app.destination_path, + is_encrypted=is_encrypted, + exclude_files=exclude_files ) - status = 'success' if size > 0 else 'failure' + status = 'success' except Exception as e: app_logger.log(f"Error during threaded_incremental_calc: {e}") status = 'failure' @@ -202,8 +191,6 @@ class Actions: self.app.navigation.toggle_mode( self.app.mode, trigger_calculation=False) - # self.app.log_window.clear_log() - REVERSE_FOLDER_MAP = { "Computer": "Computer", Msg.STR["cat_documents"]: "Documents", @@ -252,7 +239,6 @@ class Actions: self._start_left_canvas_calculation( button_text, str(folder_path), icon_name, extra_info) - # Update sync mode display when source changes self.app._update_sync_mode_display() def _start_left_canvas_calculation(self, button_text, folder_path, icon_name, extra_info): @@ -346,7 +332,6 @@ class 
Actions: def _update_right_canvas_info(self, path): try: if self.app.mode == "backup": - # Unmount previous destination if it was mounted if self.app.destination_path: self.app.backup_manager.encryption_manager.unmount( self.app.destination_path) @@ -373,13 +358,13 @@ class Actions: size_str = f"{used / (1024**3):.2f} GB / {total / (1024**3):.2f} GB" self.app.right_canvas_data.update({ - 'folder': os.path.basename(path.rstrip('/')), + 'folder': os.path.basename(path.rstrip('/')), 'path_display': path, 'size': size_str }) self.app.config_manager.set_setting( "backup_destination_path", path) - self.app.header_frame.refresh_status() # Refresh keyring status + self.app.header_frame.refresh_status() self.app.drawing.redraw_right_canvas() self.app.drawing.update_target_projection() @@ -390,7 +375,7 @@ class Actions: elif self.app.mode == "restore": self.app.right_canvas_data.update({ - 'folder': os.path.basename(path.rstrip('/')), + 'folder': os.path.basename(path.rstrip('/')), 'path_display': path, 'size': '' }) @@ -406,7 +391,7 @@ class Actions: def reset_to_default_settings(self): - + self.app.config_manager.set_setting("backup_destination_path", None) self.app.config_manager.set_setting("restore_source_path", None) self.app.config_manager.set_setting("restore_destination_path", None) @@ -605,7 +590,6 @@ class Actions: self.app.start_pause_button["text"] = Msg.STR["cancel_backup"] self.app.update_idletasks() - # self.app.log_window.clear_log() self._set_ui_state(False, allow_log_and_backup_toggle=True) self.app.animated_icon.start() @@ -630,7 +614,6 @@ class Actions: self.app.last_backup_was_system = False self._start_user_backup() else: # restore mode - # Restore logic would go here pass def _start_system_backup(self, mode, source_size_bytes): @@ -647,7 +630,6 @@ class Actions: return is_encrypted = self.app.encrypted_var.get() - # Password handling is now managed within the backup manager if needed for mounting source_size_bytes = self.app.left_canvas_data.get('total_bytes', 0) @@ -665,7 +647,7 @@ class Actions: self.app.backup_manager.start_backup( queue=self.app.queue, source_path="/", - dest_path=base_dest, # Pass the base destination path + dest_path=base_dest, is_system=True, source_name="system", is_dry_run=is_dry_run, @@ -690,7 +672,6 @@ class Actions: return is_encrypted = self.app.encrypted_var.get() - # Password handling is now managed within the backup manager if needed for mounting mode = "full" if self.app.vollbackup_var.get() else "incremental" @@ -704,11 +685,11 @@ class Actions: self.app.backup_manager.start_backup( queue=self.app.queue, source_path=source_path, - dest_path=base_dest, # Pass the base destination path + dest_path=base_dest, is_system=False, source_name=source_name, is_dry_run=is_dry_run, - exclude_files=None, # User backups don't use the global exclude list here + exclude_files=None, source_size=source_size_bytes, is_compressed=is_compressed, is_encrypted=is_encrypted, diff --git a/pyimage_ui/system_backup_content_frame.py b/pyimage_ui/system_backup_content_frame.py index b340046..f1b1fa7 100644 --- a/pyimage_ui/system_backup_content_frame.py +++ b/pyimage_ui/system_backup_content_frame.py @@ -150,16 +150,12 @@ class SystemBackupContentFrame(ttk.Frame): "Password entry cancelled, aborting deletion.") return - info_file_to_delete = os.path.join( - self.backup_path, "pybackup", f"{selected_item_id}{'_encrypted' if is_encrypted else ''}.txt") - self.actions._set_ui_state(False) self.parent_view.show_deletion_status( Msg.STR["deleting_backup_in_progress"]) 
         self.backup_manager.start_delete_backup(
             path_to_delete=folder_to_delete,
-            info_file_path=info_file_to_delete,
             is_encrypted=is_encrypted,
             is_system=True,
             base_dest_path=self.backup_path,