Fix for potential data loss with "patterns_from". Also, display excluded files (#590).
parent ba8fbe7a44
commit c31702d092

3 changed files with 36 additions and 13 deletions

NEWS | 9
@@ -5,6 +5,13 @@
   files for a "create" action to prevent Borg from hanging.
  * #587: Warn when ignoring a configured "read_special" value of false, as true is needed when
    database hooks are enabled.
+ * #590: Fix for potential data loss (data not getting backed up) when the "patterns_from" option
+   was used with "source_directories" (or the "~/.borgmatic" path existed, which got injected into
+   "source_directories" implicitly). The fix is for borgmatic to convert "source_directories" into
+   patterns whenever "patterns_from" is used, working around a Borg bug:
+   https://github.com/borgbackup/borg/issues/6994
+ * #590: In "borgmatic create --list" output, display which files get excluded from the backup due
+   to patterns or excludes.
 * #591: Add support for Borg 2's "--match-archives" flag (replaces "--glob-archives").
 * Fix for "borgmatic --archive latest" not finding the latest archive when a verbosity is set.

@@ -20,7 +27,7 @@
  * #574: Fix for potential data loss (data not getting backed up) when the "patterns" option was
    used with "source_directories" (or the "~/.borgmatic" path existed, which got injected into
    "source_directories" implicitly). The fix is for borgmatic to convert "source_directories" into
-   patterns whenever "patterns" is used, working around a potential Borg bug:
+   patterns whenever "patterns" is used, working around a Borg bug:
    https://github.com/borgbackup/borg/issues/6994

 1.7.0
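To make the #590 entries above concrete: when "patterns_from" is configured, borgmatic now also writes the configured source directories into a temporary pattern file as Borg "R" (root) patterns and hands that file to borg with "--patterns-from", instead of relying on positional path arguments that the linked Borg bug can cause to be ignored. The sketch below only illustrates the idea; the directory names and command assembly are made-up examples, not borgmatic's actual code.

import tempfile

# Hypothetical configuration values, standing in for borgmatic's parsed config:
source_directories = ['/home/user', '/etc']
patterns_from = ['/etc/borgmatic/patterns']  # the user's own pattern file(s)

# Write each source directory as a Borg root ("R") pattern into a temporary file.
generated_pattern_file = tempfile.NamedTemporaryFile('w', delete=False)
generated_pattern_file.write('\n'.join(f'R {source}' for source in source_directories))
generated_pattern_file.flush()

# Both the user's pattern files and the generated one are passed to borg, so the backup
# roots come from pattern files rather than positional arguments.
borg_command = (
    ('borg', 'create')
    + tuple(flag for name in patterns_from for flag in ('--patterns-from', name))
    + ('--patterns-from', generated_pattern_file.name)
    + ('repo::archive',)
)
print(borg_command)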
@@ -112,7 +112,7 @@ def write_pattern_file(patterns=None, sources=None, pattern_file=None):
     If an optional open pattern file is given, overwrite it instead of making a new temporary file.
     Return None if no patterns are provided.
     '''
-    if not patterns:
+    if not patterns and not sources:
         return None

     if pattern_file is None:
@@ -121,7 +121,7 @@ def write_pattern_file(patterns=None, sources=None, pattern_file=None):
         pattern_file.seek(0)

     pattern_file.write(
-        '\n'.join(tuple(patterns) + tuple(f'R {source}' for source in (sources or [])))
+        '\n'.join(tuple(patterns or ()) + tuple(f'R {source}' for source in (sources or [])))
     )
     pattern_file.flush()

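Taken together, the two hunks above make write_pattern_file emit a pattern file even when only sources are given, and prefix each source with Borg's "R " root marker. A minimal standalone sketch of the patched behavior, using nothing beyond tempfile (the real function also supports overwriting an already-open pattern file):

import tempfile

def write_pattern_file_sketch(patterns=None, sources=None):
    # Mirrors the patched check: bail out only when there is nothing at all to write.
    if not patterns and not sources:
        return None

    pattern_file = tempfile.NamedTemporaryFile('w')
    # User-supplied patterns first, then each source directory as a Borg "R" root pattern.
    pattern_file.write(
        '\n'.join(tuple(patterns or ()) + tuple(f'R {source}' for source in (sources or [])))
    )
    pattern_file.flush()
    return pattern_file

pattern_file = write_pattern_file_sketch(sources=['/baz', '/quux'])
print(open(pattern_file.name).read())
# R /baz
# R /quux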
@@ -311,11 +311,18 @@ def create_archive(
         ),
     )

+    ensure_files_readable(location_config.get('patterns_from'), location_config.get('exclude_from'))
+
     try:
         working_directory = os.path.expanduser(location_config.get('working_directory'))
     except TypeError:
         working_directory = None
-    pattern_file = write_pattern_file(location_config.get('patterns'), sources)
+
+    pattern_file = (
+        write_pattern_file(location_config.get('patterns'), sources)
+        if location_config.get('patterns') or location_config.get('patterns_from')
+        else None
+    )
     exclude_file = write_pattern_file(
         expand_home_directories(location_config.get('exclude_patterns'))
     )
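The conditional above is the core of the #590 fix: whenever "patterns" or "patterns_from" is configured, borgmatic generates the pattern file (with source directories as roots) so borg always learns its roots from a pattern file in those cases. Roughly how that generated file and any configured "patterns_from" paths reach the command line, sketched from the make_pattern_flags call visible elsewhere in this diff (the real helper's signature and behavior may differ):

def make_pattern_flags_sketch(location_config, generated_pattern_filename=None):
    # One '--patterns-from FILE' pair per configured "patterns_from" entry, plus one for
    # the temporary file generated from "source_directories", if any.
    pattern_filenames = tuple(location_config.get('patterns_from') or ()) + (
        (generated_pattern_filename,) if generated_pattern_filename else ()
    )
    return tuple(
        flag for filename in pattern_filenames for flag in ('--patterns-from', filename)
    )

print(make_pattern_flags_sketch({'patterns_from': ['/etc/borgmatic/patterns']}, '/tmp/generated'))
# ('--patterns-from', '/etc/borgmatic/patterns', '--patterns-from', '/tmp/generated')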
@@ -354,8 +361,6 @@ def create_archive(
         ('--remote-ratelimit', str(upload_rate_limit)) if upload_rate_limit else ()
     )

-    ensure_files_readable(location_config.get('patterns_from'), location_config.get('exclude_from'))
-
     if stream_processes and location_config.get('read_special') is False:
         logger.warning(
             f'{repository}: Ignoring configured "read_special" value of false, as true is needed for database hooks.'
@@ -385,7 +390,7 @@ def create_archive(
         + (('--remote-path', remote_path) if remote_path else ())
         + (('--umask', str(umask)) if umask else ())
         + (('--lock-wait', str(lock_wait)) if lock_wait else ())
-        + (('--list', '--filter', 'AME-') if list_files and not json and not progress else ())
+        + (('--list', '--filter', 'AMEx-') if list_files and not json and not progress else ())
         + (('--dry-run',) if dry_run else ())
         + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
         + flags.make_repository_archive_flags(repository, archive_name_format, local_borg_version)
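The one-character change from 'AME-' to 'AMEx-' is what makes excluded files appear in "borgmatic create --list" output: borg's --filter option selects which item statuses get listed, and 'x' is the status borg assigns to excluded files. A small sketch with the statuses spelled out (the booleans are example values for the parameters of the same names):

# Example inputs standing in for create_archive's parameters of the same names:
list_files, json, progress = True, False, False

# Status letters: 'A' added, 'M' modified, 'E' error, 'x' excluded, '-' dry run.
list_filter_flags = (
    ('--list', '--filter', 'AMEx-')
    if list_files and not json and not progress
    else ()
)
print(list_filter_flags)  # ('--list', '--filter', 'AMEx-')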
@@ -425,6 +430,7 @@ def create_archive(
             ),
             pattern_file=exclude_file,
         )

         if exclude_file:
             create_command += make_exclude_flags(location_config, exclude_file.name)

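For the exclude side, the rewritten exclude file is handed to borg by filename via make_exclude_flags. A minimal sketch of just the piece relevant here (the real borgmatic helper also covers configured "exclude_from" files and other exclude options, so treat this as an illustration):

def make_exclude_from_flags_sketch(exclude_filename=None):
    # Point borg at the temporary exclude file, if one was written.
    return ('--exclude-from', exclude_filename) if exclude_filename else ()

print(make_exclude_from_flags_sketch('/tmp/excludes'))  # ('--exclude-from', '/tmp/excludes')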
@@ -130,6 +130,14 @@ def test_write_pattern_file_with_sources_writes_sources_as_roots():
     module.write_pattern_file(['R /foo', '+ /foo/bar'], sources=['/baz', '/quux'])


+def test_write_pattern_file_without_patterns_but_with_sources_writes_sources_as_roots():
+    temporary_file = flexmock(name='filename', flush=lambda: None)
+    temporary_file.should_receive('write').with_args('R /baz\nR /quux')
+    flexmock(module.tempfile).should_receive('NamedTemporaryFile').and_return(temporary_file)
+
+    module.write_pattern_file([], sources=['/baz', '/quux'])
+
+
 def test_write_pattern_file_with_empty_exclude_patterns_does_not_raise():
     module.write_pattern_file([])

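The new unit test above pins the behavior down with flexmock. For a mock-free spot check of the same thing, something along these lines should work, assuming write_pattern_file is importable from borgmatic.borg.create (an assumption based on this diff, not stated in it):

from borgmatic.borg.create import write_pattern_file

pattern_file = write_pattern_file([], sources=['/baz', '/quux'])
with open(pattern_file.name) as opened_file:
    assert opened_file.read() == 'R /baz\nR /quux'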
@@ -1146,10 +1154,12 @@ def test_create_archive_with_read_special_adds_special_files_to_excludes():
     flexmock(module).should_receive('expand_directories').and_return(())
     flexmock(module).should_receive('pattern_root_directories').and_return([])
     flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError)
-    flexmock(module).should_receive('expand_home_directories').and_return(())
+    flexmock(module).should_receive('expand_home_directories').and_return(()).and_return(
+        ('special',)
+    )
     flexmock(module).should_receive('write_pattern_file').and_return(None).and_return(
-        None
-    ).and_return(flexmock(name='/excludes'))
+        flexmock(name='/excludes')
+    )
     flexmock(module.feature).should_receive('available').and_return(True)
     flexmock(module).should_receive('ensure_files_readable')
     flexmock(module).should_receive('make_pattern_flags').and_return(())
@@ -1160,7 +1170,7 @@ def test_create_archive_with_read_special_adds_special_files_to_excludes():
         (f'repo::{DEFAULT_ARCHIVE_NAME}',)
     )
     flexmock(module.environment).should_receive('make_environment')
-    flexmock(module).should_receive('collect_special_file_paths').and_return(())
+    flexmock(module).should_receive('collect_special_file_paths').and_return(('special',))
     create_command = ('borg', 'create', '--read-special') + REPO_ARCHIVE_WITH_PATHS
     flexmock(module).should_receive('execute_command').with_args(
         create_command + ('--dry-run', '--list'),
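These two hunks update the read_special test so that a special file is actually detected: collect_special_file_paths now returns ('special',), and expand_home_directories/write_pattern_file get a second mocked return for the rewritten exclude file. The flow being exercised, very roughly (a sketch of the idea with made-up paths, not borgmatic's code): run the create command as a dry-run listing first, collect any special files it would read, and fold them into the excludes before the real run so borg doesn't hang on them.

# Illustrative only:
create_command = ('borg', 'create', '--read-special', 'repo::archive', '/some/source')
detection_command = create_command + ('--dry-run', '--list')  # used to find special files

special_file_paths = ('special',)    # what collect_special_file_paths reported
exclude_patterns = ('/var/cache',)   # example configured excludes
updated_excludes = exclude_patterns + special_file_paths
print(detection_command)
print(updated_excludes)  # ('/var/cache', 'special')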
@@ -1639,7 +1649,7 @@ def test_create_archive_with_files_calls_borg_with_list_parameter_and_warning_ou
     )
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'create', '--list', '--filter', 'AME-') + REPO_ARCHIVE_WITH_PATHS,
+        ('borg', 'create', '--list', '--filter', 'AMEx-') + REPO_ARCHIVE_WITH_PATHS,
         output_log_level=logging.WARNING,
         output_file=None,
         borg_local_path='borg',
@@ -1679,7 +1689,7 @@ def test_create_archive_with_files_and_log_info_calls_borg_with_list_parameter_a
     )
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'create', '--list', '--filter', 'AME-') + REPO_ARCHIVE_WITH_PATHS + ('--info',),
+        ('borg', 'create', '--list', '--filter', 'AMEx-') + REPO_ARCHIVE_WITH_PATHS + ('--info',),
         output_log_level=logging.INFO,
         output_file=None,
         borg_local_path='borg',