Include multiple configuration files with a single "!include" (#732).

Reviewed-on: https://projects.torsion.org/borgmatic-collective/borgmatic/pulls/737
Commit: e607de7df1
9 changed files with 528 additions and 139 deletions

NEWS (3 changes)

@@ -8,6 +8,9 @@
 * #728: Fix for "prune" action error when using the "keep_exclude_tags" option.
 * #730: Fix for Borg's interactive prompt on the "check --repair" action automatically getting
   answered "NO" even when the "check_i_know_what_i_am_doing" option isn't set.
+* #732: Include multiple configuration files with a single "!include". See the documentation for
+  more information:
+  https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#multiple-merge-includes
 
 1.8.0
 * #575: BREAKING: For the "borgmatic borg" action, instead of implicitly injecting

@@ -331,9 +331,8 @@ def make_parsers():
 
     global_plus_action_parser = ArgumentParser(
         description='''
-            Simple, configuration-driven backup software for servers and workstations. If none of
-            the action options are given, then borgmatic defaults to: create, prune, compact, and
-            check.
+            Simple, configuration-driven backup software for servers and workstations. If no actions
+            are given, then borgmatic defaults to: create, prune, compact, and check.
             ''',
         parents=[global_parser],
     )

@@ -1,6 +1,8 @@
 import functools
+import itertools
 import json
 import logging
+import operator
 import os
 
 import ruamel.yaml

@@ -8,34 +10,67 @@ import ruamel.yaml
 logger = logging.getLogger(__name__)
 
 
+def probe_and_include_file(filename, include_directories):
+    '''
+    Given a filename to include and a list of include directories to search for matching files,
+    probe for the file, load it, and return the loaded configuration as a data structure of nested
+    dicts, lists, etc.
+
+    Raise FileNotFoundError if the included file was not found.
+    '''
+    expanded_filename = os.path.expanduser(filename)
+
+    if os.path.isabs(expanded_filename):
+        return load_configuration(expanded_filename)
+
+    candidate_filenames = {
+        os.path.join(directory, expanded_filename) for directory in include_directories
+    }
+
+    for candidate_filename in candidate_filenames:
+        if os.path.exists(candidate_filename):
+            return load_configuration(candidate_filename)
+
+    raise FileNotFoundError(
+        f'Could not find include {filename} at {" or ".join(candidate_filenames)}'
+    )
+
+
 def include_configuration(loader, filename_node, include_directory):
     '''
-    Given a ruamel.yaml.loader.Loader, a ruamel.yaml.serializer.ScalarNode containing the included
-    filename, and an include directory path to search for matching files, load the given YAML
-    filename (ignoring the given loader so we can use our own) and return its contents as a data
-    structure of nested dicts and lists. If the filename is relative, probe for it within 1. the
-    current working directory and 2. the given include directory.
+    Given a ruamel.yaml.loader.Loader, a ruamel.yaml.nodes.ScalarNode containing the included
+    filename (or a list containing multiple such filenames), and an include directory path to search
+    for matching files, load the given YAML filenames (ignoring the given loader so we can use our
+    own) and return their contents as data structure of nested dicts, lists, etc. If the given
+    filename node's value is a scalar string, then the return value will be a single value. But if
+    the given node value is a list, then the return value will be a list of values, one per loaded
+    configuration file.
+
+    If a filename is relative, probe for it within 1. the current working directory and 2. the given
+    include directory.
 
     Raise FileNotFoundError if an included file was not found.
     '''
     include_directories = [os.getcwd(), os.path.abspath(include_directory)]
-    include_filename = os.path.expanduser(filename_node.value)
 
-    if not os.path.isabs(include_filename):
-        candidate_filenames = [
-            os.path.join(directory, include_filename) for directory in include_directories
-        ]
+    if isinstance(filename_node.value, str):
+        return probe_and_include_file(filename_node.value, include_directories)
 
-        for candidate_filename in candidate_filenames:
-            if os.path.exists(candidate_filename):
-                include_filename = candidate_filename
-                break
-        else:
-            raise FileNotFoundError(
-                f'Could not find include {filename_node.value} at {" or ".join(candidate_filenames)}'
-            )
+    if (
+        isinstance(filename_node.value, list)
+        and len(filename_node.value)
+        and isinstance(filename_node.value[0], ruamel.yaml.nodes.ScalarNode)
+    ):
+        # Reversing the values ensures the correct ordering if these includes are subsequently
+        # merged together.
+        return [
+            probe_and_include_file(node.value, include_directories)
+            for node in reversed(filename_node.value)
+        ]
 
-    return load_configuration(include_filename)
+    raise ValueError(
+        '!include value is not supported; use a single filename or a list of filenames'
+    )
 
 
 def raise_retain_node_error(loader, node):
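
As a quick illustration of the new behavior (a minimal sketch, not part of this diff, assuming a borgmatic checkout with this change on the Python path and using throw-away files of my own naming): giving `!include` a list of filenames produces a list of loaded values, reversed so that later includes win once they get merged.

```python
import os
import tempfile

from borgmatic.config import load

# Hypothetical scratch files, just to exercise the new list form of "!include".
with tempfile.TemporaryDirectory() as temp_directory:
    include1 = os.path.join(temp_directory, 'include1.yaml')
    include2 = os.path.join(temp_directory, 'include2.yaml')
    config_path = os.path.join(temp_directory, 'config.yaml')

    with open(include1, 'w') as include_file:
        include_file.write('value1')

    with open(include2, 'w') as include_file:
        include_file.write('value2')

    with open(config_path, 'w') as config_file:
        # Absolute include paths skip the include-directory probing in probe_and_include_file().
        config_file.write(f'key: !include [{include1}, {include2}]')

    # Mirrors test_load_configuration_inlines_multiple_file_include_as_list() further down:
    # the included values come back as a list, in reversed order.
    print(load.load_configuration(config_path))  # e.g. {'key': ['value2', 'value1']}
```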

@@ -53,7 +88,7 @@ def raise_retain_node_error(loader, node):
             'The !retain tag may only be used within a configuration file containing a merged !include tag.'
         )
 
-    raise ValueError('The !retain tag may only be used on a YAML mapping or sequence.')
+    raise ValueError('The !retain tag may only be used on a mapping or list.')
 
 
 def raise_omit_node_error(loader, node):

@@ -65,14 +100,14 @@ def raise_omit_node_error(loader, node):
     tags are handled by deep_merge_nodes() below.
     '''
     raise ValueError(
-        'The !omit tag may only be used on a scalar (e.g., string) list element within a configuration file containing a merged !include tag.'
+        'The !omit tag may only be used on a scalar (e.g., string) or list element within a configuration file containing a merged !include tag.'
     )
 
 
 class Include_constructor(ruamel.yaml.SafeConstructor):
     '''
     A YAML "constructor" (a ruamel.yaml concept) that supports a custom "!include" tag for including
-    separate YAML configuration files. Example syntax: `retention: !include common.yaml`
+    separate YAML configuration files. Example syntax: `option: !include common.yaml`
     '''
 
     def __init__(self, preserve_quotes=None, loader=None, include_directory=None):

@@ -81,6 +116,9 @@ class Include_constructor(ruamel.yaml.SafeConstructor):
             '!include',
             functools.partial(include_configuration, include_directory=include_directory),
         )
+
+        # These are catch-all error handlers for tags that don't get applied and removed by
+        # deep_merge_nodes() below.
         self.add_constructor('!retain', raise_retain_node_error)
         self.add_constructor('!omit', raise_omit_node_error)

@@ -90,8 +128,8 @@ class Include_constructor(ruamel.yaml.SafeConstructor):
         using the YAML '<<' merge key. Example syntax:
 
         ```
-        retention:
-            keep_daily: 1
+        option:
+            sub_option: 1
 
         <<: !include common.yaml
         ```

@@ -104,9 +142,15 @@ class Include_constructor(ruamel.yaml.SafeConstructor):
         for index, (key_node, value_node) in enumerate(node.value):
             if key_node.tag == u'tag:yaml.org,2002:merge' and value_node.tag == '!include':
-                included_value = representer.represent_data(self.construct_object(value_node))
-                node.value[index] = (key_node, included_value)
+                # Replace the merge include with a sequence of included configuration nodes ready
+                # for merging. The construct_object() call here triggers include_configuration()
+                # among other constructors.
+                node.value[index] = (
+                    key_node,
+                    representer.represent_data(self.construct_object(value_node)),
+                )
 
+        # This super().flatten_mapping() call actually performs "<<" merges.
         super(Include_constructor, self).flatten_mapping(node)
 
         node.value = deep_merge_nodes(node.value)
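
For context on what `flatten_mapping()` is being hooked here, a minimal sketch using plain ruamel.yaml rather than borgmatic code: standard YAML `<<` merge keys only merge mappings that come from anchors/aliases in the same document, and that merge is exactly what the swapped-in `!include` nodes piggyback on.

```python
import io

import ruamel.yaml

# Plain YAML merge keys, no custom tags involved: the aliased mapping is merged in, and any
# locally defined key wins over the merged-in one.
document = io.StringIO(
    '''
    defaults: &defaults
        keep_daily: 7
        keep_hourly: 24

    retention:
        <<: *defaults
        keep_daily: 1
    '''
)

yaml = ruamel.yaml.YAML(typ='safe')
print(yaml.load(document)['retention'])  # {'keep_daily': 1, 'keep_hourly': 24}
```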

@@ -138,7 +182,12 @@ def load_configuration(filename):
         file_contents = file.read()
         config = yaml.load(file_contents)
 
-    if config and 'constants' in config:
+    try:
+        has_constants = bool(config and 'constants' in config)
+    except TypeError:
+        has_constants = False
+
+    if has_constants:
         for key, value in config['constants'].items():
             value = json.dumps(value)
             file_contents = file_contents.replace(f'{{{key}}}', value.strip('"'))
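
Why the `try`/`except TypeError` above: when a configuration file contains nothing but a scalar, `yaml.load()` hands back that scalar, and the containment check against it blows up. A quick sketch of the failure the guard absorbs:

```python
# What load_configuration() now tolerates: a config that parses to a bare int (see the new
# test_load_configuration_with_only_integer_value_does_not_raise() test below).
config = 33

try:
    'constants' in config
except TypeError as error:
    print(error)  # argument of type 'int' is not iterable
```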

@@ -149,53 +198,92 @@ def load_configuration(filename):
     return config
 
 
-def filter_omitted_nodes(nodes):
+def filter_omitted_nodes(nodes, values):
     '''
-    Given a list of nodes, return a filtered list omitting any nodes with an "!omit" tag or with a
-    value matching such nodes.
+    Given a nested borgmatic configuration data structure as a list of tuples in the form of:
+
+        [
+            (
+                ruamel.yaml.nodes.ScalarNode as a key,
+                ruamel.yaml.nodes.MappingNode or other Node as a value,
+            ),
+            ...
+        ]
+
+    ... and a combined list of all values for those nodes, return a filtered list of the values,
+    omitting any that have an "!omit" tag (or with a value matching such nodes).
+
+    But if only a single node is given, bail and return the given values unfiltered, as "!omit" only
+    applies when there are merge includes (and therefore multiple nodes).
     '''
-    omitted_values = tuple(node.value for node in nodes if node.tag == '!omit')
+    if len(nodes) <= 1:
+        return values
 
-    return [node for node in nodes if node.value not in omitted_values]
+    omitted_values = tuple(node.value for node in values if node.tag == '!omit')
+
+    return [node for node in values if node.value not in omitted_values]
 
 
-DELETED_NODE = object()
+def merge_values(nodes):
+    '''
+    Given a nested borgmatic configuration data structure as a list of tuples in the form of:
+
+        [
+            (
+                ruamel.yaml.nodes.ScalarNode as a key,
+                ruamel.yaml.nodes.MappingNode or other Node as a value,
+            ),
+            ...
+        ]
+
+    ... merge its sequence or mapping node values and return the result. For sequence nodes, this
+    means appending together its contained lists. For mapping nodes, it means merging its contained
+    dicts.
+    '''
+    return functools.reduce(operator.add, (value.value for key, value in nodes))
 
 
 def deep_merge_nodes(nodes):
     '''
     Given a nested borgmatic configuration data structure as a list of tuples in the form of:
 
+        [
             (
                 ruamel.yaml.nodes.ScalarNode as a key,
                 ruamel.yaml.nodes.MappingNode or other Node as a value,
             ),
+            ...
+        ]
 
-    ... deep merge any node values corresponding to duplicate keys and return the result. If
-    there are colliding keys with non-MappingNode values (e.g., integers or strings), the last
-    of the values wins.
+    ... deep merge any node values corresponding to duplicate keys and return the result. The
+    purpose of merging like this is to support, for instance, merging one borgmatic configuration
+    file into another for reuse, such that a configuration option with sub-options does not
+    completely replace the corresponding option in a merged file.
+
+    If there are colliding keys with scalar values (e.g., integers or strings), the last of the
+    values wins.
 
     For instance, given node values of:
 
         [
             (
-                ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+                ScalarNode(tag='tag:yaml.org,2002:str', value='option'),
                 MappingNode(tag='tag:yaml.org,2002:map', value=[
                     (
-                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_hourly'),
-                        ScalarNode(tag='tag:yaml.org,2002:int', value='24')
+                        ScalarNode(tag='tag:yaml.org,2002:str', value='sub_option1'),
+                        ScalarNode(tag='tag:yaml.org,2002:int', value='1')
                     ),
                     (
-                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_daily'),
-                        ScalarNode(tag='tag:yaml.org,2002:int', value='7')
+                        ScalarNode(tag='tag:yaml.org,2002:str', value='sub_option2'),
+                        ScalarNode(tag='tag:yaml.org,2002:int', value='2')
                     ),
                 ]),
             ),
             (
-                ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+                ScalarNode(tag='tag:yaml.org,2002:str', value='option'),
                 MappingNode(tag='tag:yaml.org,2002:map', value=[
                     (
-                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_daily'),
+                        ScalarNode(tag='tag:yaml.org,2002:str', value='sub_option2'),
                         ScalarNode(tag='tag:yaml.org,2002:int', value='5')
                     ),
                 ]),
@@ -206,88 +294,95 @@ def deep_merge_nodes(nodes):
 
         [
             (
-                ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+                ScalarNode(tag='tag:yaml.org,2002:str', value='option'),
                 MappingNode(tag='tag:yaml.org,2002:map', value=[
                     (
-                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_hourly'),
-                        ScalarNode(tag='tag:yaml.org,2002:int', value='24')
+                        ScalarNode(tag='tag:yaml.org,2002:str', value='sub_option1'),
+                        ScalarNode(tag='tag:yaml.org,2002:int', value='1')
                     ),
                     (
-                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_daily'),
+                        ScalarNode(tag='tag:yaml.org,2002:str', value='sub_option2'),
                         ScalarNode(tag='tag:yaml.org,2002:int', value='5')
                     ),
                 ]),
             ),
         ]
 
+    This function supports multi-way merging, meaning that if the same option name exists three or
+    more times (at the same scope level), all of those instances get merged together.
+
     If a mapping or sequence node has a YAML "!retain" tag, then that node is not merged.
 
-    The purpose of deep merging like this is to support, for instance, merging one borgmatic
-    configuration file into another for reuse, such that a configuration option with sub-options
-    does not completely replace the corresponding option in a merged file.
-
-    Raise ValueError if a merge is implied using two incompatible types.
+    Raise ValueError if a merge is implied using multiple incompatible types.
     '''
-    # Map from original node key/value to the replacement merged node. DELETED_NODE as a replacement
-    # node indications deletion.
-    replaced_nodes = {}
-
-    # To find nodes that require merging, compare each node with each other node.
-    for a_key, a_value in nodes:
-        for b_key, b_value in nodes:
-            # If we've already considered one of the nodes for merging, skip it.
-            if (a_key, a_value) in replaced_nodes or (b_key, b_value) in replaced_nodes:
-                continue
-
-            # If the keys match and the values are different, we need to merge these two A and B nodes.
-            if a_key.tag == b_key.tag and a_key.value == b_key.value and a_value != b_value:
-                if not type(a_value) is type(b_value):
-                    raise ValueError(
-                        f'Incompatible types found when trying to merge "{a_key.value}:" values across configuration files: {type(a_value).id} and {type(b_value).id}'
-                    )
-
-                # Since we're merging into the B node, consider the A node a duplicate and remove it.
-                replaced_nodes[(a_key, a_value)] = DELETED_NODE
-
-                # If we're dealing with MappingNodes, recurse and merge its values as well.
-                if isinstance(b_value, ruamel.yaml.nodes.MappingNode):
-                    # A "!retain" tag says to skip deep merging for this node. Replace the tag so
-                    # downstream schema validation doesn't break on our application-specific tag.
-                    if b_value.tag == '!retain':
-                        b_value.tag = 'tag:yaml.org,2002:map'
-                    else:
-                        replaced_nodes[(b_key, b_value)] = (
-                            b_key,
-                            ruamel.yaml.nodes.MappingNode(
-                                tag=b_value.tag,
-                                value=deep_merge_nodes(a_value.value + b_value.value),
-                                start_mark=b_value.start_mark,
-                                end_mark=b_value.end_mark,
-                                flow_style=b_value.flow_style,
-                                comment=b_value.comment,
-                                anchor=b_value.anchor,
-                            ),
-                        )
-                # If we're dealing with SequenceNodes, merge by appending one sequence to the other.
-                elif isinstance(b_value, ruamel.yaml.nodes.SequenceNode):
-                    # A "!retain" tag says to skip deep merging for this node. Replace the tag so
-                    # downstream schema validation doesn't break on our application-specific tag.
-                    if b_value.tag == '!retain':
-                        b_value.tag = 'tag:yaml.org,2002:seq'
-                    else:
-                        replaced_nodes[(b_key, b_value)] = (
-                            b_key,
-                            ruamel.yaml.nodes.SequenceNode(
-                                tag=b_value.tag,
-                                value=filter_omitted_nodes(a_value.value + b_value.value),
-                                start_mark=b_value.start_mark,
-                                end_mark=b_value.end_mark,
-                                flow_style=b_value.flow_style,
-                                comment=b_value.comment,
-                                anchor=b_value.anchor,
-                            ),
-                        )
-
-    return [
-        replaced_nodes.get(node, node) for node in nodes if replaced_nodes.get(node) != DELETED_NODE
-    ]
+    merged_nodes = []
+
+    def get_node_key_name(node):
+        return node[0].value
+
+    # Bucket the nodes by their keys. Then merge all of the values sharing the same key.
+    for key_name, grouped_nodes in itertools.groupby(
+        sorted(nodes, key=get_node_key_name), get_node_key_name
+    ):
+        grouped_nodes = list(grouped_nodes)
+
+        # The merged node inherits its attributes from the final node in the group.
+        (last_node_key, last_node_value) = grouped_nodes[-1]
+        value_types = set(type(value) for (_, value) in grouped_nodes)
+
+        if len(value_types) > 1:
+            raise ValueError(
+                f'Incompatible types found when trying to merge "{key_name}:" values across configuration files: {", ".join(value_type.id for value_type in value_types)}'
+            )
+
+        # If we're dealing with MappingNodes, recurse and merge its values as well.
+        if ruamel.yaml.nodes.MappingNode in value_types:
+            # A "!retain" tag says to skip deep merging for this node. Replace the tag so
+            # downstream schema validation doesn't break on our application-specific tag.
+            if last_node_value.tag == '!retain' and len(grouped_nodes) > 1:
+                last_node_value.tag = 'tag:yaml.org,2002:map'
+                merged_nodes.append((last_node_key, last_node_value))
+            else:
+                merged_nodes.append(
+                    (
+                        last_node_key,
+                        ruamel.yaml.nodes.MappingNode(
+                            tag=last_node_value.tag,
+                            value=deep_merge_nodes(merge_values(grouped_nodes)),
+                            start_mark=last_node_value.start_mark,
+                            end_mark=last_node_value.end_mark,
+                            flow_style=last_node_value.flow_style,
+                            comment=last_node_value.comment,
+                            anchor=last_node_value.anchor,
+                        ),
+                    )
+                )
+
+            continue
+
+        # If we're dealing with SequenceNodes, merge by appending sequences together.
+        if ruamel.yaml.nodes.SequenceNode in value_types:
+            # A "!retain" tag says to skip deep merging for this node. Replace the tag so
+            # downstream schema validation doesn't break on our application-specific tag.
+            if last_node_value.tag == '!retain' and len(grouped_nodes) > 1:
+                last_node_value.tag = 'tag:yaml.org,2002:seq'
+                merged_nodes.append((last_node_key, last_node_value))
+            else:
+                merged_nodes.append(
+                    (
+                        last_node_key,
+                        ruamel.yaml.nodes.SequenceNode(
+                            tag=last_node_value.tag,
+                            value=filter_omitted_nodes(grouped_nodes, merge_values(grouped_nodes)),
+                            start_mark=last_node_value.start_mark,
+                            end_mark=last_node_value.end_mark,
+                            flow_style=last_node_value.flow_style,
+                            comment=last_node_value.comment,
+                            anchor=last_node_value.anchor,
+                        ),
+                    )
+                )
+
+            continue
+
+        merged_nodes.append((last_node_key, last_node_value))
+
+    return merged_nodes
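
The rewritten `deep_merge_nodes()` above replaces the pairwise A/B comparison with key-based bucketing. A simplified sketch of that bucketing, with plain tuples standing in for `(key_node, value_node)` pairs (note that `itertools.groupby()` only groups adjacent items, hence the `sorted()` call):

```python
import itertools


def get_key_name(node):
    # Stand-in for the real get_node_key_name(), which reads node[0].value from a ScalarNode.
    return node[0]


nodes = [('retention', 'a'), ('storage', 'b'), ('retention', 'c'), ('retention', 'd')]

for key_name, grouped_nodes in itertools.groupby(sorted(nodes, key=get_key_name), get_key_name):
    print(key_name, list(grouped_nodes))

# retention [('retention', 'a'), ('retention', 'c'), ('retention', 'd')]
# storage [('storage', 'b')]
```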
@@ -10,7 +10,11 @@ def normalize_sections(config_filename, config):
 
     Raise ValueError if the "prefix" option is set in both "location" and "consistency" sections.
     '''
-    location = config.get('location') or {}
+    try:
+        location = config.get('location') or {}
+    except AttributeError:
+        raise ValueError('Configuration does not contain any options')
+
     storage = config.get('storage') or {}
     consistency = config.get('consistency') or {}
     hooks = config.get('hooks') or {}
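
The `try`/`except AttributeError` added here covers configurations that parse to something without a `.get()` method at all, such as a bare scalar, turning the crash into a friendlier ValueError. A minimal sketch of the failure mode:

```python
# A configuration file containing only "33" parses to an int, which has no .get() method.
config = 33

try:
    config.get('location')
except AttributeError:
    print('Configuration does not contain any options')  # normalize_sections() raises ValueError here
```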
@@ -233,18 +233,57 @@ checks:
 <span class="minilink minilink-addedin">Prior to version 1.8.0</span> These
 options were organized into sections like `retention:` and `consistency:`.
 
-Once this include gets merged in, the resulting configuration would have all
-of the options from the original configuration file *and* the options from the
+Once this include gets merged in, the resulting configuration has all of the
+options from the original configuration file *and* the options from the
 include.
 
 Note that this `<<` include merging syntax is only for merging in mappings
-(configuration options and their values). But if you'd like to include a
-single value directly, please see the above about standard includes.
+(configuration options and their values). If you'd like to include a single
+value directly, please see above about standard includes.
 
-Additionally, there is a limitation preventing multiple `<<` include merges
-per file or option value. So for instance, that means you can do one `<<`
-merge at the global level, another `<<` within each nested option value, etc.
-(This is a YAML limitation.)
+
+### Multiple merge includes
+
+borgmatic has a limitation preventing multiple `<<` include merges per file or
+option value. This means you can do a single `<<` merge at the global level,
+another `<<` within each nested option value, etc. (This is a YAML
+limitation.) For instance:
+
+```yaml
+repositories:
+    - path: repo.borg
+
+# This won't work! You can't do multiple merges like this at the same level.
+<<: !include common1.yaml
+<<: !include common2.yaml
+```
+
+But read on for a way around this.
+
+<span class="minilink minilink-addedin">New in version 1.8.1</span> You can
+include and merge multiple configuration files all at once. For instance:
+
+```yaml
+repositories:
+    - path: repo.borg
+
+<<: !include [common1.yaml, common2.yaml, common3.yaml]
+```
+
+This merges in each included configuration file in turn, such that later files
+replace the options in earlier ones.
+
+Here's another way to do the same thing:
+
+```yaml
+repositories:
+    - path: repo.borg
+
+<<: !include
+    - common1.yaml
+    - common2.yaml
+    - common3.yaml
+```
 
 
 ### Deep merge

@@ -309,8 +348,8 @@ source_directories:
     - /etc
 ```
 
-<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put
-this option in the `location:` section of your configuration.
+<span class="minilink minilink-addedin">Prior to version 1.8.0</span> Put the
+`source_directories` option in the `location:` section of your configuration.
 
 Once this include gets merged in, the resulting configuration will have a
 `source_directories` value of `/etc` and `/var`—with `/home` omitted.

@@ -44,6 +44,14 @@ def test_load_configuration_replaces_complex_constants():
     assert module.load_configuration('config.yaml') == {'key': {'subkey': 'value'}}
 
 
+def test_load_configuration_with_only_integer_value_does_not_raise():
+    builtins = flexmock(sys.modules['builtins'])
+    config_file = io.StringIO('33')
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+    assert module.load_configuration('config.yaml') == 33
+
+
 def test_load_configuration_inlines_include_relative_to_current_directory():
     builtins = flexmock(sys.modules['builtins'])
     flexmock(module.os).should_receive('getcwd').and_return('/tmp')
@@ -124,6 +132,37 @@ def test_load_configuration_raises_if_absolute_include_does_not_exist():
         assert module.load_configuration('config.yaml')
 
 
+def test_load_configuration_inlines_multiple_file_include_as_list():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(True)
+    flexmock(module.os.path).should_receive('exists').never()
+    include1_file = io.StringIO('value1')
+    include1_file.name = '/root/include1.yaml'
+    builtins.should_receive('open').with_args('/root/include1.yaml').and_return(include1_file)
+    include2_file = io.StringIO('value2')
+    include2_file.name = '/root/include2.yaml'
+    builtins.should_receive('open').with_args('/root/include2.yaml').and_return(include2_file)
+    config_file = io.StringIO('key: !include [/root/include1.yaml, /root/include2.yaml]')
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    assert module.load_configuration('config.yaml') == {'key': ['value2', 'value1']}
+
+
+def test_load_configuration_include_with_unsupported_filename_type_raises():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(True)
+    flexmock(module.os.path).should_receive('exists').never()
+    config_file = io.StringIO('key: !include {path: /root/include.yaml}')
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    with pytest.raises(ValueError):
+        module.load_configuration('config.yaml')
+
+
 def test_load_configuration_merges_include():
     builtins = flexmock(sys.modules['builtins'])
     flexmock(module.os).should_receive('getcwd').and_return('/tmp')
@@ -149,6 +188,43 @@ def test_load_configuration_merges_include():
     assert module.load_configuration('config.yaml') == {'foo': 'override', 'baz': 'quux'}
 
 
+def test_load_configuration_merges_multiple_file_include():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    include1_file = io.StringIO(
+        '''
+        foo: bar
+        baz: quux
+        original: yes
+        '''
+    )
+    include1_file.name = 'include1.yaml'
+    builtins.should_receive('open').with_args('/tmp/include1.yaml').and_return(include1_file)
+    include2_file = io.StringIO(
+        '''
+        baz: second
+        '''
+    )
+    include2_file.name = 'include2.yaml'
+    builtins.should_receive('open').with_args('/tmp/include2.yaml').and_return(include2_file)
+    config_file = io.StringIO(
+        '''
+        foo: override
+        <<: !include [include1.yaml, include2.yaml]
+        '''
+    )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    assert module.load_configuration('config.yaml') == {
+        'foo': 'override',
+        'baz': 'second',
+        'original': 'yes',
+    }
+
+
 def test_load_configuration_with_retain_tag_merges_include_but_keeps_local_values():
     builtins = flexmock(sys.modules['builtins'])
     flexmock(module.os).should_receive('getcwd').and_return('/tmp')
@@ -438,8 +514,9 @@ def test_raise_omit_node_error_raises():
         module.raise_omit_node_error(loader=flexmock(), node=flexmock())
 
 
-def test_filter_omitted_nodes():
-    nodes = [
+def test_filter_omitted_nodes_discards_values_with_omit_tag_and_also_equal_values():
+    nodes = [flexmock(), flexmock()]
+    values = [
         module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='a'),
         module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='b'),
         module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='c'),
@@ -448,11 +525,139 @@ def test_filter_omitted_nodes():
         module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='c'),
     ]
 
-    result = module.filter_omitted_nodes(nodes)
+    result = module.filter_omitted_nodes(nodes, values)
 
     assert [item.value for item in result] == ['a', 'c', 'a', 'c']
 
 
+def test_filter_omitted_nodes_keeps_all_values_when_given_only_one_node():
+    nodes = [flexmock()]
+    values = [
+        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='a'),
+        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='b'),
+        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='c'),
+        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='a'),
+        module.ruamel.yaml.nodes.ScalarNode(tag='!omit', value='b'),
+        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='c'),
+    ]
+
+    result = module.filter_omitted_nodes(nodes, values)
+
+    assert [item.value for item in result] == ['a', 'b', 'c', 'a', 'b', 'c']
+
+
+def test_merge_values_combines_mapping_values():
+    nodes = [
+        (
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='option'),
+            module.ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='keep_hourly'
+                        ),
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:int', value='24'
+                        ),
+                    ),
+                    (
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='keep_daily'
+                        ),
+                        module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='7'),
+                    ),
+                ],
+            ),
+        ),
+        (
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='option'),
+            module.ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='keep_daily'
+                        ),
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:int', value='25'
+                        ),
+                    ),
+                ],
+            ),
+        ),
+        (
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='option'),
+            module.ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='keep_nanosecondly'
+                        ),
+                        module.ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:int', value='1000'
+                        ),
+                    ),
+                ],
+            ),
+        ),
+    ]
+
+    values = module.merge_values(nodes)
+
+    assert len(values) == 4
+    assert values[0][0].value == 'keep_hourly'
+    assert values[0][1].value == '24'
+    assert values[1][0].value == 'keep_daily'
+    assert values[1][1].value == '7'
+    assert values[2][0].value == 'keep_daily'
+    assert values[2][1].value == '25'
+    assert values[3][0].value == 'keep_nanosecondly'
+    assert values[3][1].value == '1000'
+
+
+def test_merge_values_combines_sequence_values():
+    nodes = [
+        (
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='option'),
+            module.ruamel.yaml.nodes.SequenceNode(
+                tag='tag:yaml.org,2002:seq',
+                value=[
+                    module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='1'),
+                    module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='2'),
+                ],
+            ),
+        ),
+        (
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='option'),
+            module.ruamel.yaml.nodes.SequenceNode(
+                tag='tag:yaml.org,2002:seq',
+                value=[
+                    module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='3'),
+                ],
+            ),
+        ),
+        (
+            module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='option'),
+            module.ruamel.yaml.nodes.SequenceNode(
+                tag='tag:yaml.org,2002:seq',
+                value=[
+                    module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='4'),
+                ],
+            ),
+        ),
+    ]
+
+    values = module.merge_values(nodes)
+
+    assert len(values) == 4
+    assert values[0].value == '1'
+    assert values[1].value == '2'
+    assert values[2].value == '3'
+    assert values[3].value == '4'
+
+
 def test_deep_merge_nodes_replaces_colliding_scalar_values():
     node_values = [
         (
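
The `merge_values()` behavior these tests pin down is a `functools.reduce()` over the grouped node values with `operator.add`, i.e. list concatenation, which works for both mapping node values and sequence node values since both are lists of nodes. A tiny sketch with plain lists standing in for node values:

```python
import functools
import operator

# Same shape as merge_values(): concatenate the .value lists of every grouped node.
print(functools.reduce(operator.add, ([1, 2], [3], [4])))  # [1, 2, 3, 4]
```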
@@ -499,10 +704,10 @@ def test_deep_merge_nodes_replaces_colliding_scalar_values():
     assert section_key.value == 'retention'
     options = section_value.value
     assert len(options) == 2
-    assert options[0][0].value == 'keep_hourly'
-    assert options[0][1].value == '24'
-    assert options[1][0].value == 'keep_daily'
-    assert options[1][1].value == '5'
+    assert options[0][0].value == 'keep_daily'
+    assert options[0][1].value == '5'
+    assert options[1][0].value == 'keep_hourly'
+    assert options[1][1].value == '24'
 
 
 def test_deep_merge_nodes_keeps_non_colliding_scalar_values():
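
The flipped assertion order in this test (and the two hunks below it) follows directly from the new implementation: `deep_merge_nodes()` now sorts nodes by key name before grouping, so merged options come back alphabetized.

```python
# 'keep_daily' sorts before 'keep_hourly', hence the new expected ordering above.
print(sorted(['keep_hourly', 'keep_daily', 'keep_minutely']))
# ['keep_daily', 'keep_hourly', 'keep_minutely']
```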
@@ -553,10 +758,10 @@ def test_deep_merge_nodes_keeps_non_colliding_scalar_values():
     assert section_key.value == 'retention'
     options = section_value.value
     assert len(options) == 3
-    assert options[0][0].value == 'keep_hourly'
-    assert options[0][1].value == '24'
-    assert options[1][0].value == 'keep_daily'
-    assert options[1][1].value == '7'
+    assert options[0][0].value == 'keep_daily'
+    assert options[0][1].value == '7'
+    assert options[1][0].value == 'keep_hourly'
+    assert options[1][1].value == '24'
     assert options[2][0].value == 'keep_minutely'
     assert options[2][1].value == '10'

@@ -629,10 +834,10 @@ def test_deep_merge_nodes_keeps_deeply_nested_values():
     assert section_key.value == 'storage'
     options = section_value.value
     assert len(options) == 2
-    assert options[0][0].value == 'lock_wait'
-    assert options[0][1].value == '5'
-    assert options[1][0].value == 'extra_borg_options'
-    nested_options = options[1][1].value
+    assert options[0][0].value == 'extra_borg_options'
+    assert options[1][0].value == 'lock_wait'
+    assert options[1][1].value == '5'
+    nested_options = options[0][1].value
     assert len(nested_options) == 2
     assert nested_options[0][0].value == 'init'
     assert nested_options[0][1].value == '--init-option'

@@ -117,7 +117,7 @@ def test_parse_configuration_with_schema_lacking_examples_does_not_raise():
     module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')
 
 
-def test_parse_configuration_inlines_include():
+def test_parse_configuration_inlines_include_inside_deprecated_section():
     mock_config_and_schema(
         '''
         source_directories:

tests/unit/config/test_load.py (new file, 37 lines)

@@ -0,0 +1,37 @@
+import pytest
+from flexmock import flexmock
+
+from borgmatic.config import load as module
+
+
+def test_probe_and_include_file_with_absolute_path_skips_probing():
+    config = flexmock()
+    flexmock(module).should_receive('load_configuration').with_args('/etc/include.yaml').and_return(
+        config
+    ).once()
+
+    assert module.probe_and_include_file('/etc/include.yaml', ['/etc', '/var']) == config
+
+
+def test_probe_and_include_file_with_relative_path_probes_include_directories():
+    config = flexmock()
+    flexmock(module.os.path).should_receive('exists').with_args('/etc/include.yaml').and_return(
+        False
+    )
+    flexmock(module.os.path).should_receive('exists').with_args('/var/include.yaml').and_return(
+        True
+    )
+    flexmock(module).should_receive('load_configuration').with_args('/etc/include.yaml').never()
+    flexmock(module).should_receive('load_configuration').with_args('/var/include.yaml').and_return(
+        config
+    ).once()
+
+    assert module.probe_and_include_file('include.yaml', ['/etc', '/var']) == config
+
+
+def test_probe_and_include_file_with_relative_path_and_missing_files_raises():
+    flexmock(module.os.path).should_receive('exists').and_return(False)
+    flexmock(module).should_receive('load_configuration').never()
+
+    with pytest.raises(FileNotFoundError):
+        module.probe_and_include_file('include.yaml', ['/etc', '/var'])

@@ -111,6 +111,13 @@ def test_normalize_sections_with_different_umask_values_raises():
         module.normalize_sections('test.yaml', config)
 
 
+def test_normalize_sections_with_only_scalar_raises():
+    config = 33
+
+    with pytest.raises(ValueError):
+        module.normalize_sections('test.yaml', config)
+
+
 @pytest.mark.parametrize(
     'config,expected_config,produces_logs',
     (