Better error messages! Switch the library used for validating configuration files (from pykwalify to jsonschema).

Dan Helfman 2021-06-22 13:27:59 -07:00
parent 77a860cc62
commit 27d37b606b
12 changed files with 434 additions and 361 deletions
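The gist of the change: borgmatic now loads both the user's configuration and its schema as plain YAML and hands them to jsonschema, collecting every validation error instead of stopping at the first one. A minimal standalone sketch of that flow, mirroring the validator changes further down (the file paths here are placeholders, not borgmatic defaults):

import jsonschema
import ruamel.yaml

# Load the user's configuration and the schema as plain YAML.
yaml = ruamel.yaml.YAML(typ='safe')
config = yaml.load(open('/etc/borgmatic/config.yaml'))
schema = yaml.load(open('schema.yaml'))

# iter_errors() collects every schema violation rather than raising on the first one.
validator = jsonschema.Draft7Validator(schema)
errors = tuple(validator.iter_errors(config))

for error in errors:
    # error.path is a deque of keys/indices leading to the offending value, which is
    # what gets formatted into messages like "At 'location.repositories[0]': ...".
    print(list(error.path), error.message)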

NEWS

@ -1,4 +1,6 @@
1.5.16.dev0 1.5.16.dev0
* Better error messages! Switch the library used for validating configuration files (from pykwalify
to jsonschema).
* Link borgmatic Ansible role from installation documentation: * Link borgmatic Ansible role from installation documentation:
https://torsion.org/borgmatic/docs/how-to/set-up-backups/#other-ways-to-install https://torsion.org/borgmatic/docs/how-to/set-up-backups/#other-ways-to-install


@ -17,7 +17,7 @@ def _convert_section(source_section_config, section_schema):
( (
option_name, option_name,
int(option_value) int(option_value)
if section_schema['map'].get(option_name, {}).get('type') == 'int' if section_schema['properties'].get(option_name, {}).get('type') == 'integer'
else option_value, else option_value,
) )
for option_name, option_value in source_section_config.items() for option_name, option_value in source_section_config.items()
@ -38,7 +38,7 @@ def convert_legacy_parsed_config(source_config, source_excludes, schema):
''' '''
destination_config = yaml.comments.CommentedMap( destination_config = yaml.comments.CommentedMap(
[ [
(section_name, _convert_section(section_config, schema['map'][section_name])) (section_name, _convert_section(section_config, schema['properties'][section_name]))
for section_name, section_config in source_config._asdict().items() for section_name, section_config in source_config._asdict().items()
] ]
) )
@ -54,11 +54,11 @@ def convert_legacy_parsed_config(source_config, source_excludes, schema):
destination_config['consistency']['checks'] = source_config.consistency['checks'].split(' ') destination_config['consistency']['checks'] = source_config.consistency['checks'].split(' ')
# Add comments to each section, and then add comments to the fields in each section. # Add comments to each section, and then add comments to the fields in each section.
generate.add_comments_to_configuration_map(destination_config, schema) generate.add_comments_to_configuration_object(destination_config, schema)
for section_name, section_config in destination_config.items(): for section_name, section_config in destination_config.items():
generate.add_comments_to_configuration_map( generate.add_comments_to_configuration_object(
section_config, schema['map'][section_name], indent=generate.INDENT section_config, schema['properties'][section_name], indent=generate.INDENT
) )
return destination_config return destination_config
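The only behavioral wrinkle in the legacy conversion is the integer coercion, which now keys off the schema's "properties" and "integer" type instead of pykwalify's "map" and "int". A condensed sketch of that coercion, mirroring the unit test near the end of this diff rather than the exact _convert_section() code:

# Legacy INI option values arrive as strings; the schema decides which become ints.
section_schema = {'type': 'object', 'properties': {'check_last': {'type': 'integer'}}}
source_section_config = {'check_last': '3'}

converted = {
    option_name: int(option_value)
    if section_schema['properties'].get(option_name, {}).get('type') == 'integer'
    else option_value
    for option_name, option_value in source_section_config.items()
}
assert converted == {'check_last': 3}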


@ -24,29 +24,27 @@ def _insert_newline_before_comment(config, field_name):
def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False): def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
''' '''
Given a loaded configuration schema, generate and return sample config for it. Include comments Given a loaded configuration schema, generate and return sample config for it. Include comments
for each section based on the schema "desc" description. for each section based on the schema "description".
''' '''
schema_type = schema.get('type')
example = schema.get('example') example = schema.get('example')
if example is not None: if example is not None:
return example return example
if 'seq' in schema: if schema_type == 'array':
config = yaml.comments.CommentedSeq( config = yaml.comments.CommentedSeq(
[ [_schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)]
_schema_to_sample_configuration(item_schema, level, parent_is_sequence=True)
for item_schema in schema['seq']
]
) )
add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT)) add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT))
elif 'map' in schema: elif schema_type == 'object':
config = yaml.comments.CommentedMap( config = yaml.comments.CommentedMap(
[ [
(field_name, _schema_to_sample_configuration(sub_schema, level + 1)) (field_name, _schema_to_sample_configuration(sub_schema, level + 1))
for field_name, sub_schema in schema['map'].items() for field_name, sub_schema in schema['properties'].items()
] ]
) )
indent = (level * INDENT) + (SEQUENCE_INDENT if parent_is_sequence else 0) indent = (level * INDENT) + (SEQUENCE_INDENT if parent_is_sequence else 0)
add_comments_to_configuration_map( add_comments_to_configuration_object(
config, schema, indent=indent, skip_first=parent_is_sequence config, schema, indent=indent, skip_first=parent_is_sequence
) )
else: else:
@ -132,8 +130,8 @@ def write_configuration(config_filename, rendered_config, mode=0o600):
def add_comments_to_configuration_sequence(config, schema, indent=0): def add_comments_to_configuration_sequence(config, schema, indent=0):
''' '''
If the given config sequence's items are maps, then mine the schema for the description of the If the given config sequence's items are object, then mine the schema for the description of the
map's first item, and slap that atop the sequence. Indent the comment the given number of object's first item, and slap that atop the sequence. Indent the comment the given number of
characters. characters.
Doing this for sequences of maps results in nice comments that look like: Doing this for sequences of maps results in nice comments that look like:
@ -142,16 +140,16 @@ def add_comments_to_configuration_sequence(config, schema, indent=0):
things: things:
# First key description. Added by this function. # First key description. Added by this function.
- key: foo - key: foo
# Second key description. Added by add_comments_to_configuration_map(). # Second key description. Added by add_comments_to_configuration_object().
other: bar other: bar
``` ```
''' '''
if 'map' not in schema['seq'][0]: if schema['items'].get('type') != 'object':
return return
for field_name in config[0].keys(): for field_name in config[0].keys():
field_schema = schema['seq'][0]['map'].get(field_name, {}) field_schema = schema['items']['properties'].get(field_name, {})
description = field_schema.get('desc') description = field_schema.get('description')
# No description to use? Skip it. # No description to use? Skip it.
if not field_schema or not description: if not field_schema or not description:
@ -160,7 +158,7 @@ def add_comments_to_configuration_sequence(config, schema, indent=0):
config[0].yaml_set_start_comment(description, indent=indent) config[0].yaml_set_start_comment(description, indent=indent)
# We only want the first key's description here, as the rest of the keys get commented by # We only want the first key's description here, as the rest of the keys get commented by
# add_comments_to_configuration_map(). # add_comments_to_configuration_object().
return return
@ -169,7 +167,7 @@ REQUIRED_KEYS = {'source_directories', 'repositories', 'keep_daily'}
COMMENTED_OUT_SENTINEL = 'COMMENT_OUT' COMMENTED_OUT_SENTINEL = 'COMMENT_OUT'
def add_comments_to_configuration_map(config, schema, indent=0, skip_first=False): def add_comments_to_configuration_object(config, schema, indent=0, skip_first=False):
''' '''
Using descriptions from a schema as a source, add those descriptions as comments to the given Using descriptions from a schema as a source, add those descriptions as comments to the given
config mapping, before each field. Indent the comment the given number of characters. config mapping, before each field. Indent the comment the given number of characters.
@ -178,8 +176,8 @@ def add_comments_to_configuration_map(config, schema, indent=0, skip_first=False
if skip_first and index == 0: if skip_first and index == 0:
continue continue
field_schema = schema['map'].get(field_name, {}) field_schema = schema['properties'].get(field_name, {})
description = field_schema.get('desc', '').strip() description = field_schema.get('description', '').strip()
# If this is an optional key, add an indicator to the comment flagging it to be commented # If this is an optional key, add an indicator to the comment flagging it to be commented
# out from the sample configuration. This sentinel is consumed by downstream processing that # out from the sample configuration. This sentinel is consumed by downstream processing that
@ -268,9 +266,9 @@ def merge_source_configuration_into_destination(destination_config, source_confi
def generate_sample_configuration(source_filename, destination_filename, schema_filename): def generate_sample_configuration(source_filename, destination_filename, schema_filename):
''' '''
Given an optional source configuration filename, and a required destination configuration Given an optional source configuration filename, and a required destination configuration
filename, and the path to a schema filename in pykwalify YAML schema format, write out a filename, and the path to a schema filename in a YAML rendition of the JSON Schema format,
sample configuration file based on that schema. If a source filename is provided, merge the write out a sample configuration file based on that schema. If a source filename is provided,
parsed contents of that configuration into the generated configuration. merge the parsed contents of that configuration into the generated configuration.
''' '''
schema = yaml.round_trip_load(open(schema_filename)) schema = yaml.round_trip_load(open(schema_filename))
source_config = None source_config = None
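Sample configuration generation now walks JSON Schema keywords ("type", "properties", "items") instead of pykwalify's "map"/"seq", with each option's "example" value short-circuiting the recursion and "description" supplying the comments. An illustrative call on a tiny, hypothetical schema fragment (assuming the generate module lives at borgmatic.config.generate, as the imports elsewhere in this diff suggest):

from borgmatic.config import generate

schema_fragment = {
    'type': 'object',
    'properties': {
        'keep_daily': {'type': 'integer', 'description': 'Number of daily archives to keep.', 'example': 7},
        'exclude_patterns': {'type': 'array', 'items': {'type': 'string'}, 'example': ['/etc/ssl']},
    },
}

# The generated sample data is just the examples, nested per the schema; the
# "description" text becomes YAML comments attached above each option.
sample = generate._schema_to_sample_configuration(schema_fragment)
assert dict(sample) == {'keep_daily': 7, 'exclude_patterns': ['/etc/ssl']}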


@ -1,19 +1,25 @@
name: Borgmatic configuration file schema type: object
version: 1 required:
map: - location
additionalProperties: false
properties:
location: location:
desc: | type: object
description: |
Where to look for files to backup, and where to store those backups. Where to look for files to backup, and where to store those backups.
See https://borgbackup.readthedocs.io/en/stable/quickstart.html and See https://borgbackup.readthedocs.io/en/stable/quickstart.html and
https://borgbackup.readthedocs.io/en/stable/usage/create.html https://borgbackup.readthedocs.io/en/stable/usage/create.html
for details. for details.
required: true required:
map: - source_directories
- repositories
additionalProperties: false
properties:
source_directories: source_directories:
required: true type: array
seq: items:
- type: str type: string
desc: | description: |
List of source directories to backup (required). Globs and List of source directories to backup (required). Globs and
tildes are expanded. Do not backslash spaces in path names. tildes are expanded. Do not backslash spaces in path names.
example: example:
@ -22,10 +28,10 @@ map:
- /var/log/syslog* - /var/log/syslog*
- /home/user/path with spaces - /home/user/path with spaces
repositories: repositories:
required: true type: array
seq: items:
- type: str type: string
desc: | description: |
Paths to local or remote repositories (required). Tildes are Paths to local or remote repositories (required). Tildes are
expanded. Multiple repositories are backed up to in expanded. Multiple repositories are backed up to in
sequence. Borg placeholders can be used. See the output of sequence. Borg placeholders can be used. See the output of
@ -37,36 +43,36 @@ map:
- user@backupserver:sourcehostname.borg - user@backupserver:sourcehostname.borg
- "user@backupserver:{fqdn}" - "user@backupserver:{fqdn}"
one_file_system: one_file_system:
type: bool type: boolean
desc: | description: |
Stay in same file system (do not cross mount points). Stay in same file system (do not cross mount points).
Defaults to false. But when a database hook is used, the Defaults to false. But when a database hook is used, the
setting here is ignored and one_file_system is considered setting here is ignored and one_file_system is considered
true. true.
example: true example: true
numeric_owner: numeric_owner:
type: bool type: boolean
desc: | description: |
Only store/extract numeric user and group identifiers. Only store/extract numeric user and group identifiers.
Defaults to false. Defaults to false.
example: true example: true
atime: atime:
type: bool type: boolean
desc: Store atime into archive. Defaults to true. description: Store atime into archive. Defaults to true.
example: false example: false
ctime: ctime:
type: bool type: boolean
desc: Store ctime into archive. Defaults to true. description: Store ctime into archive. Defaults to true.
example: false example: false
birthtime: birthtime:
type: bool type: boolean
desc: | description: |
Store birthtime (creation date) into archive. Defaults to Store birthtime (creation date) into archive. Defaults to
true. true.
example: false example: false
read_special: read_special:
type: bool type: boolean
desc: | description: |
Use Borg's --read-special flag to allow backup of block and Use Borg's --read-special flag to allow backup of block and
other special devices. Use with caution, as it will lead to other special devices. Use with caution, as it will lead to
problems if used when backing up special devices such as problems if used when backing up special devices such as
@ -75,30 +81,33 @@ map:
considered true. considered true.
example: false example: false
bsd_flags: bsd_flags:
type: bool type: boolean
desc: | description: |
Record bsdflags (e.g. NODUMP, IMMUTABLE) in archive. Record bsdflags (e.g. NODUMP, IMMUTABLE) in archive.
Defaults to true. Defaults to true.
example: true example: true
files_cache: files_cache:
type: str type: string
desc: | description: |
Mode in which to operate the files cache. See Mode in which to operate the files cache. See
http://borgbackup.readthedocs.io/en/stable/usage/create.html http://borgbackup.readthedocs.io/en/stable/usage/create.html
for details. Defaults to "ctime,size,inode". for details. Defaults to "ctime,size,inode".
example: ctime,size,inode example: ctime,size,inode
local_path: local_path:
type: str type: string
desc: Alternate Borg local executable. Defaults to "borg". description: |
Alternate Borg local executable. Defaults to "borg".
example: borg1 example: borg1
remote_path: remote_path:
type: str type: string
desc: Alternate Borg remote executable. Defaults to "borg". description: |
Alternate Borg remote executable. Defaults to "borg".
example: borg1 example: borg1
patterns: patterns:
seq: type: array
- type: str items:
desc: | type: string
description: |
Any paths matching these patterns are included/excluded from Any paths matching these patterns are included/excluded from
backups. Globs are expanded. (Tildes are not.) Note that backups. Globs are expanded. (Tildes are not.) Note that
Borg considers this option experimental. See the output of Borg considers this option experimental. See the output of
@ -110,9 +119,10 @@ map:
- '+ /home/susan' - '+ /home/susan'
- '- /home/*' - '- /home/*'
patterns_from: patterns_from:
seq: type: array
- type: str items:
desc: | type: string
description: |
Read include/exclude patterns from one or more separate Read include/exclude patterns from one or more separate
named files, one pattern per line. Note that Borg considers named files, one pattern per line. Note that Borg considers
this option experimental. See the output of "borg help this option experimental. See the output of "borg help
@ -120,9 +130,10 @@ map:
example: example:
- /etc/borgmatic/patterns - /etc/borgmatic/patterns
exclude_patterns: exclude_patterns:
seq: type: array
- type: str items:
desc: | type: string
description: |
Any paths matching these patterns are excluded from backups. Any paths matching these patterns are excluded from backups.
Globs and tildes are expanded. Do not backslash spaces in Globs and tildes are expanded. Do not backslash spaces in
path names. See the output of "borg help patterns" for more path names. See the output of "borg help patterns" for more
@ -133,59 +144,63 @@ map:
- /etc/ssl - /etc/ssl
- /home/user/path with spaces - /home/user/path with spaces
exclude_from: exclude_from:
seq: type: array
- type: str items:
desc: | type: string
description: |
Read exclude patterns from one or more separate named files, Read exclude patterns from one or more separate named files,
one pattern per line. See the output of "borg help patterns" one pattern per line. See the output of "borg help patterns"
for more details. for more details.
example: example:
- /etc/borgmatic/excludes - /etc/borgmatic/excludes
exclude_caches: exclude_caches:
type: bool type: boolean
desc: | description: |
Exclude directories that contain a CACHEDIR.TAG file. See Exclude directories that contain a CACHEDIR.TAG file. See
http://www.brynosaurus.com/cachedir/spec.html for details. http://www.brynosaurus.com/cachedir/spec.html for details.
Defaults to false. Defaults to false.
example: true example: true
exclude_if_present: exclude_if_present:
seq: type: array
- type: str items:
desc: | type: string
description: |
Exclude directories that contain a file with the given Exclude directories that contain a file with the given
filenames. Defaults to not set. filenames. Defaults to not set.
example: example:
- .nobackup - .nobackup
keep_exclude_tags: keep_exclude_tags:
type: bool type: boolean
desc: | description: |
If true, the exclude_if_present filename is included in If true, the exclude_if_present filename is included in
backups. Defaults to false, meaning that the backups. Defaults to false, meaning that the
exclude_if_present filename is omitted from backups. exclude_if_present filename is omitted from backups.
example: true example: true
exclude_nodump: exclude_nodump:
type: bool type: boolean
desc: | description: |
Exclude files with the NODUMP flag. Defaults to false. Exclude files with the NODUMP flag. Defaults to false.
example: true example: true
borgmatic_source_directory: borgmatic_source_directory:
type: str type: string
desc: | description: |
Path for additional source files used for temporary internal Path for additional source files used for temporary internal
state like borgmatic database dumps. Note that changing this state like borgmatic database dumps. Note that changing this
path prevents "borgmatic restore" from finding any database path prevents "borgmatic restore" from finding any database
dumps created before the change. Defaults to ~/.borgmatic dumps created before the change. Defaults to ~/.borgmatic
example: /tmp/borgmatic example: /tmp/borgmatic
storage: storage:
desc: | type: object
description: |
Repository storage options. See Repository storage options. See
https://borgbackup.readthedocs.io/en/stable/usage/create.html and https://borgbackup.readthedocs.io/en/stable/usage/create.html and
https://borgbackup.readthedocs.io/en/stable/usage/general.html for https://borgbackup.readthedocs.io/en/stable/usage/general.html for
details. details.
map: additionalProperties: false
properties:
encryption_passcommand: encryption_passcommand:
type: str type: string
desc: | description: |
The standard output of this command is used to unlock the The standard output of this command is used to unlock the
encryption key. Only use on repositories that were encryption key. Only use on repositories that were
initialized with passcommand/repokey/keyfile encryption. initialized with passcommand/repokey/keyfile encryption.
@ -194,8 +209,8 @@ map:
takes precedence. Defaults to not set. takes precedence. Defaults to not set.
example: "secret-tool lookup borg-repository repo-name" example: "secret-tool lookup borg-repository repo-name"
encryption_passphrase: encryption_passphrase:
type: str type: string
desc: | description: |
Passphrase to unlock the encryption key with. Only use on Passphrase to unlock the encryption key with. Only use on
repositories that were initialized with repositories that were initialized with
passphrase/repokey/keyfile encryption. Quote the value if it passphrase/repokey/keyfile encryption. Quote the value if it
@ -204,8 +219,8 @@ map:
set. set.
example: "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" example: "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~"
checkpoint_interval: checkpoint_interval:
type: int type: integer
desc: | description: |
Number of seconds between each checkpoint during a Number of seconds between each checkpoint during a
long-running backup. See long-running backup. See
https://borgbackup.readthedocs.io/en/stable/faq.html https://borgbackup.readthedocs.io/en/stable/faq.html
@ -213,8 +228,8 @@ map:
minutes). minutes).
example: 1800 example: 1800
chunker_params: chunker_params:
type: str type: string
desc: | description: |
Specify the parameters passed to then chunker Specify the parameters passed to then chunker
(CHUNK_MIN_EXP, CHUNK_MAX_EXP, HASH_MASK_BITS, (CHUNK_MIN_EXP, CHUNK_MAX_EXP, HASH_MASK_BITS,
HASH_WINDOW_SIZE). See HASH_WINDOW_SIZE). See
@ -222,73 +237,73 @@ map:
for details. Defaults to "19,23,21,4095". for details. Defaults to "19,23,21,4095".
example: 19,23,21,4095 example: 19,23,21,4095
compression: compression:
type: str type: string
desc: | description: |
Type of compression to use when creating archives. See Type of compression to use when creating archives. See
http://borgbackup.readthedocs.io/en/stable/usage/create.html http://borgbackup.readthedocs.io/en/stable/usage/create.html
for details. Defaults to "lz4". for details. Defaults to "lz4".
example: lz4 example: lz4
remote_rate_limit: remote_rate_limit:
type: int type: integer
desc: | description: |
Remote network upload rate limit in kiBytes/second. Defaults Remote network upload rate limit in kiBytes/second. Defaults
to unlimited. to unlimited.
example: 100 example: 100
temporary_directory: temporary_directory:
type: str type: string
desc: | description: |
Directory where temporary files are stored. Defaults to Directory where temporary files are stored. Defaults to
$TMPDIR $TMPDIR
example: /path/to/tmpdir example: /path/to/tmpdir
ssh_command: ssh_command:
type: str type: string
desc: | description: |
Command to use instead of "ssh". This can be used to specify Command to use instead of "ssh". This can be used to specify
ssh options. Defaults to not set. ssh options. Defaults to not set.
example: ssh -i /path/to/private/key example: ssh -i /path/to/private/key
borg_base_directory: borg_base_directory:
type: str type: string
desc: | description: |
Base path used for various Borg directories. Defaults to Base path used for various Borg directories. Defaults to
$HOME, ~$USER, or ~. $HOME, ~$USER, or ~.
example: /path/to/base example: /path/to/base
borg_config_directory: borg_config_directory:
type: str type: string
desc: | description: |
Path for Borg configuration files. Defaults to Path for Borg configuration files. Defaults to
$borg_base_directory/.config/borg $borg_base_directory/.config/borg
example: /path/to/base/config example: /path/to/base/config
borg_cache_directory: borg_cache_directory:
type: str type: string
desc: | description: |
Path for Borg cache files. Defaults to Path for Borg cache files. Defaults to
$borg_base_directory/.cache/borg $borg_base_directory/.cache/borg
example: /path/to/base/cache example: /path/to/base/cache
borg_security_directory: borg_security_directory:
type: str type: string
desc: | description: |
Path for Borg security and encryption nonce files. Defaults Path for Borg security and encryption nonce files. Defaults
to $borg_base_directory/.config/borg/security to $borg_base_directory/.config/borg/security
example: /path/to/base/config/security example: /path/to/base/config/security
borg_keys_directory: borg_keys_directory:
type: str type: string
desc: | description: |
Path for Borg encryption key files. Defaults to Path for Borg encryption key files. Defaults to
$borg_base_directory/.config/borg/keys $borg_base_directory/.config/borg/keys
example: /path/to/base/config/keys example: /path/to/base/config/keys
umask: umask:
type: scalar type: string
desc: Umask to be used for borg create. Defaults to 0077. description: Umask to be used for borg create. Defaults to 0077.
example: 0077 example: 0077
lock_wait: lock_wait:
type: int type: integer
desc: | description: |
Maximum seconds to wait for acquiring a repository/cache Maximum seconds to wait for acquiring a repository/cache
lock. Defaults to 1. lock. Defaults to 1.
example: 5 example: 5
archive_name_format: archive_name_format:
type: str type: string
desc: | description: |
Name of the archive. Borg placeholders can be used. See the Name of the archive. Borg placeholders can be used. See the
output of "borg help placeholders" for details. Defaults to output of "borg help placeholders" for details. Defaults to
"{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}". If you specify this "{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}". If you specify this
@ -298,40 +313,42 @@ map:
prefix in the consistency section as well. prefix in the consistency section as well.
example: "{hostname}-documents-{now}" example: "{hostname}-documents-{now}"
relocated_repo_access_is_ok: relocated_repo_access_is_ok:
type: bool type: boolean
desc: | description: |
Bypass Borg error about a repository that has been moved. Bypass Borg error about a repository that has been moved.
Defaults to false. Defaults to false.
example: true example: true
unknown_unencrypted_repo_access_is_ok: unknown_unencrypted_repo_access_is_ok:
type: bool type: boolean
desc: | description: |
Bypass Borg error about a previously unknown unencrypted Bypass Borg error about a previously unknown unencrypted
repository. Defaults to false. repository. Defaults to false.
example: true example: true
extra_borg_options: extra_borg_options:
map: type: object
additionalProperties: false
properties:
init: init:
type: str type: string
desc: | description: |
Extra command-line options to pass to "borg init". Extra command-line options to pass to "borg init".
example: "--make-parent-dirs" example: "--make-parent-dirs"
prune: prune:
type: str type: string
desc: | description: |
Extra command-line options to pass to "borg prune". Extra command-line options to pass to "borg prune".
example: "--save-space" example: "--save-space"
create: create:
type: str type: string
desc: | description: |
Extra command-line options to pass to "borg create". Extra command-line options to pass to "borg create".
example: "--no-files-cache" example: "--no-files-cache"
check: check:
type: str type: string
desc: | description: |
Extra command-line options to pass to "borg check". Extra command-line options to pass to "borg check".
example: "--save-space" example: "--save-space"
desc: | description: |
Additional options to pass directly to particular Borg Additional options to pass directly to particular Borg
commands, handy for Borg options that borgmatic does not yet commands, handy for Borg options that borgmatic does not yet
support natively. Note that borgmatic does not perform any support natively. Note that borgmatic does not perform any
@ -339,72 +356,76 @@ map:
"--verbosity 2" shows the exact Borg command-line "--verbosity 2" shows the exact Borg command-line
invocation. invocation.
retention: retention:
desc: | type: object
description: |
Retention policy for how many backups to keep in each category. See Retention policy for how many backups to keep in each category. See
https://borgbackup.readthedocs.io/en/stable/usage/prune.html for https://borgbackup.readthedocs.io/en/stable/usage/prune.html for
details. At least one of the "keep" options is required for pruning details. At least one of the "keep" options is required for pruning
to work. To skip pruning entirely, run "borgmatic create" or "check" to work. To skip pruning entirely, run "borgmatic create" or "check"
without the "prune" action. See borgmatic documentation for details. without the "prune" action. See borgmatic documentation for details.
map: additionalProperties: false
properties:
keep_within: keep_within:
type: str type: string
desc: Keep all archives within this time interval. description: Keep all archives within this time interval.
example: 3H example: 3H
keep_secondly: keep_secondly:
type: int type: integer
desc: Number of secondly archives to keep. description: Number of secondly archives to keep.
example: 60 example: 60
keep_minutely: keep_minutely:
type: int type: integer
desc: Number of minutely archives to keep. description: Number of minutely archives to keep.
example: 60 example: 60
keep_hourly: keep_hourly:
type: int type: integer
desc: Number of hourly archives to keep. description: Number of hourly archives to keep.
example: 24 example: 24
keep_daily: keep_daily:
type: int type: integer
desc: Number of daily archives to keep. description: Number of daily archives to keep.
example: 7 example: 7
keep_weekly: keep_weekly:
type: int type: integer
desc: Number of weekly archives to keep. description: Number of weekly archives to keep.
example: 4 example: 4
keep_monthly: keep_monthly:
type: int type: integer
desc: Number of monthly archives to keep. description: Number of monthly archives to keep.
example: 6 example: 6
keep_yearly: keep_yearly:
type: int type: integer
desc: Number of yearly archives to keep. description: Number of yearly archives to keep.
example: 1 example: 1
prefix: prefix:
type: str type: string
desc: | description: |
When pruning, only consider archive names starting with this When pruning, only consider archive names starting with this
prefix. Borg placeholders can be used. See the output of prefix. Borg placeholders can be used. See the output of
"borg help placeholders" for details. Defaults to "borg help placeholders" for details. Defaults to
"{hostname}-". Use an empty value to disable the default. "{hostname}-". Use an empty value to disable the default.
example: sourcehostname example: sourcehostname
consistency: consistency:
desc: | type: object
description: |
Consistency checks to run after backups. See Consistency checks to run after backups. See
https://borgbackup.readthedocs.io/en/stable/usage/check.html and https://borgbackup.readthedocs.io/en/stable/usage/check.html and
https://borgbackup.readthedocs.io/en/stable/usage/extract.html for https://borgbackup.readthedocs.io/en/stable/usage/extract.html for
details. details.
map: additionalProperties: false
properties:
checks: checks:
seq: type: array
- type: str items:
enum: [ type: string
'repository', enum:
'archives', - repository
'data', - archives
'extract', - data
'disabled' - extract
] - disabled
unique: true uniqueItems: true
desc: | description: |
List of one or more consistency checks to run: "repository", List of one or more consistency checks to run: "repository",
"archives", "data", and/or "extract". Defaults to "archives", "data", and/or "extract". Defaults to
"repository" and "archives". Set to "disabled" to disable "repository" and "archives". Set to "disabled" to disable
@ -417,9 +438,10 @@ map:
- repository - repository
- archives - archives
check_repositories: check_repositories:
seq: type: array
- type: str items:
desc: | type: string
description: |
Paths to a subset of the repositories in the location Paths to a subset of the repositories in the location
section on which to run consistency checks. Handy in case section on which to run consistency checks. Handy in case
some of your repositories are very large, and so running some of your repositories are very large, and so running
@ -429,15 +451,15 @@ map:
example: example:
- user@backupserver:sourcehostname.borg - user@backupserver:sourcehostname.borg
check_last: check_last:
type: int type: integer
desc: | description: |
Restrict the number of checked archives to the last n. Restrict the number of checked archives to the last n.
Applies only to the "archives" check. Defaults to checking Applies only to the "archives" check. Defaults to checking
all archives. all archives.
example: 3 example: 3
prefix: prefix:
type: str type: string
desc: | description: |
When performing the "archives" check, only consider archive When performing the "archives" check, only consider archive
names starting with this prefix. Borg placeholders can be names starting with this prefix. Borg placeholders can be
used. See the output of "borg help placeholders" for used. See the output of "borg help placeholders" for
@ -445,101 +467,115 @@ map:
disable the default. disable the default.
example: sourcehostname example: sourcehostname
output: output:
desc: | type: object
description: |
Options for customizing borgmatic's own output and logging. Options for customizing borgmatic's own output and logging.
map: additionalProperties: false
properties:
color: color:
type: bool type: boolean
desc: | description: |
Apply color to console output. Can be overridden with Apply color to console output. Can be overridden with
--no-color command-line flag. Defaults to true. --no-color command-line flag. Defaults to true.
example: false example: false
hooks: hooks:
desc: | type: object
description: |
Shell commands, scripts, or integrations to execute at various Shell commands, scripts, or integrations to execute at various
points during a borgmatic run. IMPORTANT: All provided commands and points during a borgmatic run. IMPORTANT: All provided commands and
scripts are executed with user permissions of borgmatic. Do not scripts are executed with user permissions of borgmatic. Do not
forget to set secure permissions on this configuration file (chmod forget to set secure permissions on this configuration file (chmod
0600) as well as on any script called from a hook (chmod 0700) to 0600) as well as on any script called from a hook (chmod 0700) to
prevent potential shell injection or privilege escalation. prevent potential shell injection or privilege escalation.
map: additionalProperties: false
properties:
before_backup: before_backup:
seq: type: array
- type: str items:
desc: | type: string
description: |
List of one or more shell commands or scripts to execute List of one or more shell commands or scripts to execute
before creating a backup, run once per configuration file. before creating a backup, run once per configuration file.
example: example:
- echo "Starting a backup." - echo "Starting a backup."
before_prune: before_prune:
seq: type: array
- type: str items:
desc: | type: string
description: |
List of one or more shell commands or scripts to execute List of one or more shell commands or scripts to execute
before pruning, run once per configuration file. before pruning, run once per configuration file.
example: example:
- echo "Starting pruning." - echo "Starting pruning."
before_check: before_check:
seq: type: array
- type: str items:
desc: | type: string
description: |
List of one or more shell commands or scripts to execute List of one or more shell commands or scripts to execute
before consistency checks, run once per configuration file. before consistency checks, run once per configuration file.
example: example:
- echo "Starting checks." - echo "Starting checks."
before_extract: before_extract:
seq: type: array
- type: str items:
desc: | type: string
description: |
List of one or more shell commands or scripts to execute List of one or more shell commands or scripts to execute
before extracting a backup, run once per configuration file. before extracting a backup, run once per configuration file.
example: example:
- echo "Starting extracting." - echo "Starting extracting."
after_backup: after_backup:
seq: type: array
- type: str items:
desc: | type: string
description: |
List of one or more shell commands or scripts to execute List of one or more shell commands or scripts to execute
after creating a backup, run once per configuration file. after creating a backup, run once per configuration file.
example: example:
- echo "Finished a backup." - echo "Finished a backup."
after_prune: after_prune:
seq: type: array
- type: str items:
desc: | type: string
description: |
List of one or more shell commands or scripts to execute List of one or more shell commands or scripts to execute
after pruning, run once per configuration file. after pruning, run once per configuration file.
example: example:
- echo "Finished pruning." - echo "Finished pruning."
after_check: after_check:
seq: type: array
- type: str items:
desc: | type: string
description: |
List of one or more shell commands or scripts to execute List of one or more shell commands or scripts to execute
after consistency checks, run once per configuration file. after consistency checks, run once per configuration file.
example: example:
- echo "Finished checks." - echo "Finished checks."
after_extract: after_extract:
seq: type: array
- type: str items:
desc: | type: string
description: |
List of one or more shell commands or scripts to execute List of one or more shell commands or scripts to execute
after extracting a backup, run once per configuration file. after extracting a backup, run once per configuration file.
example: example:
- echo "Finished extracting." - echo "Finished extracting."
on_error: on_error:
seq: type: array
- type: str items:
desc: | type: string
description: |
List of one or more shell commands or scripts to execute List of one or more shell commands or scripts to execute
when an exception occurs during a "prune", "create", or when an exception occurs during a "prune", "create", or
"check" action or an associated before/after hook. "check" action or an associated before/after hook.
example: example:
- echo "Error during prune/create/check." - echo "Error during prune/create/check."
before_everything: before_everything:
seq: type: array
- type: str items:
desc: | type: string
description: |
List of one or more shell commands or scripts to execute List of one or more shell commands or scripts to execute
before running all actions (if one of them is "create"). before running all actions (if one of them is "create").
These are collected from all configuration files and then These are collected from all configuration files and then
@ -547,9 +583,10 @@ map:
example: example:
- echo "Starting actions." - echo "Starting actions."
after_everything: after_everything:
seq: type: array
- type: str items:
desc: | type: string
description: |
List of one or more shell commands or scripts to execute List of one or more shell commands or scripts to execute
after running all actions (if one of them is "create"). after running all actions (if one of them is "create").
These are collected from all configuration files and then These are collected from all configuration files and then
@ -557,12 +594,15 @@ map:
example: example:
- echo "Completed actions." - echo "Completed actions."
postgresql_databases: postgresql_databases:
seq: type: array
- map: items:
type: object
required: ['name']
additionalProperties: false
properties:
name: name:
required: true type: string
type: str description: |
desc: |
Database name (required if using this hook). Or Database name (required if using this hook). Or
"all" to dump all databases on the host. Note "all" to dump all databases on the host. Note
that using this database hook implicitly enables that using this database hook implicitly enables
@ -570,26 +610,26 @@ map:
above) to support dump and restore streaming. above) to support dump and restore streaming.
example: users example: users
hostname: hostname:
type: str type: string
desc: | description: |
Database hostname to connect to. Defaults to Database hostname to connect to. Defaults to
connecting via local Unix socket. connecting via local Unix socket.
example: database.example.org example: database.example.org
port: port:
type: int type: integer
desc: Port to connect to. Defaults to 5432. description: Port to connect to. Defaults to 5432.
example: 5433 example: 5433
username: username:
type: str type: string
desc: | description: |
Username with which to connect to the database. Username with which to connect to the database.
Defaults to the username of the current user. Defaults to the username of the current user.
You probably want to specify the "postgres" You probably want to specify the "postgres"
superuser here when the database name is "all". superuser here when the database name is "all".
example: dbuser example: dbuser
password: password:
type: str type: string
desc: | description: |
Password with which to connect to the database. Password with which to connect to the database.
Omitting a password will only work if PostgreSQL Omitting a password will only work if PostgreSQL
is configured to trust the configured username is configured to trust the configured username
@ -597,9 +637,9 @@ map:
file. file.
example: trustsome1 example: trustsome1
format: format:
type: str type: string
enum: ['plain', 'custom', 'directory', 'tar'] enum: ['plain', 'custom', 'directory', 'tar']
desc: | description: |
Database dump output format. One of "plain", Database dump output format. One of "plain",
"custom", "directory", or "tar". Defaults to "custom", "directory", or "tar". Defaults to
"custom" (unlike raw pg_dump). See pg_dump "custom" (unlike raw pg_dump). See pg_dump
@ -607,45 +647,45 @@ map:
ignored when the database name is "all". ignored when the database name is "all".
example: directory example: directory
ssl_mode: ssl_mode:
type: str type: string
enum: ['disable', 'allow', 'prefer', enum: ['disable', 'allow', 'prefer',
'require', 'verify-ca', 'verify-full'] 'require', 'verify-ca', 'verify-full']
desc: | description: |
SSL mode to use to connect to the database SSL mode to use to connect to the database
server. One of "disable", "allow", "prefer", server. One of "disable", "allow", "prefer",
"require", "verify-ca" or "verify-full". "require", "verify-ca" or "verify-full".
Defaults to "disable". Defaults to "disable".
example: require example: require
ssl_cert: ssl_cert:
type: str type: string
desc: | description: |
Path to a client certificate. Path to a client certificate.
example: "/root/.postgresql/postgresql.crt" example: "/root/.postgresql/postgresql.crt"
ssl_key: ssl_key:
type: str type: string
desc: | description: |
Path to a private client key. Path to a private client key.
example: "/root/.postgresql/postgresql.key" example: "/root/.postgresql/postgresql.key"
ssl_root_cert: ssl_root_cert:
type: str type: string
desc: | description: |
Path to a root certificate containing a list of Path to a root certificate containing a list of
trusted certificate authorities. trusted certificate authorities.
example: "/root/.postgresql/root.crt" example: "/root/.postgresql/root.crt"
ssl_crl: ssl_crl:
type: str type: string
desc: | description: |
Path to a certificate revocation list. Path to a certificate revocation list.
example: "/root/.postgresql/root.crl" example: "/root/.postgresql/root.crl"
options: options:
type: str type: string
desc: | description: |
Additional pg_dump/pg_dumpall options to pass Additional pg_dump/pg_dumpall options to pass
directly to the dump command, without performing directly to the dump command, without performing
any validation on them. See pg_dump any validation on them. See pg_dump
documentation for details. documentation for details.
example: --role=someone example: --role=someone
desc: | description: |
List of one or more PostgreSQL databases to dump before List of one or more PostgreSQL databases to dump before
creating a backup, run once per configuration file. The creating a backup, run once per configuration file. The
database dumps are added to your source directories at database dumps are added to your source directories at
@ -655,12 +695,15 @@ map:
https://www.postgresql.org/docs/current/libpq-ssl.html for https://www.postgresql.org/docs/current/libpq-ssl.html for
details. details.
mysql_databases: mysql_databases:
seq: type: array
- map: items:
type: object
required: ['name']
additionalProperties: false
properties:
name: name:
required: true type: string
type: str description: |
desc: |
Database name (required if using this hook). Or Database name (required if using this hook). Or
"all" to dump all databases on the host. Note "all" to dump all databases on the host. Note
that using this database hook implicitly enables that using this database hook implicitly enables
@ -668,38 +711,38 @@ map:
above) to support dump and restore streaming. above) to support dump and restore streaming.
example: users example: users
hostname: hostname:
type: str type: string
desc: | description: |
Database hostname to connect to. Defaults to Database hostname to connect to. Defaults to
connecting via local Unix socket. connecting via local Unix socket.
example: database.example.org example: database.example.org
port: port:
type: int type: integer
desc: Port to connect to. Defaults to 3306. description: Port to connect to. Defaults to 3306.
example: 3307 example: 3307
username: username:
type: str type: string
desc: | description: |
Username with which to connect to the database. Username with which to connect to the database.
Defaults to the username of the current user. Defaults to the username of the current user.
example: dbuser example: dbuser
password: password:
type: str type: string
desc: | description: |
Password with which to connect to the database. Password with which to connect to the database.
Omitting a password will only work if MySQL is Omitting a password will only work if MySQL is
configured to trust the configured username configured to trust the configured username
without a password. without a password.
example: trustsome1 example: trustsome1
options: options:
type: str type: string
desc: | description: |
Additional mysqldump options to pass directly to Additional mysqldump options to pass directly to
the dump command, without performing any the dump command, without performing any
validation on them. See mysqldump documentation validation on them. See mysqldump documentation
for details. for details.
example: --skip-comments example: --skip-comments
desc: | description: |
List of one or more MySQL/MariaDB databases to dump before List of one or more MySQL/MariaDB databases to dump before
creating a backup, run once per configuration file. The creating a backup, run once per configuration file. The
database dumps are added to your source directories at database dumps are added to your source directories at
@ -708,8 +751,8 @@ map:
https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html or https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html or
https://mariadb.com/kb/en/library/mysqldump/ for details. https://mariadb.com/kb/en/library/mysqldump/ for details.
healthchecks: healthchecks:
type: str type: string
desc: | description: |
Healthchecks ping URL or UUID to notify when a backup Healthchecks ping URL or UUID to notify when a backup
begins, ends, or errors. Create an account at begins, ends, or errors. Create an account at
https://healthchecks.io if you'd like to use this service. https://healthchecks.io if you'd like to use this service.
@ -717,8 +760,8 @@ map:
example: example:
https://hc-ping.com/your-uuid-here https://hc-ping.com/your-uuid-here
cronitor: cronitor:
type: str type: string
desc: | description: |
Cronitor ping URL to notify when a backup begins, ends, or Cronitor ping URL to notify when a backup begins, ends, or
errors. Create an account at https://cronitor.io if you'd errors. Create an account at https://cronitor.io if you'd
like to use this service. See borgmatic monitoring like to use this service. See borgmatic monitoring
@ -726,8 +769,8 @@ map:
example: example:
https://cronitor.link/d3x0c1 https://cronitor.link/d3x0c1
pagerduty: pagerduty:
type: str type: string
desc: | description: |
PagerDuty integration key used to notify PagerDuty when a PagerDuty integration key used to notify PagerDuty when a
backup errors. Create an account at backup errors. Create an account at
https://www.pagerduty.com/ if you'd like to use this https://www.pagerduty.com/ if you'd like to use this
@ -735,8 +778,8 @@ map:
example: example:
a177cad45bd374409f78906a810a3074 a177cad45bd374409f78906a810a3074
cronhub: cronhub:
type: str type: string
desc: | description: |
Cronhub ping URL to notify when a backup begins, ends, or Cronhub ping URL to notify when a backup begins, ends, or
errors. Create an account at https://cronhub.io if you'd errors. Create an account at https://cronhub.io if you'd
like to use this service. See borgmatic monitoring like to use this service. See borgmatic monitoring
@ -745,7 +788,7 @@ map:
https://cronhub.io/start/1f5e3410-254c-11e8-b61d-55875966d01 https://cronhub.io/start/1f5e3410-254c-11e8-b61d-55875966d01
umask: umask:
type: scalar type: scalar
desc: | description: |
Umask used when executing hooks. Defaults to the umask that Umask used when executing hooks. Defaults to the umask that
borgmatic is run with. borgmatic is run with.
example: 0077 example: 0077
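Taken as a whole, the schema rewrite above is a mechanical translation between the two dialects: "map" becomes "type: object" with "properties" (plus "additionalProperties: false"), "seq" becomes "type: array" with a single "items" schema, "desc" becomes "description", the scalar types are renamed (str/int/bool to string/integer/boolean), per-option "required: true" moves up into the parent object's "required" list, and "unique: true" becomes "uniqueItems: true". For example, the "repositories" option translates roughly like this (shown here as Python data structures purely for illustration; the real schema lives in the YAML file above):

# pykwalify dialect (before):
pykwalify_repositories = {
    'required': True,
    'seq': [{'type': 'str'}],
    'desc': 'Paths to local or remote repositories (required).',
}

# JSON Schema dialect (after); "required" now lives on the parent "location" object:
json_schema_repositories = {
    'type': 'array',
    'items': {'type': 'string'},
    'description': 'Paths to local or remote repositories (required).',
}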


@ -1,9 +1,7 @@
import logging
import os import os
import jsonschema
import pkg_resources import pkg_resources
import pykwalify.core
import pykwalify.errors
import ruamel.yaml import ruamel.yaml
from borgmatic.config import load, normalize, override from borgmatic.config import load, normalize, override
@ -17,15 +15,40 @@ def schema_filename():
return pkg_resources.resource_filename('borgmatic', 'config/schema.yaml') return pkg_resources.resource_filename('borgmatic', 'config/schema.yaml')
def format_error_path_element(path_element):
'''
Given a path element into a JSON data structure, format it for display as a string.
'''
if isinstance(path_element, int):
return str('[{}]'.format(path_element))
return str('.{}'.format(path_element))
def format_error(error):
'''
Given an instance of jsonschema.exceptions.ValidationError, format it for display as a string.
'''
if not error.path:
return 'At the top level: {}'.format(error.message)
formatted_path = ''.join(format_error_path_element(element) for element in error.path)
return "At '{}': {}".format(formatted_path.lstrip('.'), error.message)
class Validation_error(ValueError): class Validation_error(ValueError):
''' '''
A collection of error message strings generated when attempting to validate a particular A collection of error messages generated when attempting to validate a particular
configurartion file. configuration file.
''' '''
def __init__(self, config_filename, error_messages): def __init__(self, config_filename, errors):
'''
Given a configuration filename path and a sequence of
jsonschema.exceptions.ValidationError instances, create a Validation_error.
'''
self.config_filename = config_filename self.config_filename = config_filename
self.error_messages = error_messages self.errors = errors
def __str__(self): def __str__(self):
''' '''
@ -33,7 +56,7 @@ class Validation_error(ValueError):
''' '''
return 'An error occurred while parsing a configuration file at {}:\n'.format( return 'An error occurred while parsing a configuration file at {}:\n'.format(
self.config_filename self.config_filename
) + '\n'.join(self.error_messages) ) + '\n'.join(format_error(error) for error in self.errors)
def apply_logical_validation(config_filename, parsed_configuration): def apply_logical_validation(config_filename, parsed_configuration):
@ -65,29 +88,12 @@ def apply_logical_validation(config_filename, parsed_configuration):
) )
def remove_examples(schema):
'''
pykwalify gets angry if the example field is not a string. So rather than bend to its will,
remove all examples from the given schema before passing the schema to pykwalify.
'''
if 'map' in schema:
for item_name, item_schema in schema['map'].items():
item_schema.pop('example', None)
remove_examples(item_schema)
elif 'seq' in schema:
for item_schema in schema['seq']:
item_schema.pop('example', None)
remove_examples(item_schema)
return schema
def parse_configuration(config_filename, schema_filename, overrides=None): def parse_configuration(config_filename, schema_filename, overrides=None):
''' '''
Given the path to a config filename in YAML format, the path to a schema filename in pykwalify Given the path to a config filename in YAML format, the path to a schema filename in a YAML
YAML schema format, a sequence of configuration file override strings in the form of rendition of JSON Schema format, a sequence of configuration file override strings in the form
"section.option=value", return the parsed configuration as a data structure of nested dicts and of "section.option=value", return the parsed configuration as a data structure of nested dicts
lists corresponding to the schema. Example return value: and lists corresponding to the schema. Example return value:
{'location': {'source_directories': ['/home', '/etc'], 'repository': 'hostname.borg'}, {'location': {'source_directories': ['/home', '/etc'], 'repository': 'hostname.borg'},
'retention': {'keep_daily': 7}, 'consistency': {'checks': ['repository', 'archives']}} 'retention': {'keep_daily': 7}, 'consistency': {'checks': ['repository', 'archives']}}
@ -95,8 +101,6 @@ def parse_configuration(config_filename, schema_filename, overrides=None):
Raise FileNotFoundError if the file does not exist, PermissionError if the user does not Raise FileNotFoundError if the file does not exist, PermissionError if the user does not
have permissions to read the file, or Validation_error if the config does not match the schema. have permissions to read the file, or Validation_error if the config does not match the schema.
''' '''
logging.getLogger('pykwalify').setLevel(logging.ERROR)
try: try:
config = load.load_configuration(config_filename) config = load.load_configuration(config_filename)
schema = load.load_configuration(schema_filename) schema = load.load_configuration(schema_filename)
@ -106,15 +110,15 @@ def parse_configuration(config_filename, schema_filename, overrides=None):
override.apply_overrides(config, overrides) override.apply_overrides(config, overrides)
normalize.normalize(config) normalize.normalize(config)
validator = pykwalify.core.Core(source_data=config, schema_data=remove_examples(schema)) validator = jsonschema.Draft7Validator(schema)
parsed_result = validator.validate(raise_exception=False) validation_errors = tuple(validator.iter_errors(config))
if validator.validation_errors: if validation_errors:
raise Validation_error(config_filename, validator.validation_errors) raise Validation_error(config_filename, validation_errors)
apply_logical_validation(config_filename, parsed_result) apply_logical_validation(config_filename, config)
return parsed_result return config
def normalize_repository_path(repository): def normalize_repository_path(repository):
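The improved error messages come from the new format_error()/format_error_path_element() helpers above, which turn a jsonschema error's path (a deque of keys and list indices) into a dotted, indexed location. A small illustration of the formatting rule, using a hypothetical error path:

import collections

# For a failure at config['location']['repositories'][0], jsonschema reports:
path = collections.deque(['location', 'repositories', 0])

# format_error_path_element() renders ints as "[N]" and other keys as ".name":
formatted = ''.join(
    '[{}]'.format(element) if isinstance(element, int) else '.{}'.format(element)
    for element in path
).lstrip('.')

assert formatted == 'location.repositories[0]'
# ...so the user sees something like: At 'location.repositories[0]': 12 is not of type 'string'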


@ -1,4 +1,5 @@
import logging import logging
import logging.handlers
import os import os
import sys import sys


@ -30,7 +30,7 @@ setup(
}, },
obsoletes=['atticmatic'], obsoletes=['atticmatic'],
install_requires=( install_requires=(
'pykwalify>=1.6.0,<14.06', 'jsonschema',
'requests', 'requests',
'ruamel.yaml>0.15.0,<0.18.0', 'ruamel.yaml>0.15.0,<0.18.0',
'setuptools', 'setuptools',


@ -1,26 +1,21 @@
appdirs==1.4.4; python_version >= '3.8' appdirs==1.4.4; python_version >= '3.8'
atomicwrites==1.4.0
attrs==20.3.0; python_version >= '3.8' attrs==20.3.0; python_version >= '3.8'
black==19.10b0; python_version >= '3.8' black==19.10b0; python_version >= '3.8'
click==7.1.2; python_version >= '3.8' click==7.1.2; python_version >= '3.8'
colorama==0.4.4 colorama==0.4.4
coverage==5.3 coverage==5.3
docopt==0.6.2
flake8==3.8.4 flake8==3.8.4
flexmock==0.10.4 flexmock==0.10.4
isort==5.6.4 isort==5.9.1
mccabe==0.6.1 mccabe==0.6.1
more-itertools==8.6.0
pluggy==0.13.1 pluggy==0.13.1
pathspec==0.8.1; python_version >= '3.8' pathspec==0.8.1; python_version >= '3.8'
py==1.10.0 py==1.10.0
pycodestyle==2.6.0 pycodestyle==2.6.0
pyflakes==2.2.0 pyflakes==2.2.0
pykwalify==1.7.0 jsonschema==3.2.0
pytest==6.1.2 pytest==6.1.2
pytest-cov==2.10.1 pytest-cov==2.10.1
python-dateutil==2.8.1
PyYAML==5.4.1
regex; python_version >= '3.8' regex; python_version >= '3.8'
requests==2.25.0 requests==2.25.0
ruamel.yaml>0.15.0,<0.18.0 ruamel.yaml>0.15.0,<0.18.0


@ -122,38 +122,44 @@ def test_write_configuration_with_already_existing_directory_does_not_raise():
def test_add_comments_to_configuration_sequence_of_strings_does_not_raise(): def test_add_comments_to_configuration_sequence_of_strings_does_not_raise():
config = module.yaml.comments.CommentedSeq(['foo', 'bar']) config = module.yaml.comments.CommentedSeq(['foo', 'bar'])
schema = {'seq': [{'type': 'str'}]} schema = {'type': 'array', 'items': {'type': 'string'}}
module.add_comments_to_configuration_sequence(config, schema) module.add_comments_to_configuration_sequence(config, schema)
def test_add_comments_to_configuration_sequence_of_maps_does_not_raise(): def test_add_comments_to_configuration_sequence_of_maps_does_not_raise():
config = module.yaml.comments.CommentedSeq([module.yaml.comments.CommentedMap([('foo', 'yo')])]) config = module.yaml.comments.CommentedSeq([module.yaml.comments.CommentedMap([('foo', 'yo')])])
schema = {'seq': [{'map': {'foo': {'desc': 'yo'}}}]} schema = {
'type': 'array',
'items': {'type': 'object', 'properties': {'foo': {'description': 'yo'}}},
}
module.add_comments_to_configuration_sequence(config, schema) module.add_comments_to_configuration_sequence(config, schema)
def test_add_comments_to_configuration_sequence_of_maps_without_description_does_not_raise(): def test_add_comments_to_configuration_sequence_of_maps_without_description_does_not_raise():
config = module.yaml.comments.CommentedSeq([module.yaml.comments.CommentedMap([('foo', 'yo')])]) config = module.yaml.comments.CommentedSeq([module.yaml.comments.CommentedMap([('foo', 'yo')])])
schema = {'seq': [{'map': {'foo': {}}}]} schema = {'type': 'array', 'items': {'type': 'object', 'properties': {'foo': {}}}}
module.add_comments_to_configuration_sequence(config, schema) module.add_comments_to_configuration_sequence(config, schema)
def test_add_comments_to_configuration_map_does_not_raise(): def test_add_comments_to_configuration_object_does_not_raise():
# Ensure that it can deal with fields both in the schema and missing from the schema. # Ensure that it can deal with fields both in the schema and missing from the schema.
config = module.yaml.comments.CommentedMap([('foo', 33), ('bar', 44), ('baz', 55)]) config = module.yaml.comments.CommentedMap([('foo', 33), ('bar', 44), ('baz', 55)])
schema = {'map': {'foo': {'desc': 'Foo'}, 'bar': {'desc': 'Bar'}}} schema = {
'type': 'object',
'properties': {'foo': {'description': 'Foo'}, 'bar': {'description': 'Bar'}},
}
module.add_comments_to_configuration_map(config, schema) module.add_comments_to_configuration_object(config, schema)
def test_add_comments_to_configuration_map_with_skip_first_does_not_raise(): def test_add_comments_to_configuration_object_with_skip_first_does_not_raise():
config = module.yaml.comments.CommentedMap([('foo', 33)]) config = module.yaml.comments.CommentedMap([('foo', 33)])
schema = {'map': {'foo': {'desc': 'Foo'}}} schema = {'type': 'object', 'properties': {'foo': {'description': 'Foo'}}}
module.add_comments_to_configuration_map(config, schema, skip_first=True) module.add_comments_to_configuration_object(config, schema, skip_first=True)
def test_remove_commented_out_sentinel_keeps_other_comments(): def test_remove_commented_out_sentinel_keeps_other_comments():
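The test updates above also illustrate the schema grammar change that comes with the new validator. Roughly, pykwalify keywords translate to JSON Schema keywords as in this sketch; the field names and descriptions are illustrative only, not the real borgmatic schema:

# pykwalify-style schema (before):
pykwalify_style = {
    'map': {
        'checks': {'seq': [{'type': 'str'}], 'desc': 'Consistency checks to run.'},
        'check_last': {'type': 'int', 'desc': 'Number of archives to check.'},
    }
}

# Equivalent JSON Schema (after): map -> type: object + properties, seq -> type: array
# + items, str -> string, int -> integer, desc -> description.
json_schema_style = {
    'type': 'object',
    'properties': {
        'checks': {
            'type': 'array',
            'items': {'type': 'string'},
            'description': 'Consistency checks to run.',
        },
        'check_last': {'type': 'integer', 'description': 'Number of archives to check.'},
    },
}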


@ -12,7 +12,7 @@ Parsed_config = namedtuple('Parsed_config', ('location', 'storage', 'retention',
def test_convert_section_generates_integer_value_for_integer_type_in_schema(): def test_convert_section_generates_integer_value_for_integer_type_in_schema():
flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict) flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict)
source_section_config = OrderedDict([('check_last', '3')]) source_section_config = OrderedDict([('check_last', '3')])
section_schema = {'map': {'check_last': {'type': 'int'}}} section_schema = {'type': 'object', 'properties': {'check_last': {'type': 'integer'}}}
destination_config = module._convert_section(source_section_config, section_schema) destination_config = module._convert_section(source_section_config, section_schema)
@ -21,7 +21,7 @@ def test_convert_section_generates_integer_value_for_integer_type_in_schema():
def test_convert_legacy_parsed_config_transforms_source_config_to_mapping(): def test_convert_legacy_parsed_config_transforms_source_config_to_mapping():
flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict) flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict)
flexmock(module.generate).should_receive('add_comments_to_configuration_map') flexmock(module.generate).should_receive('add_comments_to_configuration_object')
source_config = Parsed_config( source_config = Parsed_config(
location=OrderedDict([('source_directories', '/home'), ('repository', 'hostname.borg')]), location=OrderedDict([('source_directories', '/home'), ('repository', 'hostname.borg')]),
storage=OrderedDict([('encryption_passphrase', 'supersecret')]), storage=OrderedDict([('encryption_passphrase', 'supersecret')]),
@ -29,7 +29,10 @@ def test_convert_legacy_parsed_config_transforms_source_config_to_mapping():
consistency=OrderedDict([('checks', 'repository')]), consistency=OrderedDict([('checks', 'repository')]),
) )
source_excludes = ['/var'] source_excludes = ['/var']
schema = {'map': defaultdict(lambda: {'map': {}})} schema = {
'type': 'object',
'properties': defaultdict(lambda: {'type': 'object', 'properties': {}}),
}
destination_config = module.convert_legacy_parsed_config(source_config, source_excludes, schema) destination_config = module.convert_legacy_parsed_config(source_config, source_excludes, schema)
@ -54,7 +57,7 @@ def test_convert_legacy_parsed_config_transforms_source_config_to_mapping():
def test_convert_legacy_parsed_config_splits_space_separated_values(): def test_convert_legacy_parsed_config_splits_space_separated_values():
flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict) flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict)
flexmock(module.generate).should_receive('add_comments_to_configuration_map') flexmock(module.generate).should_receive('add_comments_to_configuration_object')
source_config = Parsed_config( source_config = Parsed_config(
location=OrderedDict( location=OrderedDict(
[('source_directories', '/home /etc'), ('repository', 'hostname.borg')] [('source_directories', '/home /etc'), ('repository', 'hostname.borg')]
@ -64,7 +67,10 @@ def test_convert_legacy_parsed_config_splits_space_separated_values():
consistency=OrderedDict([('checks', 'repository archives')]), consistency=OrderedDict([('checks', 'repository archives')]),
) )
source_excludes = ['/var'] source_excludes = ['/var']
schema = {'map': defaultdict(lambda: {'map': {}})} schema = {
'type': 'object',
'properties': defaultdict(lambda: {'type': 'object', 'properties': {}}),
}
destination_config = module.convert_legacy_parsed_config(source_config, source_excludes, schema) destination_config = module.convert_legacy_parsed_config(source_config, source_excludes, schema)


@ -8,24 +8,32 @@ from borgmatic.config import generate as module
def test_schema_to_sample_configuration_generates_config_map_with_examples(): def test_schema_to_sample_configuration_generates_config_map_with_examples():
flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict) flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict)
flexmock(module).should_receive('add_comments_to_configuration_map') flexmock(module).should_receive('add_comments_to_configuration_object')
schema = { schema = {
'map': OrderedDict( 'type': 'object',
'properties': OrderedDict(
[ [
('section1', {'map': {'field1': OrderedDict([('example', 'Example 1')])}}), (
'section1',
{
'type': 'object',
'properties': {'field1': OrderedDict([('example', 'Example 1')])},
},
),
( (
'section2', 'section2',
{ {
'map': OrderedDict( 'type': 'object',
'properties': OrderedDict(
[ [
('field2', {'example': 'Example 2'}), ('field2', {'example': 'Example 2'}),
('field3', {'example': 'Example 3'}), ('field3', {'example': 'Example 3'}),
] ]
) ),
}, },
), ),
] ]
) ),
} }
config = module._schema_to_sample_configuration(schema) config = module._schema_to_sample_configuration(schema)
@ -41,7 +49,7 @@ def test_schema_to_sample_configuration_generates_config_map_with_examples():
def test_schema_to_sample_configuration_generates_config_sequence_of_strings_with_example(): def test_schema_to_sample_configuration_generates_config_sequence_of_strings_with_example():
flexmock(module.yaml.comments).should_receive('CommentedSeq').replace_with(list) flexmock(module.yaml.comments).should_receive('CommentedSeq').replace_with(list)
flexmock(module).should_receive('add_comments_to_configuration_sequence') flexmock(module).should_receive('add_comments_to_configuration_sequence')
schema = {'seq': [{'type': 'str'}], 'example': ['hi']} schema = {'type': 'array', 'items': {'type': 'string'}, 'example': ['hi']}
config = module._schema_to_sample_configuration(schema) config = module._schema_to_sample_configuration(schema)
@ -51,15 +59,15 @@ def test_schema_to_sample_configuration_generates_config_sequence_of_strings_wit
def test_schema_to_sample_configuration_generates_config_sequence_of_maps_with_examples(): def test_schema_to_sample_configuration_generates_config_sequence_of_maps_with_examples():
flexmock(module.yaml.comments).should_receive('CommentedSeq').replace_with(list) flexmock(module.yaml.comments).should_receive('CommentedSeq').replace_with(list)
flexmock(module).should_receive('add_comments_to_configuration_sequence') flexmock(module).should_receive('add_comments_to_configuration_sequence')
flexmock(module).should_receive('add_comments_to_configuration_map') flexmock(module).should_receive('add_comments_to_configuration_object')
schema = { schema = {
'seq': [ 'type': 'array',
{ 'items': {
'map': OrderedDict( 'type': 'object',
[('field1', {'example': 'Example 1'}), ('field2', {'example': 'Example 2'})] 'properties': OrderedDict(
) [('field1', {'example': 'Example 1'}), ('field2', {'example': 'Example 2'})]
} ),
] },
} }
config = module._schema_to_sample_configuration(schema) config = module._schema_to_sample_configuration(schema)


@ -4,8 +4,33 @@ from flexmock import flexmock
from borgmatic.config import validate as module from borgmatic.config import validate as module
def test_validation_error_str_contains_error_messages_and_config_filename(): def test_format_error_path_element_formats_array_index():
error = module.Validation_error('config.yaml', ('oops', 'uh oh')) assert module.format_error_path_element(3) == '[3]'
def test_format_error_path_element_formats_property():
assert module.format_error_path_element('foo') == '.foo'
def test_format_error_formats_error_including_path():
flexmock(module).format_error_path_element = lambda element: '.{}'.format(element)
error = flexmock(message='oops', path=['foo', 'bar'])
assert module.format_error(error) == "At 'foo.bar': oops"
def test_format_error_formats_error_without_path():
flexmock(module).should_receive('format_error_path_element').never()
error = flexmock(message='oops', path=[])
assert module.format_error(error) == 'At the top level: oops'
def test_validation_error_string_contains_error_messages_and_config_filename():
flexmock(module).format_error = lambda error: error.message
error = module.Validation_error(
'config.yaml', (flexmock(message='oops', path=None), flexmock(message='uh oh'))
)
result = str(error) result = str(error)
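For context, a minimal implementation consistent with these new tests might look like the following; this is a sketch inferred from the assertions above rather than the exact code in borgmatic.config.validate:

def format_error_path_element(path_element):
    # Integer path elements are array indexes ('[3]'); everything else is treated
    # as an object property name ('.foo').
    if isinstance(path_element, int):
        return '[{}]'.format(path_element)

    return '.{}'.format(path_element)


def format_error(error):
    # 'error' is expected to be a jsonschema ValidationError, which carries a
    # 'message' string and a 'path' locating the offending element in the config.
    if not error.path:
        return 'At the top level: {}'.format(error.message)

    formatted_path = ''.join(format_error_path_element(element) for element in error.path)

    return "At '{}': {}".format(formatted_path.lstrip('.'), error.message)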
@ -15,6 +40,8 @@ def test_validation_error_str_contains_error_messages_and_config_filename():
def test_apply_logical_validation_raises_if_archive_name_format_present_without_prefix(): def test_apply_logical_validation_raises_if_archive_name_format_present_without_prefix():
flexmock(module).format_error = lambda error: error.message
with pytest.raises(module.Validation_error): with pytest.raises(module.Validation_error):
module.apply_logical_validation( module.apply_logical_validation(
'config.yaml', 'config.yaml',
@ -26,6 +53,8 @@ def test_apply_logical_validation_raises_if_archive_name_format_present_without_
def test_apply_logical_validation_raises_if_archive_name_format_present_without_retention_prefix(): def test_apply_logical_validation_raises_if_archive_name_format_present_without_retention_prefix():
flexmock(module).format_error = lambda error: error.message
with pytest.raises(module.Validation_error): with pytest.raises(module.Validation_error):
module.apply_logical_validation( module.apply_logical_validation(
'config.yaml', 'config.yaml',
@ -38,6 +67,8 @@ def test_apply_logical_validation_raises_if_archive_name_format_present_without_
def test_apply_locical_validation_raises_if_unknown_repository_in_check_repositories(): def test_apply_locical_validation_raises_if_unknown_repository_in_check_repositories():
flexmock(module).format_error = lambda error: error.message
with pytest.raises(module.Validation_error): with pytest.raises(module.Validation_error):
module.apply_logical_validation( module.apply_logical_validation(
'config.yaml', 'config.yaml',
@ -75,27 +106,6 @@ def test_apply_logical_validation_does_not_raise_otherwise():
module.apply_logical_validation('config.yaml', {'retention': {'keep_secondly': 1000}}) module.apply_logical_validation('config.yaml', {'retention': {'keep_secondly': 1000}})
def test_remove_examples_strips_examples_from_map():
schema = {
'map': {
'foo': {'desc': 'thing1', 'example': 'bar'},
'baz': {'desc': 'thing2', 'example': 'quux'},
}
}
module.remove_examples(schema)
assert schema == {'map': {'foo': {'desc': 'thing1'}, 'baz': {'desc': 'thing2'}}}
def test_remove_examples_strips_examples_from_sequence_of_maps():
schema = {'seq': [{'map': {'foo': {'desc': 'thing', 'example': 'bar'}}, 'example': 'stuff'}]}
module.remove_examples(schema)
assert schema == {'seq': [{'map': {'foo': {'desc': 'thing'}}}]}
def test_normalize_repository_path_passes_through_remote_repository(): def test_normalize_repository_path_passes_through_remote_repository():
repository = 'example.org:test.borg' repository = 'example.org:test.borg'
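A closing note on the deleted remove_examples() tests above: JSON Schema validators ignore keywords they do not recognize, so the custom 'example' annotations used to generate sample configuration no longer need to be stripped from the schema before validating, which is presumably why that helper went away. A small standalone illustration, not borgmatic code:

import jsonschema

# 'example' is not a draft 7 keyword, but unknown keywords are simply ignored
# during validation, so no pre-processing of the schema is required.
schema = {
    'type': 'object',
    'properties': {'foo': {'type': 'string', 'example': 'bar'}},
}

jsonschema.Draft7Validator(schema).validate({'foo': 'baz'})  # Passes without raising.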