Merge branch 'main' into borg2-archive-flags

Commit a62ac42cca by jetchirag, 2023-04-27 16:57:29 +00:00
19 changed files with 274 additions and 94 deletions


@@ -1,19 +1,20 @@
 ---
 kind: pipeline
 name: python-3-8-alpine-3-13
 services:
   - name: postgresql
-    image: postgres:13.1-alpine
+    image: docker.io/postgres:13.1-alpine
     environment:
       POSTGRES_PASSWORD: test
       POSTGRES_DB: test
   - name: mysql
-    image: mariadb:10.5
+    image: docker.io/mariadb:10.5
     environment:
       MYSQL_ROOT_PASSWORD: test
       MYSQL_DATABASE: test
   - name: mongodb
-    image: mongo:5.0.5
+    image: docker.io/mongo:5.0.5
     environment:
       MONGO_INITDB_ROOT_USERNAME: root
       MONGO_INITDB_ROOT_PASSWORD: test
@@ -23,7 +24,7 @@ clone:
 steps:
   - name: build
-    image: alpine:3.13
+    image: docker.io/alpine:3.13
     environment:
       TEST_CONTAINER: true
     pull: always
@@ -32,27 +33,32 @@ steps:
 ---
 kind: pipeline
 name: documentation
+type: exec
+platform:
+  os: linux
+  arch: amd64
 clone:
   skip_verify: true
 steps:
   - name: build
-    image: plugins/docker
-    settings:
-      username:
+    environment:
+      USERNAME:
         from_secret: docker_username
-      password:
+      PASSWORD:
         from_secret: docker_password
-      registry: projects.torsion.org
-      repo: projects.torsion.org/borgmatic-collective/borgmatic
-      tags: docs
-      dockerfile: docs/Dockerfile
+      IMAGE_NAME: projects.torsion.org/borgmatic-collective/borgmatic:docs
+    commands:
+      - podman login --username "$USERNAME" --password "$PASSWORD" projects.torsion.org
+      - podman build --tag "$IMAGE_NAME" --file docs/Dockerfile --storage-opt "overlay.mount_program=/usr/bin/fuse-overlayfs" .
+      - podman push "$IMAGE_NAME"
 trigger:
   repo:
     - borgmatic-collective/borgmatic
   branch:
-    - master
+    - main
   event:
     - push

NEWS

@@ -2,6 +2,14 @@
 * #375: Restore particular PostgreSQL schemas from a database dump via "borgmatic restore --schema"
   flag. See the documentation for more information:
   https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#restore-particular-schemas
+* #678: Fix error from PostgreSQL when dumping a database with a "format" of "plain".
+* #678: Fix PostgreSQL hook to support "psql_command" and "pg_restore_command" options containing
+  commands with arguments.
+* #678: Fix calls to psql in PostgreSQL hook to ignore "~/.psqlrc", whose settings can break
+  database dumping.
+* #682: Fix "source_directories_must_exist" option to expand globs and tildes in source directories.
+* #684: Rename "master" development branch to "main" to use more inclusive language. You'll need to
+  update your development checkouts accordingly.
 
 1.7.12
 * #413: Add "log_file" context to command hooks so your scripts can consume the borgmatic log file.
@@ -367,7 +375,7 @@
 * #398: Clarify canonical home of borgmatic in documentation.
 * #406: Clarify that spaces in path names should not be backslashed in path names.
 * #423: Fix error handling to error loudly when Borg gets killed due to running out of memory!
-* Fix build so as not to attempt to build and push documentation for a non-master branch.
+* Fix build so as not to attempt to build and push documentation for a non-main branch.
 * "Fix" build failure with Alpine Edge by switching from Edge to Alpine 3.13.
 * Move #borgmatic IRC channel from Freenode to Libera Chat due to Freenode takeover drama.
   IRC connection info: https://torsion.org/borgmatic/#issues


@@ -165,5 +165,5 @@ Also, please check out the [borgmatic development
 how-to](https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/) for
 info on cloning source code, running tests, etc.
 
-<a href="https://build.torsion.org/borgmatic-collective/borgmatic" alt="build status">![Build Status](https://build.torsion.org/api/badges/borgmatic-collective/borgmatic/status.svg?ref=refs/heads/master)</a>
+<a href="https://build.torsion.org/borgmatic-collective/borgmatic" alt="build status">![Build Status](https://build.torsion.org/api/badges/borgmatic-collective/borgmatic/status.svg?ref=refs/heads/main)</a>


@@ -7,8 +7,8 @@ permalink: security-policy/index.html
 While we want to hear about security vulnerabilities in all versions of
 borgmatic, security fixes are only made to the most recently released version.
-It's simply not practical for our small volunteer effort to maintain multiple
-release branches and put out separate security patches for each.
+It's not practical for our small volunteer effort to maintain multiple release
+branches and put out separate security patches for each.
 
 ## Reporting a vulnerability


@@ -314,7 +314,7 @@ def check_all_source_directories_exist(source_directories):
     missing_directories = [
         source_directory
         for source_directory in source_directories
-        if not os.path.exists(source_directory)
+        if not all([os.path.exists(directory) for directory in expand_directory(source_directory)])
     ]
     if missing_directories:
         raise ValueError(f"Source directories do not exist: {', '.join(missing_directories)}")
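For context on the #682 fix above, here's a minimal standalone sketch of the intended behavior; the `expand_directory` helper below is a stand-in (assumed to expand a leading tilde and any globs, falling back to the expanded path when nothing matches), not borgmatic's exact implementation:

```python
import glob
import os


def expand_directory(directory):
    # Stand-in helper: expand "~" and globs, falling back to the expanded path
    # itself when the glob matches nothing.
    expanded = os.path.expanduser(directory)
    return glob.glob(expanded) or [expanded]


def check_all_source_directories_exist(source_directories):
    # A configured source directory only counts as "missing" if every path it
    # expands to is absent, so globs and tildes no longer trigger false errors.
    missing_directories = [
        source_directory
        for source_directory in source_directories
        if not all(os.path.exists(path) for path in expand_directory(source_directory))
    ]
    if missing_directories:
        raise ValueError(f"Source directories do not exist: {', '.join(missing_directories)}")


check_all_source_directories_exist(['/etc', '~'])  # passes; a bogus glob would raise ValueError
```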


@@ -17,8 +17,8 @@ def resolve_archive_name(
 ):
     '''
     Given a local or remote repository path, an archive name, a storage config dict, a local Borg
-    path, and a remote Borg path, simply return the archive name. But if the archive name is
-    "latest", then instead introspect the repository for the latest archive and return its name.
+    path, and a remote Borg path, return the archive name. But if the archive name is "latest",
+    then instead introspect the repository for the latest archive and return its name.
 
     Raise ValueError if "latest" is given but there are no archives in the repository.
     '''


@@ -260,7 +260,7 @@ def merge_source_configuration_into_destination(destination_config, source_confi
             )
             continue
 
-        # This is some sort of scalar. Simply set it into the destination.
+        # This is some sort of scalar. Set it into the destination.
         destination_config[field_name] = source_config[field_name]
 
     return destination_config


@@ -60,7 +60,7 @@ properties:
                     or port. If systemd service is used, then add local
                     repository paths in the systemd service file to the
                     ReadWritePaths list. Prior to borgmatic 1.7.10, repositories
-                    was just a list of plain path strings.
+                    was a list of plain path strings.
                 example:
                     - path: ssh://user@backupserver/./sourcehostname.borg
                       label: backupserver
@@ -836,25 +836,25 @@ properties:
                     Command to use instead of "pg_dump" or
                     "pg_dumpall". This can be used to run a specific
                     pg_dump version (e.g., one inside a running
-                    docker container). Defaults to "pg_dump" for
-                    single database dump or "pg_dumpall" to dump
-                    all databases.
+                    container). Defaults to "pg_dump" for single
+                    database dump or "pg_dumpall" to dump all
+                    databases.
                 example: docker exec my_pg_container pg_dump
             pg_restore_command:
                 type: string
                 description: |
                     Command to use instead of "pg_restore". This
                     can be used to run a specific pg_restore
-                    version (e.g., one inside a running docker
-                    container). Defaults to "pg_restore".
+                    version (e.g., one inside a running container).
+                    Defaults to "pg_restore".
                 example: docker exec my_pg_container pg_restore
             psql_command:
                 type: string
                 description: |
                     Command to use instead of "psql". This can be
                     used to run a specific psql version (e.g.,
-                    one inside a running docker container).
-                    Defaults to "psql".
+                    one inside a running container). Defaults to
+                    "psql".
                 example: docker exec my_pg_container psql
             options:
                 type: string
@@ -1216,7 +1216,7 @@ properties:
                 type: string
                 description: |
                     Healthchecks ping URL or UUID to notify when a
-                    backup begins, ends, errors or just to send logs.
+                    backup begins, ends, errors, or to send only logs.
                 example: https://hc-ping.com/your-uuid-here
             verify_tls:
                 type: boolean


@@ -2,6 +2,7 @@ import csv
 import itertools
 import logging
 import os
+import shlex
 
 from borgmatic.execute import (
     execute_command,
@@ -60,8 +61,10 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
     if dry_run:
         return ()
 
+    psql_command = shlex.split(database.get('psql_command') or 'psql')
     list_command = (
-        ('psql', '--list', '--no-password', '--csv', '--tuples-only')
+        tuple(psql_command)
+        + ('--list', '--no-password', '--no-psqlrc', '--csv', '--tuples-only')
         + (('--host', database['hostname']) if 'hostname' in database else ())
         + (('--port', str(database['port'])) if 'port' in database else ())
         + (('--username', database['username']) if 'username' in database else ())
@@ -210,9 +213,10 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
     dump_filename = dump.make_database_dump_filename(
         make_dump_path(location_config), database['name'], database.get('hostname')
     )
-    psql_command = database.get('psql_command') or 'psql'
+    psql_command = shlex.split(database.get('psql_command') or 'psql')
     analyze_command = (
-        (psql_command, '--no-password', '--quiet')
+        tuple(psql_command)
+        + ('--no-password', '--no-psqlrc', '--quiet')
         + (('--host', database['hostname']) if 'hostname' in database else ())
         + (('--port', str(database['port'])) if 'port' in database else ())
         + (('--username', database['username']) if 'username' in database else ())
@@ -220,14 +224,13 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
         + (tuple(database['analyze_options'].split(' ')) if 'analyze_options' in database else ())
         + ('--command', 'ANALYZE')
     )
-    pg_restore_command = database.get('pg_restore_command') or 'pg_restore'
+    use_psql_command = all_databases or database.get('format') == 'plain'
+    pg_restore_command = shlex.split(database.get('pg_restore_command') or 'pg_restore')
     restore_command = (
-        (psql_command if all_databases else pg_restore_command, '--no-password')
-        + (
-            ('--if-exists', '--exit-on-error', '--clean', '--dbname', database['name'])
-            if not all_databases
-            else ()
-        )
+        tuple(psql_command if use_psql_command else pg_restore_command)
+        + ('--no-password',)
+        + (('--no-psqlrc',) if use_psql_command else ('--if-exists', '--exit-on-error', '--clean'))
+        + (('--dbname', database['name']) if not all_databases else ())
         + (('--host', database['hostname']) if 'hostname' in database else ())
         + (('--port', str(database['port'])) if 'port' in database else ())
        + (('--username', database['username']) if 'username' in database else ())
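To make the #678 change above concrete, here's a minimal standalone sketch (not the hook itself; the sample config value is hypothetical) of how splitting `psql_command` with `shlex` lets a command with arguments be combined with extra flags:

```python
import shlex

# Hypothetical configuration entry, as the hook might receive it.
database = {'name': 'foo', 'psql_command': 'docker exec my_pg_container psql'}

# shlex.split() turns the configured string into separate arguments...
psql_command = shlex.split(database.get('psql_command') or 'psql')
# ['docker', 'exec', 'my_pg_container', 'psql']

# ...so additional flags can simply be appended as tuple elements instead of
# being treated as part of one giant executable name.
list_command = tuple(psql_command) + ('--list', '--no-password', '--no-psqlrc')
print(list_command)
# ('docker', 'exec', 'my_pg_container', 'psql', '--list', '--no-password', '--no-psqlrc')
```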

docs/docker-compose.yaml (new file)

@@ -0,0 +1,22 @@
+version: '3'
+services:
+  docs:
+    image: borgmatic-docs
+    container_name: docs
+    ports:
+      - 8080:80
+    build:
+      dockerfile: docs/Dockerfile
+      context: ..
+      args:
+        ENVIRONMENT: dev
+  message:
+    image: alpine
+    container_name: message
+    command:
+      - sh
+      - -c
+      - |
+        echo "You can view dev docs at http://localhost:8080"
+    depends_on:
+      - docs


@@ -138,9 +138,9 @@ hooks:
 
 ### Containers
 
-If your database is running within a Docker container and borgmatic is too, no
-problem—simply configure borgmatic to connect to the container's name on its
-exposed port. For instance:
+If your database is running within a container and borgmatic is too, no
+problem—configure borgmatic to connect to the container's name on its exposed
+port. For instance:
 
 ```yaml
 hooks:
@@ -154,10 +154,10 @@ hooks:
 
 But what if borgmatic is running on the host? You can still connect to a
 database container if its ports are properly exposed to the host. For
-instance, when running the database container with Docker, you can specify
-`--publish 127.0.0.1:5433:5432` so that it exposes the container's port 5432
-to port 5433 on the host (only reachable on localhost, in this case). Or the
-same thing with Docker Compose:
+instance, when running the database container, you can specify `--publish
+127.0.0.1:5433:5432` so that it exposes the container's port 5432 to port 5433
+on the host (only reachable on localhost, in this case). Or the same thing
+with Docker Compose:
 
 ```yaml
 services:
@@ -179,7 +179,7 @@ hooks:
     password: trustsome1
 ```
 
-Of course, alter the ports in these examples to suit your particular database
+You can alter the ports in these examples to suit your particular database
 system.
@@ -397,9 +397,9 @@ dumps with any database system.
 With PostgreSQL and MySQL/MariaDB, if you're getting authentication errors
 when borgmatic tries to connect to your database, a natural reaction is to
 increase your borgmatic verbosity with `--verbosity 2` and go looking in the
-logs. You'll notice however that your database password does not show up in
-the logs. This is likely not the cause of the authentication problem unless
-you mistyped your password, however; borgmatic passes your password to the
+logs. You'll notice though that your database password does not show up in the
+logs. This is likely not the cause of the authentication problem unless you
+mistyped your password, however; borgmatic passes your password to the
 database via an environment variable that does not appear in the logs.
 
 The cause of an authentication error is often on the database side—in the
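As a rough illustration of what passing the password through an environment variable (rather than on the command line) means for the paragraph above, here's a hedged sketch; the variable name and config dict are assumptions for the example, not lifted from the hook's code:

```python
import os
import subprocess

# Hypothetical database config entry, for illustration only.
database = {'name': 'users', 'username': 'postgres', 'password': 'trustsome1'}

# The password goes into the subprocess environment (libpq reads PGPASSWORD),
# so the pg_dump command line that shows up in verbose logs never contains it.
environment = dict(os.environ, PGPASSWORD=database['password'])
command = ('pg_dump', '--no-password', '--username', database['username'], database['name'])

subprocess.run(command, env=environment, check=True)
```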


@@ -7,7 +7,7 @@ eleventyNavigation:
 ---
 
 ## Source code
 
-To get set up to hack on borgmatic, first clone master via HTTPS or SSH:
+To get set up to hack on borgmatic, first clone it via HTTPS or SSH:
 
 ```bash
 git clone https://projects.torsion.org/borgmatic-collective/borgmatic.git
@@ -87,19 +87,20 @@ tox -e codespell
 borgmatic additionally includes some end-to-end tests that integration test
 with Borg and supported databases for a few representative scenarios. These
 tests don't run by default when running `tox`, because they're relatively slow
-and depend on Docker containers for runtime dependencies. These tests tests do
-run on the continuous integration (CI) server, and running them on your
-developer machine is the closest thing to CI test parity.
+and depend on containers for runtime dependencies. These tests do run on the
+continuous integration (CI) server, and running them on your developer machine
+is the closest thing to CI-test parity.
 
-If you would like to run the full test suite, first install Docker and [Docker
-Compose](https://docs.docker.com/compose/install/). Then run:
+If you would like to run the full test suite, first install Docker (or Podman;
+see below) and [Docker Compose](https://docs.docker.com/compose/install/).
+Then run:
 
 ```bash
 scripts/run-end-to-end-dev-tests
 ```
 
-Note that this scripts assumes you have permission to run Docker. If you
-don't, then you may need to run with `sudo`.
+This script assumes you have permission to run `docker`. If you don't, then
+you may need to run with `sudo`.
#### Podman #### Podman
@@ -112,13 +113,13 @@ borgmatic's end-to-end tests optionally support using
 Setting up Podman is outside the scope of this documentation, but here are
 some key points to double-check:
 
-* Install Podman along with `podman-docker` and your desired networking
-  support.
+* Install Podman and your desired networking support.
 * Configure `/etc/subuid` and `/etc/subgid` to map users/groups for the
   non-root user who will run tests.
 * Create a non-root Podman socket for that user:
 
 ```bash
 systemctl --user enable --now podman.socket
+systemctl --user start --now podman.socket
 ```
 
 Then you'll be able to run end-to-end tests as per normal, and the test script
@@ -161,11 +162,12 @@ To build and view a copy of the documentation with your local changes, run the
 following from the root of borgmatic's source code:
 
 ```bash
-sudo scripts/dev-docs
+scripts/dev-docs
 ```
 
-This requires Docker to be installed on your system. You may not need to use
-sudo if your non-root user has permissions to run Docker.
+This requires Docker (or Podman; see below) to be installed on your system.
+This script assumes you have permission to run `docker`. If you don't, then
+you may need to run with `sudo`.
 
 After you run the script, you can point your web browser at
 http://localhost:8080 to view the documentation with your changes.
@@ -183,5 +185,5 @@ borgmatic's developer build for documentation optionally supports using
 [Podman](https://podman.io/) instead of Docker.
 
 Setting up Podman is outside the scope of this documentation. But once you
-install `podman-docker`, then `scripts/dev-docs` should automatically use
+install and configure Podman, then `scripts/dev-docs` should automatically use
 Podman instead of Docker.


@@ -169,7 +169,7 @@ brackets. For instance, the default log format is: `[{asctime}] {levelname}:
 {message}`. This means each log message is recorded as the log time (in square
 brackets), a logging level name, a colon, and the actual log message.
 
-So if you just want each log message to get logged *without* a timestamp or a
+So if you only want each log message to get logged *without* a timestamp or a
 logging level name:
 
 ```bash


@@ -86,8 +86,8 @@ uses the `archive_name_format` option to automatically limit which archives
 get used for actions operating on multiple archives. This prevents, for
 instance, duplicate archives from showing up in `rlist` or `info` results—even
 if the same repository appears in multiple borgmatic configuration files. To
-take advantage of this feature, simply use a different `archive_name_format`
-in each configuration file.
+take advantage of this feature, use a different `archive_name_format` in each
+configuration file.
 
 Under the hood, borgmatic accomplishes this by substituting globs for certain
 ephemeral data placeholders in your `archive_name_format`—and using the result
@@ -108,8 +108,8 @@ archives used for actions like `rlist`, `info`, `prune`, `check`, etc.
 The end result is that when borgmatic runs the actions for a particular
 application-specific configuration file, it only operates on the archives
-created for that application. Of course, this doesn't apply to actions like
-`compact` that operate on an entire repository.
+created for that application. But this doesn't apply to actions like `compact`
+that operate on an entire repository.
 
 If this behavior isn't quite smart enough for your needs, you can use the
 `match_archives` option to override the pattern that borgmatic uses for
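To picture the glob substitution that the hunks above describe, here's a rough sketch; the placeholder list is illustrative rather than borgmatic's exact set:

```python
# Illustrative set of "ephemeral" placeholders; borgmatic's actual list may differ.
EPHEMERAL_PLACEHOLDERS = ('{now}', '{utcnow}', '{pid}')


def archive_name_format_to_match_pattern(archive_name_format):
    # Swap each ephemeral placeholder for a "*" so the pattern matches every
    # archive ever created from this archive_name_format.
    pattern = archive_name_format
    for placeholder in EPHEMERAL_PLACEHOLDERS:
        pattern = pattern.replace(placeholder, '*')
    return pattern


# An application-specific format like this...
print(archive_name_format_to_match_pattern('app1-{now}'))  # app1-*
# ...yields a pattern that actions like rlist, info, and prune can use to
# consider only this application's archives in a shared repository.
```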


@@ -82,8 +82,8 @@ on a relatively dedicated system, then a global install can work out fine.
 Besides the approaches described above, there are several other options for
 installing borgmatic:
 
-* [Docker image with scheduled backups](https://hub.docker.com/r/b3vis/borgmatic/) (+ Docker Compose files)
-* [Docker image with multi-arch and Docker CLI support](https://hub.docker.com/r/modem7/borgmatic-docker/)
+* [container image with scheduled backups](https://hub.docker.com/r/b3vis/borgmatic/) (+ Docker Compose files)
+* [container image with multi-arch and Docker CLI support](https://hub.docker.com/r/modem7/borgmatic-docker/)
 * [Debian](https://tracker.debian.org/pkg/borgmatic)
 * [Ubuntu](https://launchpad.net/ubuntu/+source/borgmatic)
 * [Fedora official](https://bodhi.fedoraproject.org/updates/?search=borgmatic)
@@ -96,7 +96,7 @@ installing borgmatic:
 * [macOS (via MacPorts)](https://ports.macports.org/port/borgmatic/)
 * [NixOS](https://search.nixos.org/packages?show=borgmatic&sort=relevance&type=packages&query=borgmatic)
 * [Ansible role](https://github.com/borgbase/ansible-role-borgbackup)
-* [virtualenv](https://virtualenv.pypa.io/en/stable/)
+* [Unraid](https://unraid.net/community/apps?q=borgmatic#r)
 
 ## Hosting providers
@@ -279,7 +279,7 @@ that, you can configure a separate job runner to invoke it periodically.
 ### cron
 
 If you're using cron, download the [sample cron
-file](https://projects.torsion.org/borgmatic-collective/borgmatic/src/master/sample/cron/borgmatic).
+file](https://projects.torsion.org/borgmatic-collective/borgmatic/src/main/sample/cron/borgmatic).
 Then, from the directory where you downloaded it:
 
 ```bash
@@ -303,9 +303,9 @@ you may already have borgmatic systemd service and timer files. If so, you may
 be able to skip some of the steps below.)
 
 First, download the [sample systemd service
-file](https://projects.torsion.org/borgmatic-collective/borgmatic/raw/branch/master/sample/systemd/borgmatic.service)
+file](https://projects.torsion.org/borgmatic-collective/borgmatic/raw/branch/main/sample/systemd/borgmatic.service)
 and the [sample systemd timer
-file](https://projects.torsion.org/borgmatic-collective/borgmatic/raw/branch/master/sample/systemd/borgmatic.timer).
+file](https://projects.torsion.org/borgmatic-collective/borgmatic/raw/branch/main/sample/systemd/borgmatic.timer).
 Then, from the directory where you downloaded them:


@@ -2,8 +2,10 @@
 set -e
 
-docker build --tag borgmatic-docs --build-arg ENVIRONMENT=dev --file docs/Dockerfile .
-echo
-echo "You can view dev docs at http://localhost:8080"
-echo
-docker run --interactive --tty --publish 8080:80 --rm borgmatic-docs
+USER_PODMAN_SOCKET_PATH=/run/user/$UID/podman/podman.sock
+
+if [ -e "$USER_PODMAN_SOCKET_PATH" ]; then
+    export DOCKER_HOST="unix://$USER_PODMAN_SOCKET_PATH"
+fi
+
+docker-compose --file docs/docker-compose.yaml up --build --force-recreate


@@ -118,7 +118,7 @@ def test_database_dump_and_restore():
         # Restore the database from the archive.
         subprocess.check_call(
-            ['borgmatic', '--config', config_path, 'restore', '--archive', archive_name]
+            ['borgmatic', '-v', '2', '--config', config_path, 'restore', '--archive', archive_name]
         )
     finally:
         os.chdir(original_working_directory)


@@ -2550,7 +2550,7 @@ def test_create_archive_with_non_existent_directory_and_source_directories_must_
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([])
-    flexmock(module.os.path).should_receive('exists').and_return(False)
+    flexmock(module).should_receive('check_all_source_directories_exist').and_raise(ValueError)
 
     with pytest.raises(ValueError):
         module.create_archive(
@@ -2565,3 +2565,26 @@ def test_create_archive_with_non_existent_directory_and_source_directories_must_
             storage_config={},
             local_borg_version='1.2.3',
         )
+
+
+def test_check_all_source_directories_exist_with_glob_and_tilde_directories():
+    flexmock(module).should_receive('expand_directory').with_args('foo*').and_return(
+        ('foo', 'food')
+    )
+    flexmock(module).should_receive('expand_directory').with_args('~/bar').and_return(
+        ('/root/bar',)
+    )
+    flexmock(module.os.path).should_receive('exists').and_return(False)
+    flexmock(module.os.path).should_receive('exists').with_args('foo').and_return(True)
+    flexmock(module.os.path).should_receive('exists').with_args('food').and_return(True)
+    flexmock(module.os.path).should_receive('exists').with_args('/root/bar').and_return(True)
+
+    module.check_all_source_directories_exist(['foo*', '~/bar'])
+
+
+def test_check_all_source_directories_exist_with_non_existent_directory_raises():
+    flexmock(module).should_receive('expand_directory').with_args('foo').and_return(('foo',))
+    flexmock(module.os.path).should_receive('exists').and_return(False)
+
+    with pytest.raises(ValueError):
+        module.check_all_source_directories_exist(['foo'])


@@ -56,6 +56,7 @@ def test_database_names_to_dump_with_all_and_format_lists_databases_with_hostnam
             'psql',
             '--list',
             '--no-password',
+            '--no-psqlrc',
             '--csv',
             '--tuples-only',
             '--host',
@@ -75,7 +76,16 @@ def test_database_names_to_dump_with_all_and_format_lists_databases_with_hostnam
 def test_database_names_to_dump_with_all_and_format_lists_databases_with_username():
     database = {'name': 'all', 'format': 'custom', 'username': 'postgres'}
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('psql', '--list', '--no-password', '--csv', '--tuples-only', '--username', 'postgres'),
+        (
+            'psql',
+            '--list',
+            '--no-password',
+            '--no-psqlrc',
+            '--csv',
+            '--tuples-only',
+            '--username',
+            'postgres',
+        ),
         extra_environment=object,
     ).and_return('foo,test,\nbar,test,"stuff and such"')
@@ -88,7 +98,7 @@ def test_database_names_to_dump_with_all_and_format_lists_databases_with_usernam
 def test_database_names_to_dump_with_all_and_format_lists_databases_with_options():
     database = {'name': 'all', 'format': 'custom', 'list_options': '--harder'}
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('psql', '--list', '--no-password', '--csv', '--tuples-only', '--harder'),
+        ('psql', '--list', '--no-password', '--no-psqlrc', '--csv', '--tuples-only', '--harder'),
         extra_environment=object,
     ).and_return('foo,test,\nbar,test,"stuff and such"')
@@ -109,6 +119,28 @@ def test_database_names_to_dump_with_all_and_format_excludes_particular_database
     )
 
 
+def test_database_names_to_dump_with_all_and_psql_command_uses_custom_command():
+    database = {'name': 'all', 'format': 'custom', 'psql_command': 'docker exec mycontainer psql'}
+    flexmock(module).should_receive('execute_command_and_capture_output').with_args(
+        (
+            'docker',
+            'exec',
+            'mycontainer',
+            'psql',
+            '--list',
+            '--no-password',
+            '--no-psqlrc',
+            '--csv',
+            '--tuples-only',
+        ),
+        extra_environment=object,
+    ).and_return('foo,text').once()
+
+    assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == (
+        'foo',
+    )
+
+
 def test_dump_databases_runs_pg_dump_for_each_database():
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
@@ -433,7 +465,16 @@ def test_restore_database_dump_runs_pg_restore():
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
     flexmock(module).should_receive('execute_command').with_args(
-        ('psql', '--no-password', '--quiet', '--dbname', 'foo', '--command', 'ANALYZE'),
+        (
+            'psql',
+            '--no-password',
+            '--no-psqlrc',
+            '--quiet',
+            '--dbname',
+            'foo',
+            '--command',
+            'ANALYZE',
+        ),
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
@@ -489,6 +530,7 @@ def test_restore_database_dump_runs_pg_restore_with_hostname_and_port():
         (
             'psql',
             '--no-password',
+            '--no-psqlrc',
             '--quiet',
             '--host',
             'database.example.org',
@@ -539,6 +581,7 @@ def test_restore_database_dump_runs_pg_restore_with_username_and_password():
         (
             'psql',
             '--no-password',
+            '--no-psqlrc',
             '--quiet',
             '--username',
             'postgres',
@@ -589,6 +632,7 @@ def test_restore_database_dump_runs_pg_restore_with_options():
         (
             'psql',
             '--no-password',
+            '--no-psqlrc',
             '--quiet',
             '--dbname',
             'foo',
@@ -612,14 +656,51 @@ def test_restore_database_dump_runs_psql_for_all_database_dump():
     flexmock(module).should_receive('make_dump_path')
     flexmock(module.dump).should_receive('make_database_dump_filename')
     flexmock(module).should_receive('execute_command_with_processes').with_args(
-        ('psql', '--no-password'),
+        (
+            'psql',
+            '--no-password',
+            '--no-psqlrc',
+        ),
         processes=[extract_process],
         output_log_level=logging.DEBUG,
         input_file=extract_process.stdout,
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
     flexmock(module).should_receive('execute_command').with_args(
-        ('psql', '--no-password', '--quiet', '--command', 'ANALYZE'),
+        ('psql', '--no-password', '--no-psqlrc', '--quiet', '--command', 'ANALYZE'),
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
+
+    module.restore_database_dump(
+        database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process
+    )
+
+
+def test_restore_database_dump_runs_psql_for_plain_database_dump():
+    database_config = [{'name': 'foo', 'format': 'plain', 'schemas': None}]
+    extract_process = flexmock(stdout=flexmock())
+
+    flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
+    flexmock(module).should_receive('make_dump_path')
+    flexmock(module.dump).should_receive('make_database_dump_filename')
+    flexmock(module).should_receive('execute_command_with_processes').with_args(
+        ('psql', '--no-password', '--no-psqlrc', '--dbname', 'foo'),
+        processes=[extract_process],
+        output_log_level=logging.DEBUG,
+        input_file=extract_process.stdout,
+        extra_environment={'PGSSLMODE': 'disable'},
+    ).once()
+    flexmock(module).should_receive('execute_command').with_args(
+        (
+            'psql',
+            '--no-password',
+            '--no-psqlrc',
+            '--quiet',
+            '--dbname',
+            'foo',
+            '--command',
+            'ANALYZE',
+        ),
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
@@ -632,8 +713,8 @@ def test_restore_database_dump_runs_non_default_pg_restore_and_psql():
     database_config = [
         {
             'name': 'foo',
-            'pg_restore_command': 'special_pg_restore',
-            'psql_command': 'special_psql',
+            'pg_restore_command': 'docker exec mycontainer pg_restore',
+            'psql_command': 'docker exec mycontainer psql',
             'schemas': None,
         }
     ]
@@ -644,7 +725,10 @@ def test_restore_database_dump_runs_non_default_pg_restore_and_psql():
     flexmock(module.dump).should_receive('make_database_dump_filename')
     flexmock(module).should_receive('execute_command_with_processes').with_args(
         (
-            'special_pg_restore',
+            'docker',
+            'exec',
+            'mycontainer',
+            'pg_restore',
             '--no-password',
             '--if-exists',
             '--exit-on-error',
@@ -658,7 +742,19 @@ def test_restore_database_dump_runs_non_default_pg_restore_and_psql():
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
     flexmock(module).should_receive('execute_command').with_args(
-        ('special_psql', '--no-password', '--quiet', '--dbname', 'foo', '--command', 'ANALYZE'),
+        (
+            'docker',
+            'exec',
+            'mycontainer',
+            'psql',
+            '--no-password',
+            '--no-psqlrc',
+            '--quiet',
+            '--dbname',
+            'foo',
+            '--command',
+            'ANALYZE',
+        ),
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
@@ -703,7 +799,16 @@ def test_restore_database_dump_without_extract_process_restores_from_disk():
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
     flexmock(module).should_receive('execute_command').with_args(
-        ('psql', '--no-password', '--quiet', '--dbname', 'foo', '--command', 'ANALYZE'),
+        (
+            'psql',
+            '--no-password',
+            '--no-psqlrc',
+            '--quiet',
+            '--dbname',
+            'foo',
+            '--command',
+            'ANALYZE',
+        ),
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
@@ -739,7 +844,16 @@ def test_restore_database_dump_with_schemas_restores_schemas():
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
     flexmock(module).should_receive('execute_command').with_args(
-        ('psql', '--no-password', '--quiet', '--dbname', 'foo', '--command', 'ANALYZE'),
+        (
+            'psql',
+            '--no-password',
+            '--no-psqlrc',
+            '--quiet',
+            '--dbname',
+            'foo',
+            '--command',
+            'ANALYZE',
+        ),
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()