summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAndrej Shadura <andrew.shadura@collabora.co.uk>2020-01-16 14:41:09 +0100
committerAndrej Shadura <andrew.shadura@collabora.co.uk>2020-01-16 14:41:09 +0100
commitab921aa6de28f891cc9744c3028547478ecfa80f (patch)
tree58b330c1c0047b45adb85dd896bf5dd25076c78d
parent3e3ca246cd4faa4b3dc1c78002f44ea28ec2680e (diff)
New upstream version 1.4.21
-rw-r--r--.drone.yml70
-rw-r--r--.eleventy.js2
-rw-r--r--.gitea/issue_template.md4
-rw-r--r--AUTHORS2
-rw-r--r--NEWS83
-rw-r--r--README.md56
-rw-r--r--borgmatic/borg/check.py24
-rw-r--r--borgmatic/borg/create.py26
-rw-r--r--borgmatic/borg/extract.py15
-rw-r--r--borgmatic/borg/info.py4
-rw-r--r--borgmatic/borg/init.py20
-rw-r--r--borgmatic/borg/list.py7
-rw-r--r--borgmatic/borg/mount.py46
-rw-r--r--borgmatic/borg/prune.py10
-rw-r--r--borgmatic/borg/umount.py20
-rw-r--r--borgmatic/commands/arguments.py89
-rw-r--r--borgmatic/commands/borgmatic.py270
-rw-r--r--borgmatic/config/override.py71
-rw-r--r--borgmatic/config/schema.yaml79
-rw-r--r--borgmatic/config/validate.py38
-rw-r--r--borgmatic/execute.py81
-rw-r--r--borgmatic/hooks/cronhub.py22
-rw-r--r--borgmatic/hooks/cronitor.py24
-rw-r--r--borgmatic/hooks/dispatch.py61
-rw-r--r--borgmatic/hooks/dump.py175
-rw-r--r--borgmatic/hooks/healthchecks.py85
-rw-r--r--borgmatic/hooks/monitor.py9
-rw-r--r--borgmatic/hooks/mysql.py111
-rw-r--r--borgmatic/hooks/postgresql.py139
-rw-r--r--borgmatic/logger.py54
-rw-r--r--docs/Dockerfile2
-rw-r--r--docs/_includes/components/suggestion-form.html8
-rw-r--r--docs/how-to/backup-your-databases.md56
-rw-r--r--docs/how-to/develop-on-borgmatic.md16
-rw-r--r--docs/how-to/extract-a-backup.md35
-rw-r--r--docs/how-to/inspect-your-backups.md29
-rw-r--r--docs/how-to/make-per-application-backups.md39
-rw-r--r--docs/how-to/monitor-your-backups.md51
-rw-r--r--docs/how-to/set-up-backups.md29
-rw-r--r--docs/how-to/upgrade.md6
-rw-r--r--docs/static/borgbase.pngbin0 -> 5822 bytes
-rw-r--r--docs/static/borgmatic.png (renamed from static/borgmatic.png)bin3593 -> 3593 bytes
-rw-r--r--docs/static/borgmatic.svg (renamed from static/borgmatic.svg)0
-rw-r--r--docs/static/cronhub.pngbin0 -> 23536 bytes
-rw-r--r--docs/static/cronitor.pngbin0 -> 10364 bytes
-rw-r--r--docs/static/healthchecks.pngbin0 -> 4556 bytes
-rw-r--r--docs/static/mariadb.pngbin0 -> 9474 bytes
-rw-r--r--docs/static/mysql.pngbin0 -> 3761 bytes
-rw-r--r--docs/static/postgresql.pngbin0 -> 31528 bytes
-rw-r--r--docs/static/rsyncnet.pngbin0 -> 7493 bytes
-rwxr-xr-xscripts/release3
-rwxr-xr-xscripts/run-full-dev-tests14
-rwxr-xr-xscripts/run-full-tests18
-rwxr-xr-xscripts/run-tests13
-rw-r--r--setup.py2
-rw-r--r--tests/end-to-end/docker-compose.yaml25
-rw-r--r--tests/end-to-end/test_borgmatic.py8
-rw-r--r--tests/end-to-end/test_database.py83
-rw-r--r--tests/integration/commands/test_arguments.py44
-rw-r--r--tests/integration/config/test_override.py40
-rw-r--r--tests/integration/config/test_validate.py27
-rw-r--r--tests/integration/test_execute.py53
-rw-r--r--tests/unit/borg/test_check.py33
-rw-r--r--tests/unit/borg/test_create.py147
-rw-r--r--tests/unit/borg/test_extract.py20
-rw-r--r--tests/unit/borg/test_info.py29
-rw-r--r--tests/unit/borg/test_init.py45
-rw-r--r--tests/unit/borg/test_list.py75
-rw-r--r--tests/unit/borg/test_mount.py146
-rw-r--r--tests/unit/borg/test_prune.py21
-rw-r--r--tests/unit/borg/test_umount.py33
-rw-r--r--tests/unit/commands/test_borgmatic.py128
-rw-r--r--tests/unit/config/test_override.py82
-rw-r--r--tests/unit/config/test_validate.py40
-rw-r--r--tests/unit/hooks/test_cronhub.py32
-rw-r--r--tests/unit/hooks/test_cronitor.py25
-rw-r--r--tests/unit/hooks/test_dispatch.py68
-rw-r--r--tests/unit/hooks/test_dump.py183
-rw-r--r--tests/unit/hooks/test_healthchecks.py105
-rw-r--r--tests/unit/hooks/test_mysql.py216
-rw-r--r--tests/unit/hooks/test_postgresql.py172
-rw-r--r--tests/unit/test_execute.py159
-rw-r--r--tests/unit/test_logger.py52
-rw-r--r--tox.ini6
84 files changed, 3333 insertions, 782 deletions
diff --git a/.drone.yml b/.drone.yml
index 8137720..89987e3 100644
--- a/.drone.yml
+++ b/.drone.yml
@@ -2,52 +2,112 @@
kind: pipeline
name: python-3-5-alpine-3-10
+services:
+ - name: postgresql
+ image: postgres:11.6-alpine
+ environment:
+ POSTGRES_PASSWORD: test
+ POSTGRES_DB: test
+ - name: mysql
+ image: mariadb:10.3
+ environment:
+ MYSQL_ROOT_PASSWORD: test
+ MYSQL_DATABASE: test
+
steps:
- name: build
image: python:3.5-alpine3.10
pull: always
commands:
- - scripts/run-tests
+ - scripts/run-full-tests
---
kind: pipeline
name: python-3-6-alpine-3-10
+services:
+ - name: postgresql
+ image: postgres:11.6-alpine
+ environment:
+ POSTGRES_PASSWORD: test
+ POSTGRES_DB: test
+ - name: mysql
+ image: mariadb:10.3
+ environment:
+ MYSQL_ROOT_PASSWORD: test
+ MYSQL_DATABASE: test
+
steps:
- name: build
image: python:3.6-alpine3.10
pull: always
commands:
- - scripts/run-tests
+ - scripts/run-full-tests
---
kind: pipeline
name: python-3-7-alpine-3-10
+services:
+ - name: postgresql
+ image: postgres:11.6-alpine
+ environment:
+ POSTGRES_PASSWORD: test
+ POSTGRES_DB: test
+ - name: mysql
+ image: mariadb:10.3
+ environment:
+ MYSQL_ROOT_PASSWORD: test
+ MYSQL_DATABASE: test
+
steps:
- name: build
image: python:3.7-alpine3.10
pull: always
commands:
- - scripts/run-tests
+ - scripts/run-full-tests
---
kind: pipeline
name: python-3-7-alpine-3-7
+services:
+ - name: postgresql
+ image: postgres:10.11-alpine
+ environment:
+ POSTGRES_PASSWORD: test
+ POSTGRES_DB: test
+ - name: mysql
+ image: mariadb:10.1
+ environment:
+ MYSQL_ROOT_PASSWORD: test
+ MYSQL_DATABASE: test
+
steps:
- name: build
image: python:3.7-alpine3.7
pull: always
commands:
- - scripts/run-tests
+ - scripts/run-full-tests
---
kind: pipeline
name: python-3-8-alpine-3-10
+services:
+ - name: postgresql
+ image: postgres:11.6-alpine
+ environment:
+ POSTGRES_PASSWORD: test
+ POSTGRES_DB: test
+ - name: mysql
+ image: mariadb:10.3
+ environment:
+ MYSQL_ROOT_PASSWORD: test
+ MYSQL_DATABASE: test
+
steps:
- name: build
image: python:3.8-alpine3.10
pull: always
commands:
- - scripts/run-tests
+ - scripts/run-full-tests
---
kind: pipeline
name: documentation
diff --git a/.eleventy.js b/.eleventy.js
index 1b5f48b..cc5d9f1 100644
--- a/.eleventy.js
+++ b/.eleventy.js
@@ -32,6 +32,8 @@ module.exports = function(eleventyConfig) {
.use(markdownItReplaceLink)
);
+ eleventyConfig.addPassthroughCopy({"docs/static": "static"});
+
return {
templateFormats: [
"md",
diff --git a/.gitea/issue_template.md b/.gitea/issue_template.md
index d794371..99b1c65 100644
--- a/.gitea/issue_template.md
+++ b/.gitea/issue_template.md
@@ -28,4 +28,8 @@ Use `sudo borg --version`
Use `python3 --version`
+**Database version (if applicable):** [version here]
+
+Use `psql --version` or `mysql --version` on client and server.
+
**operating system and version:** [OS here]
diff --git a/AUTHORS b/AUTHORS
index e227450..4e28c57 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -10,3 +10,5 @@ newtonne: Read encryption password from external file
Robin `ypid` Schneider: Support additional options of Borg and add validate-borgmatic-config command
Scott Squires: Custom archive names
Thomas LÉVEIL: Support for a keep_minutely prune option. Support for the --json option
+
+And many others! See the output of "git log".
diff --git a/NEWS b/NEWS
index a374d4a..8e8adce 100644
--- a/NEWS
+++ b/NEWS
@@ -1,3 +1,86 @@
+1.4.21
+ * #268: Override particular configuration options from the command-line via "--override" flag. See
+ the documentation for more information:
+ https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#configuration-overrides
+ * #270: Only trigger "on_error" hooks and monitoring failures for "prune", "create", and "check"
+ actions, and not for other actions.
+ * When pruning with verbosity level 1, list pruned and kept archives. Previously, this information
+ was only shown at verbosity level 2.
+
+1.4.20
+ * Fix repository probing during "borgmatic init" to respect verbosity flag and remote_path option.
+ * #249: Update Healthchecks/Cronitor/Cronhub monitoring integrations to fire for "check" and
+ "prune" actions, not just "create".
+
+1.4.19
+ * #259: Optionally change the internal database dump path via "borgmatic_source_directory" option
+ in location configuration section.
+ * #271: Support piping "borgmatic list" output to grep by logging certain log levels to console
+ stdout and others to stderr.
+ * Retain colored output when piping or redirecting in an interactive terminal.
+ * Add end-to-end tests for database dump and restore. These are run on developer machines with
+ Docker Compose for approximate parity with continuous integration tests.
+
+1.4.18
+ * Fix "--repository" flag to accept relative paths.
+ * Fix "borgmatic umount" so it only runs Borg once instead of once per repository / configuration
+ file.
+ * #253: Mount whole repositories via "borgmatic mount" without any "--archive" flag.
+ * #269: Filter listed paths via "borgmatic list --path" flag.
+
+1.4.17
+ * #235: Pass extra options directly to particular Borg commands, handy for Borg options that
+ borgmatic does not yet support natively. Use "extra_borg_options" in the storage configuration
+ section.
+ * #266: Attempt to repair any inconsistencies found during a consistency check via
+ "borgmatic check --repair" flag.
+
+1.4.16
+ * #256: Fix for "before_backup" hook not triggering an error when the command contains "borg" and
+ has an exit code of 1.
+ * #257: Fix for garbled Borg file listing when using "borgmatic create --progress" with
+ verbosity level 1 or 2.
+ * #260: Fix for missing Healthchecks monitoring payload or HTTP 500 due to incorrect unicode
+ encoding.
+
+1.4.15
+ * Fix for database dump removal incorrectly skipping some database dumps.
+ * #123: Support for mounting an archive as a FUSE filesystem via "borgmatic mount" action, and
+ unmounting via "borgmatic umount". See the documentation for more information:
+ https://torsion.org/borgmatic/docs/how-to/extract-a-backup/#mount-a-filesystem
+
+1.4.14
+ * Show summary log errors regardless of verbosity level, and log the "summary:" header with a log
+ level based on the contained summary logs.
+
+1.4.13
+ * Show full error logs at "--verbosity 0" so you can see command output without upping the
+ verbosity level.
+
+1.4.12
+ * #247: With "borgmatic check", consider Borg warnings as errors.
+ * Dial back the display of inline error logs a bit, so failed command output doesn't appear
+ multiple times in the logs (well, except for the summary).
+
+1.4.11
+ * #241: When using the Healthchecks monitoring hook, include borgmatic logs in the payloads for
+ completion and failure pings.
+ * With --verbosity level 1 or 2, show error logs both inline when they occur and in the summary
+ logs at the bottom. With lower verbosity levels, suppress the summary and show error logs when
+ they occur.
+
+1.4.10
+ * #246: Fix for "borgmatic restore" showing success and incorrectly extracting archive files, even
+ when no databases are configured to restore. As this can overwrite files from the archive and
+ lead to data loss, please upgrade to get the fix before using "borgmatic restore".
+ * Reopen the file given by "--log-file" flag if an external program rotates the log file while
+ borgmatic is running.
+
+1.4.9
+ * #228: Database dump hooks for MySQL/MariaDB, so you can easily dump your databases before backups
+ run.
+ * #243: Fix repository does not exist error with "borgmatic extract" when repository is remote.
+
1.4.8
* Monitor backups with Cronhub hook integration. See the documentation for more information:
https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronhub-hook
diff --git a/README.md b/README.md
index e1ec7f8..09f4e65 100644
--- a/README.md
+++ b/README.md
@@ -2,68 +2,71 @@
title: borgmatic
permalink: index.html
---
-<a href="https://build.torsion.org/witten/borgmatic" alt="build status">![Build Status](https://build.torsion.org/api/badges/witten/borgmatic/status.svg?ref=refs/heads/master)</a>
-## Overview
+## It's your data. Keep it that way.
-<img src="https://projects.torsion.org/witten/borgmatic/raw/branch/master/static/borgmatic.png" alt="borgmatic logo" width="150px" style="float: right; padding-left: 1em;">
+<img src="docs/static/borgmatic.png" alt="borgmatic logo" width="150px" style="float: right; padding-left: 1em;">
borgmatic is simple, configuration-driven backup software for servers and
-workstations. Backup all of your machines from the command-line or scheduled
-jobs. No GUI required. Built atop [Borg Backup](https://www.borgbackup.org/),
-borgmatic initiates a backup, prunes any old backups according to a retention
-policy, and validates backups for consistency. borgmatic supports specifying
-your settings in a declarative configuration file, rather than having to put
-them all on the command-line, and handles common errors.
+workstations. Protect your files with client-side encryption. Backup your
+databases too. Monitor it all with integrated third-party services.
-Here's an example config file:
+Here's an example configuration file:
```yaml
location:
- # List of source directories to backup. Globs are expanded.
+ # List of source directories to backup.
source_directories:
- /home
- /etc
- - /var/log/syslog*
# Paths to local or remote repositories.
repositories:
- user@backupserver:sourcehostname.borg
- # Any paths matching these patterns are excluded from backups.
- exclude_patterns:
- - /home/*/.cache
-
retention:
- # Retention policy for how many backups to keep in each category.
+ # Retention policy for how many backups to keep.
keep_daily: 7
keep_weekly: 4
keep_monthly: 6
consistency:
- # List of consistency checks to run: "repository", "archives", etc.
+ # List of checks to run to validate your backups.
checks:
- repository
- archives
hooks:
- # Preparation scripts to run, databases to dump, and monitoring to perform.
+ # Custom preparation scripts to run.
before_backup:
- prepare-for-backup.sh
+
+ # Databases to dump and include in backups.
postgresql_databases:
- name: users
+
+ # Third-party services to notify you if backups aren't happening.
healthchecks: https://hc-ping.com/be067061-cf96-4412-8eae-62b0c50d6a8c
```
-borgmatic is hosted at <https://torsion.org/borgmatic> with [source code
-available](https://projects.torsion.org/witten/borgmatic). It's also mirrored
-on [GitHub](https://github.com/witten/borgmatic) for convenience.
-
Want to see borgmatic in action? Check out the <a
href="https://asciinema.org/a/203761" target="_blank">screencast</a>.
<script src="https://asciinema.org/a/203761.js" id="asciicast-203761" async></script>
+borgmatic is powered by [Borg Backup](https://www.borgbackup.org/).
+
+## Integrations
+
+<a href="https://www.postgresql.org/"><img src="docs/static/postgresql.png" alt="PostgreSQL" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
+<a href="https://www.mysql.com/"><img src="docs/static/mysql.png" alt="MySQL" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
+<a href="https://mariadb.com/"><img src="docs/static/mariadb.png" alt="MariaDB" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
+<a href="https://healthchecks.io/"><img src="docs/static/healthchecks.png" alt="Healthchecks" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
+<a href="https://cronitor.io/"><img src="docs/static/cronitor.png" alt="Cronitor" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
+<a href="https://cronhub.io/"><img src="docs/static/cronhub.png" alt="Cronhub" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
+<a href="https://www.rsync.net/cgi-bin/borg.cgi?campaign=borg&adgroup=borgmatic"><img src="docs/static/rsyncnet.png" alt="rsync.net" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
+<a href="https://www.borgbase.com/?utm_source=borgmatic"><img src="docs/static/borgbase.png" alt="BorgBase" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
+
## How-to guides
@@ -117,6 +120,10 @@ Other questions or comments? Contact <mailto:witten@torsion.org>.
### Contributing
+borgmatic is hosted at <https://torsion.org/borgmatic> with [source code
+available](https://projects.torsion.org/witten/borgmatic). It's also mirrored
+on [GitHub](https://github.com/witten/borgmatic) for convenience.
+
If you'd like to contribute to borgmatic development, please feel free to
submit a [Pull Request](https://projects.torsion.org/witten/borgmatic/pulls)
or open an [issue](https://projects.torsion.org/witten/borgmatic/issues) first
@@ -126,3 +133,6 @@ your thing. In general, contributions are very welcome. We don't bite!
Also, please check out the [borgmatic development
how-to](https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/) for
info on cloning source code, running tests, etc.
+
+<a href="https://build.torsion.org/witten/borgmatic" alt="build status">![Build Status](https://build.torsion.org/api/badges/witten/borgmatic/status.svg?ref=refs/heads/master)</a>
+
diff --git a/borgmatic/borg/check.py b/borgmatic/borg/check.py
index 8f8da23..45e59f2 100644
--- a/borgmatic/borg/check.py
+++ b/borgmatic/borg/check.py
@@ -1,7 +1,7 @@
import logging
from borgmatic.borg import extract
-from borgmatic.execute import execute_command
+from borgmatic.execute import execute_command, execute_command_without_capture
DEFAULT_CHECKS = ('repository', 'archives')
DEFAULT_PREFIX = '{hostname}-'
@@ -91,23 +91,23 @@ def check_archives(
consistency_config,
local_path='borg',
remote_path=None,
+ repair=None,
only_checks=None,
):
'''
Given a local or remote repository path, a storage config dict, a consistency config dict,
- local/remote commands to run, and an optional list of checks to use instead of configured
- checks, check the contained Borg archives for consistency.
+ local/remote commands to run, whether to attempt a repair, and an optional list of checks
+ to use instead of configured checks, check the contained Borg archives for consistency.
If there are no consistency checks to run, skip running them.
'''
checks = _parse_checks(consistency_config, only_checks)
check_last = consistency_config.get('check_last', None)
lock_wait = None
+ extra_borg_options = storage_config.get('extra_borg_options', {}).get('check', '')
if set(checks).intersection(set(DEFAULT_CHECKS + ('data',))):
- remote_path_flags = ('--remote-path', remote_path) if remote_path else ()
lock_wait = storage_config.get('lock_wait', None)
- lock_wait_flags = ('--lock-wait', str(lock_wait)) if lock_wait else ()
verbosity_flags = ()
if logger.isEnabledFor(logging.INFO):
@@ -119,14 +119,22 @@ def check_archives(
full_command = (
(local_path, 'check')
+ + (('--repair',) if repair else ())
+ _make_check_flags(checks, check_last, prefix)
- + remote_path_flags
- + lock_wait_flags
+ + (('--remote-path', remote_path) if remote_path else ())
+ + (('--lock-wait', str(lock_wait)) if lock_wait else ())
+ verbosity_flags
+ + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
+ (repository,)
)
- execute_command(full_command)
+ # The Borg repair option trigger an interactive prompt, which won't work when output is
+ # captured.
+ if repair:
+ execute_command_without_capture(full_command, error_on_warnings=True)
+ return
+
+ execute_command(full_command, error_on_warnings=True)
if 'extract' in checks:
extract.extract_last_archive_dry_run(repository, lock_wait, local_path, remote_path)
diff --git a/borgmatic/borg/create.py b/borgmatic/borg/create.py
index f3e8c2a..f582fb7 100644
--- a/borgmatic/borg/create.py
+++ b/borgmatic/borg/create.py
@@ -104,16 +104,19 @@ def _make_exclude_flags(location_config, exclude_filename=None):
)
-BORGMATIC_SOURCE_DIRECTORY = '~/.borgmatic'
+DEFAULT_BORGMATIC_SOURCE_DIRECTORY = '~/.borgmatic'
-def borgmatic_source_directories():
+def borgmatic_source_directories(borgmatic_source_directory):
'''
Return a list of borgmatic-specific source directories used for state like database backups.
'''
+ if not borgmatic_source_directory:
+ borgmatic_source_directory = DEFAULT_BORGMATIC_SOURCE_DIRECTORY
+
return (
- [BORGMATIC_SOURCE_DIRECTORY]
- if os.path.exists(os.path.expanduser(BORGMATIC_SOURCE_DIRECTORY))
+ [borgmatic_source_directory]
+ if os.path.exists(os.path.expanduser(borgmatic_source_directory))
else []
)
@@ -134,7 +137,8 @@ def create_archive(
storage config dict, create a Borg archive and return Borg's JSON output (if any).
'''
sources = _expand_directories(
- location_config['source_directories'] + borgmatic_source_directories()
+ location_config['source_directories']
+ + borgmatic_source_directories(location_config.get('borgmatic_source_directory'))
)
pattern_file = _write_pattern_file(location_config.get('patterns'))
@@ -150,6 +154,7 @@ def create_archive(
files_cache = location_config.get('files_cache')
default_archive_name_format = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
archive_name_format = storage_config.get('archive_name_format', default_archive_name_format)
+ extra_borg_options = storage_config.get('extra_borg_options', {}).get('create', '')
full_command = (
(local_path, 'create')
@@ -170,7 +175,11 @@ def create_archive(
+ (('--remote-path', remote_path) if remote_path else ())
+ (('--umask', str(umask)) if umask else ())
+ (('--lock-wait', str(lock_wait)) if lock_wait else ())
- + (('--list', '--filter', 'AME-') if logger.isEnabledFor(logging.INFO) and not json else ())
+ + (
+ ('--list', '--filter', 'AME-')
+ if logger.isEnabledFor(logging.INFO) and not json and not progress
+ else ()
+ )
+ (('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ())
+ (
('--stats',)
@@ -181,6 +190,7 @@ def create_archive(
+ (('--dry-run',) if dry_run else ())
+ (('--progress',) if progress else ())
+ (('--json',) if json else ())
+ + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
+ (
'{repository}::{archive_name_format}'.format(
repository=repository, archive_name_format=archive_name_format
@@ -192,7 +202,7 @@ def create_archive(
# The progress output isn't compatible with captured and logged output, as progress messes with
# the terminal directly.
if progress:
- execute_command_without_capture(full_command)
+ execute_command_without_capture(full_command, error_on_warnings=False)
return
if json:
@@ -202,4 +212,4 @@ def create_archive(
else:
output_log_level = logging.INFO
- return execute_command(full_command, output_log_level)
+ return execute_command(full_command, output_log_level, error_on_warnings=False)
diff --git a/borgmatic/borg/extract.py b/borgmatic/borg/extract.py
index f365942..09af537 100644
--- a/borgmatic/borg/extract.py
+++ b/borgmatic/borg/extract.py
@@ -27,7 +27,7 @@ def extract_last_archive_dry_run(repository, lock_wait=None, local_path='borg',
+ (repository,)
)
- list_output = execute_command(full_list_command, output_log_level=None)
+ list_output = execute_command(full_list_command, output_log_level=None, error_on_warnings=False)
try:
last_archive_name = list_output.strip().splitlines()[-1]
@@ -62,6 +62,7 @@ def extract_archive(
remote_path=None,
destination_path=None,
progress=False,
+ error_on_warnings=True,
):
'''
Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to
@@ -82,7 +83,7 @@ def extract_archive(
+ (('--debug', '--list', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+ (('--dry-run',) if dry_run else ())
+ (('--progress',) if progress else ())
- + ('::'.join((os.path.abspath(repository), archive)),)
+ + ('::'.join((repository if ':' in repository else os.path.abspath(repository), archive)),)
+ (tuple(paths) if paths else ())
)
@@ -90,10 +91,12 @@ def extract_archive(
# the terminal directly.
if progress:
execute_command_without_capture(
- full_command, working_directory=destination_path, error_on_warnings=True
+ full_command, working_directory=destination_path, error_on_warnings=error_on_warnings
)
return
- # Error on warnings, as Borg only gives a warning if the restore paths don't exist in the
- # archive!
- execute_command(full_command, working_directory=destination_path, error_on_warnings=True)
+ # Error on warnings by default, as Borg only gives a warning if the restore paths don't exist in
+ # the archive!
+ execute_command(
+ full_command, working_directory=destination_path, error_on_warnings=error_on_warnings
+ )
diff --git a/borgmatic/borg/info.py b/borgmatic/borg/info.py
index cbc50d1..3ff9312 100644
--- a/borgmatic/borg/info.py
+++ b/borgmatic/borg/info.py
@@ -39,5 +39,7 @@ def display_archives_info(
)
return execute_command(
- full_command, output_log_level=None if info_arguments.json else logging.WARNING
+ full_command,
+ output_log_level=None if info_arguments.json else logging.WARNING,
+ error_on_warnings=False,
)
diff --git a/borgmatic/borg/init.py b/borgmatic/borg/init.py
index cb787ae..08256ae 100644
--- a/borgmatic/borg/init.py
+++ b/borgmatic/borg/init.py
@@ -11,6 +11,7 @@ INFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2
def initialize_repository(
repository,
+ storage_config,
encryption_mode,
append_only=None,
storage_quota=None,
@@ -18,11 +19,17 @@ def initialize_repository(
remote_path=None,
):
'''
- Given a local or remote repository path, a Borg encryption mode, whether the repository should
- be append-only, and the storage quota to use, initialize the repository. If the repository
- already exists, then log and skip initialization.
+ Given a local or remote repository path, a storage configuration dict, a Borg encryption mode,
+ whether the repository should be append-only, and the storage quota to use, initialize the
+ repository. If the repository already exists, then log and skip initialization.
'''
- info_command = (local_path, 'info', repository)
+ info_command = (
+ (local_path, 'info')
+ + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+ + (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ())
+ + (('--remote-path', remote_path) if remote_path else ())
+ + (repository,)
+ )
logger.debug(' '.join(info_command))
try:
@@ -33,6 +40,8 @@ def initialize_repository(
if error.returncode != INFO_REPOSITORY_NOT_FOUND_EXIT_CODE:
raise
+ extra_borg_options = storage_config.get('extra_borg_options', {}).get('init', '')
+
init_command = (
(local_path, 'init')
+ (('--encryption', encryption_mode) if encryption_mode else ())
@@ -41,8 +50,9 @@ def initialize_repository(
+ (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+ (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ())
+ (('--remote-path', remote_path) if remote_path else ())
+ + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
+ (repository,)
)
# Don't use execute_command() here because it doesn't support interactive prompts.
- execute_command_without_capture(init_command)
+ execute_command_without_capture(init_command, error_on_warnings=False)
diff --git a/borgmatic/borg/list.py b/borgmatic/borg/list.py
index 854ceff..845f288 100644
--- a/borgmatic/borg/list.py
+++ b/borgmatic/borg/list.py
@@ -36,15 +36,18 @@ def list_archives(repository, storage_config, list_arguments, local_path='borg',
+ make_flags('remote-path', remote_path)
+ make_flags('lock-wait', lock_wait)
+ make_flags_from_arguments(
- list_arguments, excludes=('repository', 'archive', 'successful')
+ list_arguments, excludes=('repository', 'archive', 'paths', 'successful')
)
+ (
'::'.join((repository, list_arguments.archive))
if list_arguments.archive
else repository,
)
+ + (tuple(list_arguments.paths) if list_arguments.paths else ())
)
return execute_command(
- full_command, output_log_level=None if list_arguments.json else logging.WARNING
+ full_command,
+ output_log_level=None if list_arguments.json else logging.WARNING,
+ error_on_warnings=False,
)
diff --git a/borgmatic/borg/mount.py b/borgmatic/borg/mount.py
new file mode 100644
index 0000000..4fccbf9
--- /dev/null
+++ b/borgmatic/borg/mount.py
@@ -0,0 +1,46 @@
+import logging
+
+from borgmatic.execute import execute_command, execute_command_without_capture
+
+logger = logging.getLogger(__name__)
+
+
+def mount_archive(
+ repository,
+ archive,
+ mount_point,
+ paths,
+ foreground,
+ options,
+ storage_config,
+ local_path='borg',
+ remote_path=None,
+):
+ '''
+ Given a local or remote repository path, an optional archive name, a filesystem mount point,
+ zero or more paths to mount from the archive, extra Borg mount options, a storage configuration
+ dict, and optional local and remote Borg paths, mount the archive onto the mount point.
+ '''
+ umask = storage_config.get('umask', None)
+ lock_wait = storage_config.get('lock_wait', None)
+
+ full_command = (
+ (local_path, 'mount')
+ + (('--remote-path', remote_path) if remote_path else ())
+ + (('--umask', str(umask)) if umask else ())
+ + (('--lock-wait', str(lock_wait)) if lock_wait else ())
+ + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+ + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+ + (('--foreground',) if foreground else ())
+ + (('-o', options) if options else ())
+ + (('::'.join((repository, archive)),) if archive else (repository,))
+ + (mount_point,)
+ + (tuple(paths) if paths else ())
+ )
+
+ # Don't capture the output when foreground mode is used so that ctrl-C can work properly.
+ if foreground:
+ execute_command_without_capture(full_command, error_on_warnings=False)
+ return
+
+ execute_command(full_command, error_on_warnings=False)
diff --git a/borgmatic/borg/prune.py b/borgmatic/borg/prune.py
index ec5963f..2c4811e 100644
--- a/borgmatic/borg/prune.py
+++ b/borgmatic/borg/prune.py
@@ -49,6 +49,7 @@ def prune_archives(
'''
umask = storage_config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None)
+ extra_borg_options = storage_config.get('extra_borg_options', {}).get('prune', '')
full_command = (
(local_path, 'prune')
@@ -57,11 +58,16 @@ def prune_archives(
+ (('--umask', str(umask)) if umask else ())
+ (('--lock-wait', str(lock_wait)) if lock_wait else ())
+ (('--stats',) if not dry_run and logger.isEnabledFor(logging.INFO) else ())
- + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+ + (('--info', '--list') if logger.getEffectiveLevel() == logging.INFO else ())
+ (('--debug', '--list', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+ (('--dry-run',) if dry_run else ())
+ (('--stats',) if stats else ())
+ + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
+ (repository,)
)
- execute_command(full_command, output_log_level=logging.WARNING if stats else logging.INFO)
+ execute_command(
+ full_command,
+ output_log_level=logging.WARNING if stats else logging.INFO,
+ error_on_warnings=False,
+ )
diff --git a/borgmatic/borg/umount.py b/borgmatic/borg/umount.py
new file mode 100644
index 0000000..d703396
--- /dev/null
+++ b/borgmatic/borg/umount.py
@@ -0,0 +1,20 @@
+import logging
+
+from borgmatic.execute import execute_command
+
+logger = logging.getLogger(__name__)
+
+
+def unmount_archive(mount_point, local_path='borg'):
+ '''
+ Given a mounted filesystem mount point, and an optional local Borg paths, umount the filesystem
+ from the mount point.
+ '''
+ full_command = (
+ (local_path, 'umount')
+ + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+ + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+ + (mount_point,)
+ )
+
+ execute_command(full_command, error_on_warnings=True)
diff --git a/borgmatic/commands/arguments.py b/borgmatic/commands/arguments.py
index dd5cbeb..6303006 100644
--- a/borgmatic/commands/arguments.py
+++ b/borgmatic/commands/arguments.py
@@ -9,6 +9,8 @@ SUBPARSER_ALIASES = {
'create': ['--create', '-C'],
'check': ['--check', '-k'],
'extract': ['--extract', '-x'],
+ 'mount': ['--mount', '-m'],
+ 'umount': ['--umount', '-u'],
'restore': ['--restore', '-r'],
'list': ['--list', '-l'],
'info': ['--info', '-i'],
@@ -140,21 +142,21 @@ def parse_arguments(*unparsed_arguments):
type=int,
choices=range(-1, 3),
default=0,
- help='Display verbose progress to the console (from none to lots: 0, 1, or 2) or only errors (-1)',
+ help='Display verbose progress to the console (from only errors to very verbose: -1, 0, 1, or 2)',
)
global_group.add_argument(
'--syslog-verbosity',
type=int,
choices=range(-1, 3),
default=0,
- help='Log verbose progress to syslog (from none to lots: 0, 1, or 2) or only errors (-1). Ignored when console is interactive or --log-file is given',
+ help='Log verbose progress to syslog (from only errors to very verbose: -1, 0, 1, or 2). Ignored when console is interactive or --log-file is given',
)
global_group.add_argument(
'--log-file-verbosity',
type=int,
choices=range(-1, 3),
default=0,
- help='Log verbose progress to log file (from none to lots: 0, 1, or 2) or only errors (-1). Only used when --log-file is given',
+ help='Log verbose progress to log file (from only errors to very verbose: -1, 0, 1, or 2). Only used when --log-file is given',
)
global_group.add_argument(
'--log-file',
@@ -163,6 +165,13 @@ def parse_arguments(*unparsed_arguments):
help='Write log messages to this file instead of syslog',
)
global_group.add_argument(
+ '--override',
+ metavar='SECTION.OPTION=VALUE',
+ nargs='+',
+ dest='overrides',
+ help='One or more configuration file options to override with specified values',
+ )
+ global_group.add_argument(
'--version',
dest='version',
default=False,
@@ -172,9 +181,9 @@ def parse_arguments(*unparsed_arguments):
top_level_parser = ArgumentParser(
description='''
- A simple wrapper script for the Borg backup software that creates and prunes backups.
- If none of the action options are given, then borgmatic defaults to: prune, create, and
- check archives.
+ Simple, configuration-driven backup software for servers and workstations. If none of
+ the action options are given, then borgmatic defaults to: prune, create, and check
+ archives.
''',
parents=[global_parser],
)
@@ -265,6 +274,13 @@ def parse_arguments(*unparsed_arguments):
)
check_group = check_parser.add_argument_group('check arguments')
check_group.add_argument(
+ '--repair',
+ dest='repair',
+ default=False,
+ action='store_true',
+ help='Attempt to repair any inconsistencies found (experimental and only for interactive use)',
+ )
+ check_group.add_argument(
'--only',
metavar='CHECK',
choices=('repository', 'archives', 'data', 'extract'),
@@ -312,6 +328,60 @@ def parse_arguments(*unparsed_arguments):
'-h', '--help', action='help', help='Show this help message and exit'
)
+ mount_parser = subparsers.add_parser(
+ 'mount',
+ aliases=SUBPARSER_ALIASES['mount'],
+ help='Mount files from a named archive as a FUSE filesystem',
+ description='Mount a named archive as a FUSE filesystem',
+ add_help=False,
+ )
+ mount_group = mount_parser.add_argument_group('mount arguments')
+ mount_group.add_argument(
+ '--repository',
+ help='Path of repository to use, defaults to the configured repository if there is only one',
+ )
+ mount_group.add_argument('--archive', help='Name of archive to mount')
+ mount_group.add_argument(
+ '--mount-point',
+ metavar='PATH',
+ dest='mount_point',
+ help='Path where filesystem is to be mounted',
+ required=True,
+ )
+ mount_group.add_argument(
+ '--path',
+ metavar='PATH',
+ nargs='+',
+ dest='paths',
+ help='Paths to mount from archive, defaults to the entire archive',
+ )
+ mount_group.add_argument(
+ '--foreground',
+ dest='foreground',
+ default=False,
+ action='store_true',
+ help='Stay in foreground until ctrl-C is pressed',
+ )
+ mount_group.add_argument('--options', dest='options', help='Extra Borg mount options')
+ mount_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
+
+ umount_parser = subparsers.add_parser(
+ 'umount',
+ aliases=SUBPARSER_ALIASES['umount'],
+ help='Unmount a FUSE filesystem that was mounted with "borgmatic mount"',
+ description='Unmount a mounted FUSE filesystem',
+ add_help=False,
+ )
+ umount_group = umount_parser.add_argument_group('umount arguments')
+ umount_group.add_argument(
+ '--mount-point',
+ metavar='PATH',
+ dest='mount_point',
+ help='Path of filesystem to unmount',
+ required=True,
+ )
+ umount_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
+
restore_parser = subparsers.add_parser(
'restore',
aliases=SUBPARSER_ALIASES['restore'],
@@ -357,6 +427,13 @@ def parse_arguments(*unparsed_arguments):
)
list_group.add_argument('--archive', help='Name of archive to list')
list_group.add_argument(
+ '--path',
+ metavar='PATH',
+ nargs='+',
+ dest='paths',
+ help='Paths to list from archive, defaults to the entire archive',
+ )
+ list_group.add_argument(
'--short', default=False, action='store_true', help='Output only archive or path names'
)
list_group.add_argument('--format', help='Format for file listing')
diff --git a/borgmatic/commands/borgmatic.py b/borgmatic/commands/borgmatic.py
index b88066f..b39668a 100644
--- a/borgmatic/commands/borgmatic.py
+++ b/borgmatic/commands/borgmatic.py
@@ -15,10 +15,12 @@ from borgmatic.borg import extract as borg_extract
from borgmatic.borg import info as borg_info
from borgmatic.borg import init as borg_init
from borgmatic.borg import list as borg_list
+from borgmatic.borg import mount as borg_mount
from borgmatic.borg import prune as borg_prune
+from borgmatic.borg import umount as borg_umount
from borgmatic.commands.arguments import parse_arguments
from borgmatic.config import checks, collect, convert, validate
-from borgmatic.hooks import command, cronhub, cronitor, healthchecks, postgresql
+from borgmatic.hooks import command, dispatch, dump, monitor
from borgmatic.logger import configure_logging, should_do_markup
from borgmatic.signals import configure_signals
from borgmatic.verbosity import verbosity_to_log_level
@@ -50,18 +52,19 @@ def run_configuration(config_filename, config, arguments):
borg_environment.initialize(storage)
encountered_error = None
error_repository = ''
+ prune_create_or_check = {'prune', 'create', 'check'}.intersection(arguments)
- if 'create' in arguments:
- try:
- healthchecks.ping_healthchecks(
- hooks.get('healthchecks'), config_filename, global_arguments.dry_run, 'start'
- )
- cronitor.ping_cronitor(
- hooks.get('cronitor'), config_filename, global_arguments.dry_run, 'run'
- )
- cronhub.ping_cronhub(
- hooks.get('cronhub'), config_filename, global_arguments.dry_run, 'start'
+ try:
+ if prune_create_or_check:
+ dispatch.call_hooks(
+ 'ping_monitor',
+ hooks,
+ config_filename,
+ monitor.MONITOR_HOOK_NAMES,
+ monitor.State.START,
+ global_arguments.dry_run,
)
+ if 'create' in arguments:
command.execute_hook(
hooks.get('before_backup'),
hooks.get('umask'),
@@ -69,14 +72,19 @@ def run_configuration(config_filename, config, arguments):
'pre-backup',
global_arguments.dry_run,
)
- postgresql.dump_databases(
- hooks.get('postgresql_databases'), config_filename, global_arguments.dry_run
- )
- except (OSError, CalledProcessError) as error:
- encountered_error = error
- yield from make_error_log_records(
- '{}: Error running pre-backup hook'.format(config_filename), error
+ dispatch.call_hooks(
+ 'dump_databases',
+ hooks,
+ config_filename,
+ dump.DATABASE_HOOK_NAMES,
+ location,
+ global_arguments.dry_run,
)
+ except (OSError, CalledProcessError) as error:
+ encountered_error = error
+ yield from make_error_log_records(
+ '{}: Error running pre-backup hook'.format(config_filename), error
+ )
if not encountered_error:
for repository_path in location['repositories']:
@@ -99,34 +107,40 @@ def run_configuration(config_filename, config, arguments):
'{}: Error running actions for repository'.format(repository_path), error
)
- if 'create' in arguments and not encountered_error:
+ if not encountered_error:
try:
- postgresql.remove_database_dumps(
- hooks.get('postgresql_databases'), config_filename, global_arguments.dry_run
- )
- command.execute_hook(
- hooks.get('after_backup'),
- hooks.get('umask'),
- config_filename,
- 'post-backup',
- global_arguments.dry_run,
- )
- healthchecks.ping_healthchecks(
- hooks.get('healthchecks'), config_filename, global_arguments.dry_run
- )
- cronitor.ping_cronitor(
- hooks.get('cronitor'), config_filename, global_arguments.dry_run, 'complete'
- )
- cronhub.ping_cronhub(
- hooks.get('cronhub'), config_filename, global_arguments.dry_run, 'finish'
- )
+ if 'create' in arguments:
+ dispatch.call_hooks(
+ 'remove_database_dumps',
+ hooks,
+ config_filename,
+ dump.DATABASE_HOOK_NAMES,
+ location,
+ global_arguments.dry_run,
+ )
+ command.execute_hook(
+ hooks.get('after_backup'),
+ hooks.get('umask'),
+ config_filename,
+ 'post-backup',
+ global_arguments.dry_run,
+ )
+ if {'prune', 'create', 'check'}.intersection(arguments):
+ dispatch.call_hooks(
+ 'ping_monitor',
+ hooks,
+ config_filename,
+ monitor.MONITOR_HOOK_NAMES,
+ monitor.State.FINISH,
+ global_arguments.dry_run,
+ )
except (OSError, CalledProcessError) as error:
encountered_error = error
yield from make_error_log_records(
'{}: Error running post-backup hook'.format(config_filename), error
)
- if encountered_error:
+ if encountered_error and prune_create_or_check:
try:
command.execute_hook(
hooks.get('on_error'),
@@ -138,14 +152,13 @@ def run_configuration(config_filename, config, arguments):
error=encountered_error,
output=getattr(encountered_error, 'output', ''),
)
- healthchecks.ping_healthchecks(
- hooks.get('healthchecks'), config_filename, global_arguments.dry_run, 'fail'
- )
- cronitor.ping_cronitor(
- hooks.get('cronitor'), config_filename, global_arguments.dry_run, 'fail'
- )
- cronhub.ping_cronhub(
- hooks.get('cronhub'), config_filename, global_arguments.dry_run, 'fail'
+ dispatch.call_hooks(
+ 'ping_monitor',
+ hooks,
+ config_filename,
+ monitor.MONITOR_HOOK_NAMES,
+ monitor.State.FAIL,
+ global_arguments.dry_run,
)
except (OSError, CalledProcessError) as error:
yield from make_error_log_records(
@@ -182,6 +195,7 @@ def run_actions(
logger.info('{}: Initializing repository'.format(repository))
borg_init.initialize_repository(
repository,
+ storage,
arguments['init'].encryption_mode,
arguments['init'].append_only,
arguments['init'].storage_quota,
@@ -222,10 +236,13 @@ def run_actions(
consistency,
local_path=local_path,
remote_path=remote_path,
+ repair=arguments['check'].repair,
only_checks=arguments['check'].only,
)
if 'extract' in arguments:
- if arguments['extract'].repository is None or repository == arguments['extract'].repository:
+ if arguments['extract'].repository is None or validate.repositories_match(
+ repository, arguments['extract'].repository
+ ):
logger.info(
'{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
)
@@ -241,8 +258,32 @@ def run_actions(
destination_path=arguments['extract'].destination,
progress=arguments['extract'].progress,
)
+ if 'mount' in arguments:
+ if arguments['mount'].repository is None or validate.repositories_match(
+ repository, arguments['mount'].repository
+ ):
+ if arguments['mount'].archive:
+ logger.info(
+ '{}: Mounting archive {}'.format(repository, arguments['mount'].archive)
+ )
+ else:
+ logger.info('{}: Mounting repository'.format(repository))
+
+ borg_mount.mount_archive(
+ repository,
+ arguments['mount'].archive,
+ arguments['mount'].mount_point,
+ arguments['mount'].paths,
+ arguments['mount'].foreground,
+ arguments['mount'].options,
+ storage,
+ local_path=local_path,
+ remote_path=remote_path,
+ )
if 'restore' in arguments:
- if arguments['restore'].repository is None or repository == arguments['restore'].repository:
+ if arguments['restore'].repository is None or validate.repositories_match(
+ repository, arguments['restore'].repository
+ ):
logger.info(
'{}: Restoring databases from archive {}'.format(
repository, arguments['restore'].archive
@@ -254,33 +295,58 @@ def run_actions(
restore_names = []
# Extract dumps for the named databases from the archive.
- dump_patterns = postgresql.make_database_dump_patterns(restore_names)
+ dump_patterns = dispatch.call_hooks(
+ 'make_database_dump_patterns',
+ hooks,
+ repository,
+ dump.DATABASE_HOOK_NAMES,
+ location,
+ restore_names,
+ )
+
borg_extract.extract_archive(
global_arguments.dry_run,
repository,
arguments['restore'].archive,
- postgresql.convert_glob_patterns_to_borg_patterns(dump_patterns),
+ dump.convert_glob_patterns_to_borg_patterns(
+ dump.flatten_dump_patterns(dump_patterns, restore_names)
+ ),
location,
storage,
local_path=local_path,
remote_path=remote_path,
destination_path='/',
progress=arguments['restore'].progress,
+ # We don't want glob patterns that don't match to error.
+ error_on_warnings=False,
)
- # Map the restore names to the corresponding database configurations.
- databases = list(
- postgresql.get_database_configurations(
- hooks.get('postgresql_databases'),
- restore_names or postgresql.get_database_names_from_dumps(dump_patterns),
- )
+ # Map the restore names or detected dumps to the corresponding database configurations.
+ restore_databases = dump.get_per_hook_database_configurations(
+ hooks, restore_names, dump_patterns
)
# Finally, restore the databases and cleanup the dumps.
- postgresql.restore_database_dumps(databases, repository, global_arguments.dry_run)
- postgresql.remove_database_dumps(databases, repository, global_arguments.dry_run)
+ dispatch.call_hooks(
+ 'restore_database_dumps',
+ restore_databases,
+ repository,
+ dump.DATABASE_HOOK_NAMES,
+ location,
+ global_arguments.dry_run,
+ )
+ dispatch.call_hooks(
+ 'remove_database_dumps',
+ restore_databases,
+ repository,
+ dump.DATABASE_HOOK_NAMES,
+ location,
+ global_arguments.dry_run,
+ )
if 'list' in arguments:
- if arguments['list'].repository is None or repository == arguments['list'].repository:
+ if arguments['list'].repository is None or validate.repositories_match(
+ repository, arguments['list'].repository
+ ):
logger.info('{}: Listing archives'.format(repository))
json_output = borg_list.list_archives(
repository,
@@ -292,7 +358,9 @@ def run_actions(
if json_output:
yield json.loads(json_output)
if 'info' in arguments:
- if arguments['info'].repository is None or repository == arguments['info'].repository:
+ if arguments['info'].repository is None or validate.repositories_match(
+ repository, arguments['info'].repository
+ ):
logger.info('{}: Displaying summary info for archives'.format(repository))
json_output = borg_info.display_archives_info(
repository,
@@ -305,7 +373,7 @@ def run_actions(
yield json.loads(json_output)
-def load_configurations(config_filenames):
+def load_configurations(config_filenames, overrides=None):
'''
Given a sequence of configuration filenames, load and validate each configuration file. Return
the results as a tuple of: dict of configuration filename to corresponding parsed configuration,
@@ -319,7 +387,7 @@ def load_configurations(config_filenames):
for config_filename in config_filenames:
try:
configs[config_filename] = validate.parse_configuration(
- config_filename, validate.schema_filename()
+ config_filename, validate.schema_filename(), overrides
)
except (ValueError, OSError, validate.Validation_error) as error:
logs.extend(
@@ -340,39 +408,55 @@ def load_configurations(config_filenames):
return (configs, logs)
+def log_record(suppress_log=False, **kwargs):
+ '''
+ Create a log record based on the given makeLogRecord() arguments, one of which must be
+ named "levelno". Log the record (unless suppress log is set) and return it.
+ '''
+ record = logging.makeLogRecord(kwargs)
+ if suppress_log:
+ return record
+
+ logger.handle(record)
+ return record
+
+
def make_error_log_records(message, error=None):
'''
Given error message text and an optional exception object, yield a series of logging.LogRecord
- instances with error summary information.
+ instances with error summary information. As a side effect, log each record.
'''
if not error:
- yield logging.makeLogRecord(
- dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
- )
+ yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
return
try:
raise error
except CalledProcessError as error:
- yield logging.makeLogRecord(
- dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
- )
+ yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
if error.output:
- yield logging.makeLogRecord(
- dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error.output)
+ # Suppress these logs for now and save full error output for the log summary at the end.
+ yield log_record(
+ levelno=logging.CRITICAL, levelname='CRITICAL', msg=error.output, suppress_log=True
)
- yield logging.makeLogRecord(dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error))
+ yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error)
except (ValueError, OSError) as error:
- yield logging.makeLogRecord(
- dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
- )
- yield logging.makeLogRecord(dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error))
+ yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
+ yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error)
except: # noqa: E722
# Raising above only as a means of determining the error type. Swallow the exception here
# because we don't want the exception to propagate out of this function.
pass
+def get_local_path(configs):
+ '''
+ Arbitrarily return the local path from the first configuration dict. Default to "borg" if not
+ set.
+ '''
+ return next(iter(configs.values())).get('location', {}).get('local_path', 'borg')
+
+
def collect_configuration_run_summary_logs(configs, arguments):
'''
Given a dict of configuration filename to corresponding parsed configuration, and parsed
@@ -388,6 +472,8 @@ def collect_configuration_run_summary_logs(configs, arguments):
repository = arguments['extract'].repository
elif 'list' in arguments and arguments['list'].archive:
repository = arguments['list'].repository
+ elif 'mount' in arguments:
+ repository = arguments['mount'].repository
else:
repository = None
@@ -441,6 +527,15 @@ def collect_configuration_run_summary_logs(configs, arguments):
if results:
json_results.extend(results)
+ if 'umount' in arguments:
+ logger.info('Unmounting mount point {}'.format(arguments['umount'].mount_point))
+ try:
+ borg_umount.unmount_archive(
+ mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs)
+ )
+ except (CalledProcessError, OSError) as error:
+ yield from make_error_log_records('Error unmounting mount point', error)
+
if json_results:
sys.stdout.write(json.dumps(json_results))
@@ -490,7 +585,7 @@ def main(): # pragma: no cover
sys.exit(0)
config_filenames = tuple(collect.collect_config_filenames(global_arguments.config_paths))
- configs, parse_logs = load_configurations(config_filenames)
+ configs, parse_logs = load_configurations(config_filenames, global_arguments.overrides)
colorama.init(autoreset=True, strip=not should_do_markup(global_arguments.no_color, configs))
try:
@@ -508,15 +603,18 @@ def main(): # pragma: no cover
logger.debug('Ensuring legacy configuration is upgraded')
convert.guard_configuration_upgraded(LEGACY_CONFIG_PATH, config_filenames)
- summary_logs = list(collect_configuration_run_summary_logs(configs, arguments))
+ summary_logs = parse_logs + list(collect_configuration_run_summary_logs(configs, arguments))
+ summary_logs_max_level = max(log.levelno for log in summary_logs)
+
+ for message in ('', 'summary:'):
+ log_record(
+ levelno=summary_logs_max_level,
+ levelname=logging.getLevelName(summary_logs_max_level),
+ msg=message,
+ )
- logger.info('')
- logger.info('summary:')
- [
+ for log in summary_logs:
logger.handle(log)
- for log in parse_logs + summary_logs
- if log.levelno >= logger.getEffectiveLevel()
- ]
- if any(log.levelno == logging.CRITICAL for log in summary_logs):
+ if summary_logs_max_level >= logging.CRITICAL:
exit_with_help_link()
diff --git a/borgmatic/config/override.py b/borgmatic/config/override.py
new file mode 100644
index 0000000..eb86077
--- /dev/null
+++ b/borgmatic/config/override.py
@@ -0,0 +1,71 @@
+import io
+
+import ruamel.yaml
+
+
+def set_values(config, keys, value):
+ '''
+ Given a hierarchy of configuration dicts, a sequence of parsed key strings, and a string value,
+ descend into the hierarchy based on the keys to set the value into the right place.
+ '''
+ if not keys:
+ return
+
+ first_key = keys[0]
+ if len(keys) == 1:
+ config[first_key] = value
+ return
+
+ if first_key not in config:
+ config[first_key] = {}
+
+ set_values(config[first_key], keys[1:], value)
+
+
+def convert_value_type(value):
+ '''
+ Given a string value, determine its logical type (string, boolean, integer, etc.), and return it
+ converted to that type.
+ '''
+ return ruamel.yaml.YAML(typ='safe').load(io.StringIO(value))
+
+
+def parse_overrides(raw_overrides):
+ '''
+ Given a sequence of configuration file override strings in the form of "section.option=value",
+ parse and return a sequence of tuples (keys, values), where keys is a sequence of strings. For
+ instance, given the following raw overrides:
+
+ ['section.my_option=value1', 'section.other_option=value2']
+
+ ... return this:
+
+ (
+ (('section', 'my_option'), 'value1'),
+ (('section', 'other_option'), 'value2'),
+ )
+
+ Raise ValueError if an override can't be parsed.
+ '''
+ if not raw_overrides:
+ return ()
+
+ try:
+ return tuple(
+ (tuple(raw_keys.split('.')), convert_value_type(value))
+ for raw_override in raw_overrides
+ for raw_keys, value in (raw_override.split('=', 1),)
+ )
+ except ValueError:
+ raise ValueError('Invalid override. Make sure you use the form: SECTION.OPTION=VALUE')
+
+
+def apply_overrides(config, raw_overrides):
+ '''
+ Given a sequence of configuration file override strings in the form of "section.option=value"
+    and a configuration dict, parse each override and set it into the configuration dict.
+ '''
+ overrides = parse_overrides(raw_overrides)
+
+ for (keys, value) in overrides:
+ set_values(config, keys, value)
diff --git a/borgmatic/config/schema.yaml b/borgmatic/config/schema.yaml
index a1825c2..c58ebde 100644
--- a/borgmatic/config/schema.yaml
+++ b/borgmatic/config/schema.yaml
@@ -137,6 +137,14 @@ map:
desc: |
Exclude files with the NODUMP flag. Defaults to false.
example: true
+ borgmatic_source_directory:
+ type: str
+ desc: |
+ Path for additional source files used for temporary internal state like
+ borgmatic database dumps. Note that changing this path prevents "borgmatic
+ restore" from finding any database dumps created before the change. Defaults
+ to ~/.borgmatic
+ example: /tmp/borgmatic
storage:
desc: |
Repository storage options. See
@@ -245,6 +253,29 @@ map:
Bypass Borg error about a previously unknown unencrypted repository. Defaults to
false.
example: true
+ extra_borg_options:
+ map:
+ init:
+ type: str
+ desc: Extra command-line options to pass to "borg init".
+ example: "--make-parent-dirs"
+ prune:
+ type: str
+ desc: Extra command-line options to pass to "borg prune".
+ example: "--save-space"
+ create:
+ type: str
+ desc: Extra command-line options to pass to "borg create".
+ example: "--no-files-cache"
+ check:
+ type: str
+ desc: Extra command-line options to pass to "borg check".
+ example: "--save-space"
+ desc: |
+ Additional options to pass directly to particular Borg commands, handy for Borg
+ options that borgmatic does not yet support natively. Note that borgmatic does
+ not perform any validation on these options. Running borgmatic with
+ "--verbosity 2" shows the exact Borg command-line invocation.
retention:
desc: |
Retention policy for how many backups to keep in each category. See
@@ -434,6 +465,54 @@ map:
directories at runtime, backed up, and then removed afterwards. Requires
pg_dump/pg_dumpall/pg_restore commands. See
https://www.postgresql.org/docs/current/app-pgdump.html for details.
+ mysql_databases:
+ seq:
+ - map:
+ name:
+ required: true
+ type: str
+ desc: |
+ Database name (required if using this hook). Or "all" to dump all
+ databases on the host.
+ example: users
+ hostname:
+ type: str
+ desc: |
+ Database hostname to connect to. Defaults to connecting via local
+ Unix socket.
+ example: database.example.org
+ port:
+ type: int
+ desc: Port to connect to. Defaults to 3306.
+ example: 3307
+ username:
+ type: str
+ desc: |
+ Username with which to connect to the database. Defaults to the
+ username of the current user.
+ example: dbuser
+ password:
+ type: str
+ desc: |
+ Password with which to connect to the database. Omitting a password
+ will only work if MySQL is configured to trust the configured
+ username without a password.
+ example: trustsome1
+ options:
+ type: str
+ desc: |
+ Additional mysqldump options to pass directly to the dump command,
+ without performing any validation on them. See
+ https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html or
+ https://mariadb.com/kb/en/library/mysqldump/ for details.
+ example: --skip-comments
+ desc: |
+ List of one or more MySQL/MariaDB databases to dump before creating a backup,
+ run once per configuration file. The database dumps are added to your source
+ directories at runtime, backed up, and then removed afterwards. Requires
+ mysqldump/mysql commands (from either MySQL or MariaDB). See
+ https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html or
+ https://mariadb.com/kb/en/library/mysqldump/ for details.
healthchecks:
type: str
desc: |
diff --git a/borgmatic/config/validate.py b/borgmatic/config/validate.py
index 8c9e8e9..b7d34a9 100644
--- a/borgmatic/config/validate.py
+++ b/borgmatic/config/validate.py
@@ -1,11 +1,12 @@
import logging
+import os
import pkg_resources
import pykwalify.core
import pykwalify.errors
import ruamel.yaml
-from borgmatic.config import load
+from borgmatic.config import load, override
def schema_filename():
@@ -81,11 +82,12 @@ def remove_examples(schema):
return schema
-def parse_configuration(config_filename, schema_filename):
+def parse_configuration(config_filename, schema_filename, overrides=None):
'''
- Given the path to a config filename in YAML format and the path to a schema filename in
- pykwalify YAML schema format, return the parsed configuration as a data structure of nested
- dicts and lists corresponding to the schema. Example return value:
+ Given the path to a config filename in YAML format, the path to a schema filename in pykwalify
+ YAML schema format, a sequence of configuration file override strings in the form of
+ "section.option=value", return the parsed configuration as a data structure of nested dicts and
+ lists corresponding to the schema. Example return value:
{'location': {'source_directories': ['/home', '/etc'], 'repository': 'hostname.borg'},
'retention': {'keep_daily': 7}, 'consistency': {'checks': ['repository', 'archives']}}
@@ -101,6 +103,8 @@ def parse_configuration(config_filename, schema_filename):
except (ruamel.yaml.error.YAMLError, RecursionError) as error:
raise Validation_error(config_filename, (str(error),))
+ override.apply_overrides(config, overrides)
+
validator = pykwalify.core.Core(source_data=config, schema_data=remove_examples(schema))
parsed_result = validator.validate(raise_exception=False)
@@ -112,6 +116,24 @@ def parse_configuration(config_filename, schema_filename):
return parsed_result
+def normalize_repository_path(repository):
+ '''
+ Given a repository path, return the absolute path of it (for local repositories).
+ '''
+ # A colon in the repository indicates it's a remote repository. Bail.
+ if ':' in repository:
+ return repository
+
+ return os.path.abspath(repository)
+
+
+def repositories_match(first, second):
+ '''
+ Given two repository paths (relative and/or absolute), return whether they match.
+ '''
+ return normalize_repository_path(first) == normalize_repository_path(second)
+
+
def guard_configuration_contains_repository(repository, configurations):
'''
Given a repository path and a dict mapping from config filename to corresponding parsed config
@@ -133,9 +155,7 @@ def guard_configuration_contains_repository(repository, configurations):
if count > 1:
raise ValueError(
- 'Can\'t determine which repository to use. Use --repository option to disambiguate'.format(
- repository
- )
+ 'Can\'t determine which repository to use. Use --repository option to disambiguate'
)
return
@@ -145,7 +165,7 @@ def guard_configuration_contains_repository(repository, configurations):
config_repository
for config in configurations.values()
for config_repository in config['location']['repositories']
- if repository == config_repository
+ if repositories_match(repository, config_repository)
)
)
diff --git a/borgmatic/execute.py b/borgmatic/execute.py
index c628b4a..0d5047c 100644
--- a/borgmatic/execute.py
+++ b/borgmatic/execute.py
@@ -9,32 +9,28 @@ ERROR_OUTPUT_MAX_LINE_COUNT = 25
BORG_ERROR_EXIT_CODE = 2
-def exit_code_indicates_error(command, exit_code, error_on_warnings=False):
+def exit_code_indicates_error(command, exit_code, error_on_warnings=True):
'''
Return True if the given exit code from running the command corresponds to an error.
+ If error on warnings is False, then treat exit code 1 as a warning instead of an error.
'''
- # If we're running something other than Borg, treat all non-zero exit codes as errors.
- if 'borg' in command[0] and not error_on_warnings:
- return bool(exit_code >= BORG_ERROR_EXIT_CODE)
+ if error_on_warnings:
+ return bool(exit_code != 0)
- return bool(exit_code != 0)
+ return bool(exit_code >= BORG_ERROR_EXIT_CODE)
-def execute_and_log_output(
- full_command, output_log_level, shell, environment, working_directory, error_on_warnings
-):
+def log_output(command, process, output_buffer, output_log_level, error_on_warnings):
+ '''
+ Given a command already executed, its process opened by subprocess.Popen(), and the process'
+ relevant output buffer (stderr or stdout), log its output with the requested log level.
+    Additionally, raise a CalledProcessError if the process exits with an error (or a warning,
+ if error on warnings is True).
+ '''
last_lines = []
- process = subprocess.Popen(
- full_command,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- shell=shell,
- env=environment,
- cwd=working_directory,
- )
while process.poll() is None:
- line = process.stdout.readline().rstrip().decode()
+ line = output_buffer.readline().rstrip().decode()
if not line:
continue
@@ -46,41 +42,48 @@ def execute_and_log_output(
logger.log(output_log_level, line)
- remaining_output = process.stdout.read().rstrip().decode()
+ remaining_output = output_buffer.read().rstrip().decode()
if remaining_output: # pragma: no cover
logger.log(output_log_level, remaining_output)
exit_code = process.poll()
- if exit_code_indicates_error(full_command, exit_code, error_on_warnings):
+ if exit_code_indicates_error(command, exit_code, error_on_warnings):
# If an error occurs, include its output in the raised exception so that we don't
# inadvertently hide error output.
if len(last_lines) == ERROR_OUTPUT_MAX_LINE_COUNT:
last_lines.insert(0, '...')
- raise subprocess.CalledProcessError(
- exit_code, ' '.join(full_command), '\n'.join(last_lines)
- )
+ raise subprocess.CalledProcessError(exit_code, ' '.join(command), '\n'.join(last_lines))
def execute_command(
full_command,
output_log_level=logging.INFO,
+ output_file=None,
+ input_file=None,
shell=False,
extra_environment=None,
working_directory=None,
- error_on_warnings=False,
+ error_on_warnings=True,
):
'''
Execute the given command (a sequence of command/argument strings) and log its output at the
- given log level. If output log level is None, instead capture and return the output. If
- shell is True, execute the command within a shell. If an extra environment dict is given, then
- use it to augment the current environment, and pass the result into the command. If a working
- directory is given, use that as the present working directory when running the command.
+ given log level. If output log level is None, instead capture and return the output. If an
+ open output file object is given, then write stdout to the file and only log stderr (but only
+ if an output log level is set). If an open input file object is given, then read stdin from the
+ file. If shell is True, execute the command within a shell. If an extra environment dict is
+ given, then use it to augment the current environment, and pass the result into the command. If
+ a working directory is given, use that as the present working directory when running the
+ command. If error on warnings is False, then treat exit code 1 as a warning instead of an error.
Raise subprocesses.CalledProcessError if an error occurs while running the command.
'''
- logger.debug(' '.join(full_command))
+ logger.debug(
+ ' '.join(full_command)
+ + (' < {}'.format(input_file.name) if input_file else '')
+ + (' > {}'.format(output_file.name) if output_file else '')
+ )
environment = {**os.environ, **extra_environment} if extra_environment else None
if output_log_level is None:
@@ -89,24 +92,32 @@ def execute_command(
)
return output.decode() if output is not None else None
else:
- execute_and_log_output(
+ process = subprocess.Popen(
full_command,
- output_log_level,
+ stdin=input_file,
+ stdout=output_file or subprocess.PIPE,
+ stderr=subprocess.PIPE if output_file else subprocess.STDOUT,
shell=shell,
- environment=environment,
- working_directory=working_directory,
- error_on_warnings=error_on_warnings,
+ env=environment,
+ cwd=working_directory,
+ )
+ log_output(
+ full_command,
+ process,
+ process.stderr if output_file else process.stdout,
+ output_log_level,
+ error_on_warnings,
)
-def execute_command_without_capture(full_command, working_directory=None, error_on_warnings=False):
+def execute_command_without_capture(full_command, working_directory=None, error_on_warnings=True):
'''
Execute the given command (a sequence of command/argument strings), but don't capture or log its
output in any way. This is necessary for commands that monkey with the terminal (e.g. progress
display) or provide interactive prompts.
If a working directory is given, use that as the present working directory when running the
- command.
+ command. If error on warnings is False, then treat exit code 1 as a warning instead of an error.
'''
logger.debug(' '.join(full_command))
diff --git a/borgmatic/hooks/cronhub.py b/borgmatic/hooks/cronhub.py
index 480bb45..a0d0ac9 100644
--- a/borgmatic/hooks/cronhub.py
+++ b/borgmatic/hooks/cronhub.py
@@ -2,23 +2,29 @@ import logging
import requests
+from borgmatic.hooks import monitor
+
logger = logging.getLogger(__name__)
+MONITOR_STATE_TO_CRONHUB = {
+ monitor.State.START: 'start',
+ monitor.State.FINISH: 'finish',
+ monitor.State.FAIL: 'fail',
+}
+
-def ping_cronhub(ping_url, config_filename, dry_run, state):
+def ping_monitor(ping_url, config_filename, state, dry_run):
'''
- Ping the given Cronhub URL, substituting in the state string. Use the given configuration
+ Ping the given Cronhub URL, modified with the monitor.State. Use the given configuration
filename in any log entries. If this is a dry run, then don't actually ping anything.
'''
- if not ping_url:
- logger.debug('{}: No Cronhub hook set'.format(config_filename))
- return
-
dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
- formatted_state = '/{}/'.format(state)
+ formatted_state = '/{}/'.format(MONITOR_STATE_TO_CRONHUB[state])
ping_url = ping_url.replace('/start/', formatted_state).replace('/ping/', formatted_state)
- logger.info('{}: Pinging Cronhub {}{}'.format(config_filename, state, dry_run_label))
+ logger.info(
+ '{}: Pinging Cronhub {}{}'.format(config_filename, state.name.lower(), dry_run_label)
+ )
logger.debug('{}: Using Cronhub ping URL {}'.format(config_filename, ping_url))
if not dry_run:
diff --git a/borgmatic/hooks/cronitor.py b/borgmatic/hooks/cronitor.py
index 4bcc0d4..65ad109 100644
--- a/borgmatic/hooks/cronitor.py
+++ b/borgmatic/hooks/cronitor.py
@@ -2,22 +2,28 @@ import logging
import requests
+from borgmatic.hooks import monitor
+
logger = logging.getLogger(__name__)
+MONITOR_STATE_TO_CRONITOR = {
+ monitor.State.START: 'run',
+ monitor.State.FINISH: 'complete',
+ monitor.State.FAIL: 'fail',
+}
+
-def ping_cronitor(ping_url, config_filename, dry_run, append):
+def ping_monitor(ping_url, config_filename, state, dry_run):
'''
- Ping the given Cronitor URL, appending the append string. Use the given configuration filename
- in any log entries. If this is a dry run, then don't actually ping anything.
+ Ping the given Cronitor URL, modified with the monitor.State. Use the given configuration
+ filename in any log entries. If this is a dry run, then don't actually ping anything.
'''
- if not ping_url:
- logger.debug('{}: No Cronitor hook set'.format(config_filename))
- return
-
dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
- ping_url = '{}/{}'.format(ping_url, append)
+ ping_url = '{}/{}'.format(ping_url, MONITOR_STATE_TO_CRONITOR[state])
- logger.info('{}: Pinging Cronitor {}{}'.format(config_filename, append, dry_run_label))
+ logger.info(
+ '{}: Pinging Cronitor {}{}'.format(config_filename, state.name.lower(), dry_run_label)
+ )
logger.debug('{}: Using Cronitor ping URL {}'.format(config_filename, ping_url))
if not dry_run:
diff --git a/borgmatic/hooks/dispatch.py b/borgmatic/hooks/dispatch.py
new file mode 100644
index 0000000..206b0d1
--- /dev/null
+++ b/borgmatic/hooks/dispatch.py
@@ -0,0 +1,61 @@
+import logging
+
+from borgmatic.hooks import cronhub, cronitor, healthchecks, mysql, postgresql
+
+logger = logging.getLogger(__name__)
+
+HOOK_NAME_TO_MODULE = {
+ 'healthchecks': healthchecks,
+ 'cronitor': cronitor,
+ 'cronhub': cronhub,
+ 'postgresql_databases': postgresql,
+ 'mysql_databases': mysql,
+}
+
+
+def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs):
+ '''
+ Given the hooks configuration dict and a prefix to use in log entries, call the requested
+ function of the Python module corresponding to the given hook name. Supply that call with the
+ configuration for this hook, the log prefix, and any given args and kwargs. Return any return
+ value.
+
+ If the hook name is not present in the hooks configuration, then bail without calling anything.
+
+ Raise ValueError if the hook name is unknown.
+ Raise AttributeError if the function name is not found in the module.
+ Raise anything else that the called function raises.
+ '''
+ config = hooks.get(hook_name)
+ if not config:
+ logger.debug('{}: No {} hook configured.'.format(log_prefix, hook_name))
+ return
+
+ try:
+ module = HOOK_NAME_TO_MODULE[hook_name]
+ except KeyError:
+ raise ValueError('Unknown hook name: {}'.format(hook_name))
+
+ logger.debug('{}: Calling {} hook function {}'.format(log_prefix, hook_name, function_name))
+ return getattr(module, function_name)(config, log_prefix, *args, **kwargs)
+
+
+def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
+ '''
+ Given the hooks configuration dict and a prefix to use in log entries, call the requested
+ function of the Python module corresponding to each given hook name. Supply each call with the
+ configuration for that hook, the log prefix, and any given args and kwargs. Collect any return
+ values into a dict from hook name to return value.
+
+ If the hook name is not present in the hooks configuration, then don't call the function for it,
+ and omit it from the return values.
+
+ Raise ValueError if the hook name is unknown.
+ Raise AttributeError if the function name is not found in the module.
+ Raise anything else that a called function raises. An error stops calls to subsequent functions.
+ '''
+ return {
+ hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs)
+ for hook_name in hook_names
+ if hook_name in hooks
+ }
diff --git a/borgmatic/hooks/dump.py b/borgmatic/hooks/dump.py
new file mode 100644
index 0000000..54db1d2
--- /dev/null
+++ b/borgmatic/hooks/dump.py
@@ -0,0 +1,175 @@
+import glob
+import logging
+import os
+
+from borgmatic.borg.create import DEFAULT_BORGMATIC_SOURCE_DIRECTORY
+
+logger = logging.getLogger(__name__)
+
+DATABASE_HOOK_NAMES = ('postgresql_databases', 'mysql_databases')
+
+
+def make_database_dump_path(borgmatic_source_directory, database_hook_name):
+ '''
+ Given a borgmatic source directory (or None) and a database hook name, construct a database dump
+ path.
+ '''
+ if not borgmatic_source_directory:
+ borgmatic_source_directory = DEFAULT_BORGMATIC_SOURCE_DIRECTORY
+
+ return os.path.join(borgmatic_source_directory, database_hook_name)
+
+
+def make_database_dump_filename(dump_path, name, hostname=None):
+ '''
+ Based on the given dump directory path, database name, and hostname, return a filename to use
+ for the database dump. The hostname defaults to localhost.
+
+ Raise ValueError if the database name is invalid.
+ '''
+ if os.path.sep in name:
+ raise ValueError('Invalid database name {}'.format(name))
+
+ return os.path.join(os.path.expanduser(dump_path), hostname or 'localhost', name)
+
+
+def flatten_dump_patterns(dump_patterns, names):
+ '''
+ Given a dict from a database hook name to glob patterns matching the dumps for the named
+ databases, flatten out all the glob patterns into a single sequence, and return it.
+
+ Raise ValueError if there are no resulting glob patterns, which indicates that databases are not
+ configured in borgmatic's configuration.
+ '''
+ flattened = [pattern for patterns in dump_patterns.values() for pattern in patterns]
+
+ if not flattened:
+ raise ValueError(
+ 'Cannot restore database(s) {} missing from borgmatic\'s configuration'.format(
+ ', '.join(names) or '"all"'
+ )
+ )
+
+ return flattened
+
+
+def remove_database_dumps(dump_path, databases, database_type_name, log_prefix, dry_run):
+ '''
+ Remove the database dumps for the given databases in the dump directory path. The databases are
+ supplied as a sequence of dicts, one dict describing each database as per the configuration
+ schema. Use the name of the database type and the log prefix in any log entries. If this is a
+ dry run, then don't actually remove anything.
+ '''
+ if not databases:
+ logger.debug('{}: No {} databases configured'.format(log_prefix, database_type_name))
+ return
+
+ dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
+
+ logger.info(
+ '{}: Removing {} database dumps{}'.format(log_prefix, database_type_name, dry_run_label)
+ )
+
+ for database in databases:
+ dump_filename = make_database_dump_filename(
+ dump_path, database['name'], database.get('hostname')
+ )
+
+ logger.debug(
+ '{}: Removing {} database dump {} from {}{}'.format(
+ log_prefix, database_type_name, database['name'], dump_filename, dry_run_label
+ )
+ )
+ if dry_run:
+ continue
+
+ os.remove(dump_filename)
+ dump_file_dir = os.path.dirname(dump_filename)
+
+ if len(os.listdir(dump_file_dir)) == 0:
+ os.rmdir(dump_file_dir)
+
+
+def convert_glob_patterns_to_borg_patterns(patterns):
+ '''
+ Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive
+ patterns like "sh:etc/*".
+ '''
+ return ['sh:{}'.format(pattern.lstrip(os.path.sep)) for pattern in patterns]
+
+
+def get_database_names_from_dumps(patterns):
+ '''
+ Given a sequence of database dump patterns, find the corresponding database dumps on disk and
+ return the database names from their filenames.
+ '''
+ return [os.path.basename(dump_path) for pattern in patterns for dump_path in glob.glob(pattern)]
+
+
+def get_database_configurations(databases, names):
+ '''
+ Given the full database configuration dicts as per the configuration schema, and a sequence of
+ database names, filter down and yield the configuration for just the named databases.
+ Additionally, if a database configuration is named "all", project out that configuration for
+ each named database.
+ '''
+ named_databases = {database['name']: database for database in databases}
+
+ for name in names:
+ database = named_databases.get(name)
+ if database:
+ yield database
+ continue
+
+ if 'all' in named_databases:
+ yield {**named_databases['all'], **{'name': name}}
+ continue
+
+
+def get_per_hook_database_configurations(hooks, names, dump_patterns):
+ '''
+ Given the hooks configuration dict as per the configuration schema, a sequence of database
+ names to restore, and a dict from database hook name to glob patterns for matching dumps,
+ filter down the configuration for just the named databases.
+
+ If there are no named databases given, then find the corresponding database dumps on disk and
+ use the database names from their filenames. Additionally, if a database configuration is named
+ "all", project out that configuration for each named database.
+
+ Return the results as a dict from database hook name to a sequence of database configuration
+ dicts for that database type.
+
+ Raise ValueError if one of the database names cannot be matched to a database in borgmatic's
+ database configuration.
+ '''
+ hook_databases = {
+ hook_name: list(
+ get_database_configurations(
+ hooks.get(hook_name),
+ names or get_database_names_from_dumps(dump_patterns[hook_name]),
+ )
+ )
+ for hook_name in DATABASE_HOOK_NAMES
+ if hook_name in hooks
+ }
+
+ if not names or 'all' in names:
+ if not any(hook_databases.values()):
+ raise ValueError(
+ 'Cannot restore database "all", as there are no database dumps in the archive'
+ )
+
+ return hook_databases
+
+ found_names = {
+ database['name'] for databases in hook_databases.values() for database in databases
+ }
+ missing_names = sorted(set(names) - found_names)
+ if missing_names:
+ raise ValueError(
+ 'Cannot restore database(s) {} missing from borgmatic\'s configuration'.format(
+ ', '.join(missing_names)
+ )
+ )
+
+ return hook_databases
diff --git a/borgmatic/hooks/healthchecks.py b/borgmatic/hooks/healthchecks.py
index 829e80d..a116205 100644
--- a/borgmatic/hooks/healthchecks.py
+++ b/borgmatic/hooks/healthchecks.py
@@ -2,18 +2,79 @@ import logging
import requests
+from borgmatic.hooks import monitor
+
logger = logging.getLogger(__name__)
+MONITOR_STATE_TO_HEALTHCHECKS = {
+ monitor.State.START: 'start',
+ monitor.State.FINISH: None, # Healthchecks doesn't append to the URL for the finished state.
+ monitor.State.FAIL: 'fail',
+}
+
+PAYLOAD_TRUNCATION_INDICATOR = '...\n'
+PAYLOAD_LIMIT_BYTES = 10 * 1024 - len(PAYLOAD_TRUNCATION_INDICATOR)
+
+
+class Forgetful_buffering_handler(logging.Handler):
+ '''
+ A buffering log handler that stores log messages in memory, and throws away messages (oldest
+ first) once a particular capacity in bytes is reached.
+ '''
+
+ def __init__(self, byte_capacity):
+ super().__init__()
+
+ self.byte_capacity = byte_capacity
+ self.byte_count = 0
+ self.buffer = []
+ self.forgot = False
+
+ def emit(self, record):
+ message = record.getMessage() + '\n'
+ self.byte_count += len(message)
+ self.buffer.append(message)
+
+ while self.byte_count > self.byte_capacity and self.buffer:
+ self.byte_count -= len(self.buffer[0])
+ self.buffer.pop(0)
+ self.forgot = True
+
-def ping_healthchecks(ping_url_or_uuid, config_filename, dry_run, append=None):
+def format_buffered_logs_for_payload():
'''
- Ping the given Healthchecks URL or UUID, appending the append string if any. Use the given
+ Get the handler previously added to the root logger, and slurp buffered logs out of it to
+ send to Healthchecks.
+ '''
+ try:
+ buffering_handler = next(
+ handler
+ for handler in logging.getLogger().handlers
+ if isinstance(handler, Forgetful_buffering_handler)
+ )
+ except StopIteration:
+ # No handler means no payload.
+ return ''
+
+ payload = ''.join(message for message in buffering_handler.buffer)
+
+ if buffering_handler.forgot:
+ return PAYLOAD_TRUNCATION_INDICATOR + payload
+
+ return payload
+
+
+def ping_monitor(ping_url_or_uuid, config_filename, state, dry_run):
+ '''
+ Ping the given Healthchecks URL or UUID, modified with the monitor.State. Use the given
configuration filename in any log entries. If this is a dry run, then don't actually ping
anything.
'''
- if not ping_url_or_uuid:
- logger.debug('{}: No Healthchecks hook set'.format(config_filename))
- return
+ if state is monitor.State.START:
+ # Add a handler to the root logger that stores in memory the most recent logs emitted. That
+ # way, we can send them all to Healthchecks upon a finish or failure state.
+ logging.getLogger().addHandler(Forgetful_buffering_handler(PAYLOAD_LIMIT_BYTES))
+ payload = ''
ping_url = (
ping_url_or_uuid
@@ -22,16 +83,18 @@ def ping_healthchecks(ping_url_or_uuid, config_filename, dry_run, append=None):
)
dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
- if append:
- ping_url = '{}/{}'.format(ping_url, append)
+ healthchecks_state = MONITOR_STATE_TO_HEALTHCHECKS.get(state)
+ if healthchecks_state:
+ ping_url = '{}/{}'.format(ping_url, healthchecks_state)
logger.info(
- '{}: Pinging Healthchecks{}{}'.format(
- config_filename, ' ' + append if append else '', dry_run_label
- )
+ '{}: Pinging Healthchecks {}{}'.format(config_filename, state.name.lower(), dry_run_label)
)
logger.debug('{}: Using Healthchecks ping URL {}'.format(config_filename, ping_url))
+ if state in (monitor.State.FINISH, monitor.State.FAIL):
+ payload = format_buffered_logs_for_payload()
+
if not dry_run:
logging.getLogger('urllib3').setLevel(logging.ERROR)
- requests.get(ping_url)
+ requests.post(ping_url, data=payload.encode('utf-8'))
diff --git a/borgmatic/hooks/monitor.py b/borgmatic/hooks/monitor.py
new file mode 100644
index 0000000..aee2b8f
--- /dev/null
+++ b/borgmatic/hooks/monitor.py
@@ -0,0 +1,9 @@
+from enum import Enum
+
+MONITOR_HOOK_NAMES = ('healthchecks', 'cronitor', 'cronhub')
+
+
+class State(Enum):
+ START = 1
+ FINISH = 2
+ FAIL = 3
diff --git a/borgmatic/hooks/mysql.py b/borgmatic/hooks/mysql.py
new file mode 100644
index 0000000..b76e2be
--- /dev/null
+++ b/borgmatic/hooks/mysql.py
@@ -0,0 +1,111 @@
+import logging
+import os
+
+from borgmatic.execute import execute_command
+from borgmatic.hooks import dump
+
+logger = logging.getLogger(__name__)
+
+
+def make_dump_path(location_config): # pragma: no cover
+ '''
+ Make the dump path from the given location configuration and the name of this hook.
+ '''
+ return dump.make_database_dump_path(
+ location_config.get('borgmatic_source_directory'), 'mysql_databases'
+ )
+
+
+def dump_databases(databases, log_prefix, location_config, dry_run):
+ '''
+ Dump the given MySQL/MariaDB databases to disk. The databases are supplied as a sequence of
+ dicts, one dict describing each database as per the configuration schema. Use the given log
+ prefix in any log entries. Use the given location configuration dict to construct the
+ destination path. If this is a dry run, then don't actually dump anything.
+ '''
+ dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+
+ logger.info('{}: Dumping MySQL databases{}'.format(log_prefix, dry_run_label))
+
+ for database in databases:
+ name = database['name']
+ dump_filename = dump.make_database_dump_filename(
+ make_dump_path(location_config), name, database.get('hostname')
+ )
+ command = (
+ ('mysqldump', '--add-drop-database')
+ + (('--host', database['hostname']) if 'hostname' in database else ())
+ + (('--port', str(database['port'])) if 'port' in database else ())
+ + (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
+ + (('--user', database['username']) if 'username' in database else ())
+ + (tuple(database['options'].split(' ')) if 'options' in database else ())
+ + (('--all-databases',) if name == 'all' else ('--databases', name))
+ )
+ extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
+
+ logger.debug(
+ '{}: Dumping MySQL database {} to {}{}'.format(
+ log_prefix, name, dump_filename, dry_run_label
+ )
+ )
+ if not dry_run:
+ os.makedirs(os.path.dirname(dump_filename), mode=0o700, exist_ok=True)
+ execute_command(
+ command, output_file=open(dump_filename, 'w'), extra_environment=extra_environment
+ )
+
+
+def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover
+ '''
+ Remove the database dumps for the given databases. The databases are supplied as a sequence of
+ dicts, one dict describing each database as per the configuration schema. Use the log prefix in
+ any log entries. Use the given location configuration dict to construct the destination path. If
+ this is a dry run, then don't actually remove anything.
+ '''
+ dump.remove_database_dumps(
+ make_dump_path(location_config), databases, 'MySQL', log_prefix, dry_run
+ )
+
+
+def make_database_dump_patterns(databases, log_prefix, location_config, names):
+ '''
+    Given a sequence of configuration dicts, a prefix to log with, a location configuration dict,
+ and a sequence of database names to match, return the corresponding glob patterns to match the
+ database dumps in an archive. An empty sequence of names indicates that the patterns should
+ match all dumps.
+ '''
+ return [
+ dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*')
+ for name in (names or ['*'])
+ ]
+
+
+def restore_database_dumps(databases, log_prefix, location_config, dry_run):
+ '''
+ Restore the given MySQL/MariaDB databases from disk. The databases are supplied as a sequence of
+ dicts, one dict describing each database as per the configuration schema. Use the given log
+ prefix in any log entries. Use the given location configuration dict to construct the
+ destination path. If this is a dry run, then don't actually restore anything.
+ '''
+ dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
+
+ for database in databases:
+ dump_filename = dump.make_database_dump_filename(
+ make_dump_path(location_config), database['name'], database.get('hostname')
+ )
+ restore_command = (
+ ('mysql', '--batch')
+ + (('--host', database['hostname']) if 'hostname' in database else ())
+ + (('--port', str(database['port'])) if 'port' in database else ())
+ + (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
+ + (('--user', database['username']) if 'username' in database else ())
+ )
+ extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
+
+ logger.debug(
+ '{}: Restoring MySQL database {}{}'.format(log_prefix, database['name'], dry_run_label)
+ )
+ if not dry_run:
+ execute_command(
+ restore_command, input_file=open(dump_filename), extra_environment=extra_environment
+ )
diff --git a/borgmatic/hooks/postgresql.py b/borgmatic/hooks/postgresql.py
index c3407d9..7a46b26 100644
--- a/borgmatic/hooks/postgresql.py
+++ b/borgmatic/hooks/postgresql.py
@@ -1,42 +1,37 @@
-import glob
import logging
import os
from borgmatic.execute import execute_command
+from borgmatic.hooks import dump
-DUMP_PATH = '~/.borgmatic/postgresql_databases'
logger = logging.getLogger(__name__)
-def make_database_dump_filename(name, hostname=None):
+def make_dump_path(location_config): # pragma: no cover
'''
- Based on the given database name and hostname, return a filename to use for the database dump.
-
- Raise ValueError if the database name is invalid.
+ Make the dump path from the given location configuration and the name of this hook.
'''
- if os.path.sep in name:
- raise ValueError('Invalid database name {}'.format(name))
-
- return os.path.join(os.path.expanduser(DUMP_PATH), hostname or 'localhost', name)
+ return dump.make_database_dump_path(
+ location_config.get('borgmatic_source_directory'), 'postgresql_databases'
+ )
-def dump_databases(databases, log_prefix, dry_run):
+def dump_databases(databases, log_prefix, location_config, dry_run):
'''
Dump the given PostgreSQL databases to disk. The databases are supplied as a sequence of dicts,
one dict describing each database as per the configuration schema. Use the given log prefix in
- any log entries. If this is a dry run, then don't actually dump anything.
+ any log entries. Use the given location configuration dict to construct the destination path. If
+ this is a dry run, then don't actually dump anything.
'''
- if not databases:
- logger.debug('{}: No PostgreSQL databases configured'.format(log_prefix))
- return
-
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
logger.info('{}: Dumping PostgreSQL databases{}'.format(log_prefix, dry_run_label))
for database in databases:
name = database['name']
- dump_filename = make_database_dump_filename(name, database.get('hostname'))
+ dump_filename = dump.make_database_dump_filename(
+ make_dump_path(location_config), name, database.get('hostname')
+ )
all_databases = bool(name == 'all')
command = (
('pg_dumpall' if all_databases else 'pg_dump', '--no-password', '--clean')
@@ -50,112 +45,54 @@ def dump_databases(databases, log_prefix, dry_run):
)
extra_environment = {'PGPASSWORD': database['password']} if 'password' in database else None
- logger.debug('{}: Dumping PostgreSQL database {}{}'.format(log_prefix, name, dry_run_label))
+ logger.debug(
+ '{}: Dumping PostgreSQL database {} to {}{}'.format(
+ log_prefix, name, dump_filename, dry_run_label
+ )
+ )
if not dry_run:
os.makedirs(os.path.dirname(dump_filename), mode=0o700, exist_ok=True)
execute_command(command, extra_environment=extra_environment)
-def remove_database_dumps(databases, log_prefix, dry_run):
+def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover
'''
Remove the database dumps for the given databases. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the log prefix in
- any log entries. If this is a dry run, then don't actually remove anything.
+ any log entries. Use the given location configuration dict to construct the destination path. If
+ this is a dry run, then don't actually remove anything.
'''
- if not databases:
- logger.debug('{}: No PostgreSQL databases configured'.format(log_prefix))
- return
-
- dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
+ dump.remove_database_dumps(
+ make_dump_path(location_config), databases, 'PostgreSQL', log_prefix, dry_run
+ )
- logger.info('{}: Removing PostgreSQL database dumps{}'.format(log_prefix, dry_run_label))
-
- for database in databases:
- dump_filename = make_database_dump_filename(database['name'], database.get('hostname'))
- logger.debug(
- '{}: Removing PostgreSQL database dump {} from {}{}'.format(
- log_prefix, database['name'], dump_filename, dry_run_label
- )
- )
- if dry_run:
- continue
-
- os.remove(dump_filename)
- dump_path = os.path.dirname(dump_filename)
-
- if len(os.listdir(dump_path)) == 0:
- os.rmdir(dump_path)
-
-
-def make_database_dump_patterns(names):
+def make_database_dump_patterns(databases, log_prefix, location_config, names):
'''
- Given a sequence of database names, return the corresponding glob patterns to match the database
- dumps in an archive. An empty sequence of names indicates that the patterns should match all
- dumps.
+    Given a sequence of configuration dicts, a prefix to log with, a location configuration dict,
+ and a sequence of database names to match, return the corresponding glob patterns to match the
+ database dumps in an archive. An empty sequence of names indicates that the patterns should
+ match all dumps.
'''
- return [make_database_dump_filename(name, hostname='*') for name in (names or ['*'])]
+ return [
+ dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*')
+ for name in (names or ['*'])
+ ]
-def convert_glob_patterns_to_borg_patterns(patterns):
- '''
- Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive
- patterns like "sh:etc/*".
- '''
- return ['sh:{}'.format(pattern.lstrip(os.path.sep)) for pattern in patterns]
-
-
-def get_database_names_from_dumps(patterns):
- '''
- Given a sequence of database dump patterns, find the corresponding database dumps on disk and
- return the database names from their filenames.
- '''
- return [os.path.basename(dump_path) for pattern in patterns for dump_path in glob.glob(pattern)]
-
-
-def get_database_configurations(databases, names):
- '''
- Given the full database configuration dicts as per the configuration schema, and a sequence of
- database names, filter down and yield the configuration for just the named databases.
- Additionally, if a database configuration is named "all", project out that configuration for
- each named database.
-
- Raise ValueError if one of the database names cannot be matched to a database in borgmatic's
- database configuration.
- '''
- named_databases = {database['name']: database for database in databases}
-
- for name in names:
- database = named_databases.get(name)
- if database:
- yield database
- continue
-
- if 'all' in named_databases:
- yield {**named_databases['all'], **{'name': name}}
- continue
-
- raise ValueError(
- 'Cannot restore database "{}", as it is not defined in borgmatic\'s configuration'.format(
- name
- )
- )
-
-
-def restore_database_dumps(databases, log_prefix, dry_run):
+def restore_database_dumps(databases, log_prefix, location_config, dry_run):
'''
Restore the given PostgreSQL databases from disk. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given log
- prefix in any log entries. If this is a dry run, then don't actually restore anything.
+ prefix in any log entries. Use the given location configuration dict to construct the
+ destination path. If this is a dry run, then don't actually restore anything.
'''
- if not databases:
- logger.debug('{}: No PostgreSQL databases configured'.format(log_prefix))
- return
-
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
for database in databases:
- dump_filename = make_database_dump_filename(database['name'], database.get('hostname'))
+ dump_filename = dump.make_database_dump_filename(
+ make_dump_path(location_config), database['name'], database.get('hostname')
+ )
restore_command = (
('pg_restore', '--no-password', '--clean', '--if-exists', '--exit-on-error')
+ (('--host', database['hostname']) if 'hostname' in database else ())
diff --git a/borgmatic/logger.py b/borgmatic/logger.py
index 5157297..b20f89e 100644
--- a/borgmatic/logger.py
+++ b/borgmatic/logger.py
@@ -26,7 +26,7 @@ def interactive_console():
Return whether the current console is "interactive". Meaning: Capable of
user input and not just something like a cron job.
'''
- return sys.stdout.isatty() and os.environ.get('TERM') != 'dumb'
+ return sys.stderr.isatty() and os.environ.get('TERM') != 'dumb'
def should_do_markup(no_color, configs):
@@ -48,6 +48,42 @@ def should_do_markup(no_color, configs):
return interactive_console()
+class Multi_stream_handler(logging.Handler):
+ '''
+ A logging handler that dispatches each log record to one of multiple stream handlers depending
+ on the record's log level.
+ '''
+
+ def __init__(self, log_level_to_stream_handler):
+ super(Multi_stream_handler, self).__init__()
+ self.log_level_to_handler = log_level_to_stream_handler
+ self.handlers = set(self.log_level_to_handler.values())
+
+ def flush(self): # pragma: no cover
+ super(Multi_stream_handler, self).flush()
+
+ for handler in self.handlers:
+ handler.flush()
+
+ def emit(self, record):
+ '''
+        Dispatch the log record to the appropriate stream handler for the record's log level.
+ '''
+ self.log_level_to_handler[record.levelno].emit(record)
+
+ def setFormatter(self, formatter): # pragma: no cover
+ super(Multi_stream_handler, self).setFormatter(formatter)
+
+ for handler in self.handlers:
+ handler.setFormatter(formatter)
+
+ def setLevel(self, level): # pragma: no cover
+ super(Multi_stream_handler, self).setLevel(level)
+
+ for handler in self.handlers:
+ handler.setLevel(level)
+
+
LOG_LEVEL_TO_COLOR = {
logging.CRITICAL: colorama.Fore.RED,
logging.ERROR: colorama.Fore.RED,
@@ -87,7 +123,19 @@ def configure_logging(
if log_file_log_level is None:
log_file_log_level = console_log_level
- console_handler = logging.StreamHandler()
+ # Log certain log levels to console stderr and others to stdout. This supports use cases like
+ # grepping (non-error) output.
+ console_error_handler = logging.StreamHandler(sys.stderr)
+ console_standard_handler = logging.StreamHandler(sys.stdout)
+ console_handler = Multi_stream_handler(
+ {
+ logging.CRITICAL: console_error_handler,
+ logging.ERROR: console_error_handler,
+ logging.WARN: console_standard_handler,
+ logging.INFO: console_standard_handler,
+ logging.DEBUG: console_standard_handler,
+ }
+ )
console_handler.setFormatter(Console_color_formatter())
console_handler.setLevel(console_log_level)
@@ -104,7 +152,7 @@ def configure_logging(
syslog_handler.setLevel(syslog_log_level)
handlers = (console_handler, syslog_handler)
elif log_file:
- file_handler = logging.FileHandler(log_file)
+ file_handler = logging.handlers.WatchedFileHandler(log_file)
file_handler.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s'))
file_handler.setLevel(log_file_log_level)
handlers = (console_handler, file_handler)
diff --git a/docs/Dockerfile b/docs/Dockerfile
index 829098f..9512154 100644
--- a/docs/Dockerfile
+++ b/docs/Dockerfile
@@ -3,7 +3,7 @@ FROM python:3.7.4-alpine3.10 as borgmatic
COPY . /app
RUN pip install --no-cache /app && generate-borgmatic-config && chmod +r /etc/borgmatic/config.yaml
RUN borgmatic --help > /command-line.txt \
- && for action in init prune create check extract restore list info; do \
+ && for action in init prune create check extract mount umount restore list info; do \
echo -e "\n--------------------------------------------------------------------------------\n" >> /command-line.txt \
&& borgmatic "$action" --help >> /command-line.txt; done
diff --git a/docs/_includes/components/suggestion-form.html b/docs/_includes/components/suggestion-form.html
index c4e59b2..8e3a73a 100644
--- a/docs/_includes/components/suggestion-form.html
+++ b/docs/_includes/components/suggestion-form.html
@@ -1,12 +1,12 @@
<h2>Improve this documentation</h2>
<p>Have an idea on how to make this documentation even better? Send your
-feedback below! (But if you need help installing or using borgmatic, please
-use our <a href="https://torsion.org/borgmatic/#issues">issue tracker</a>
-instead.)</p>
+feedback below! But if you need help with borgmatic, or have an idea for a
+borgmatic feature, please use our <a href="https://torsion.org/borgmatic/#issues">issue
+tracker</a> instead.</p>
<form id="suggestion-form">
- <div><label for="suggestion">Suggestion</label></div>
+ <div><label for="suggestion">Documentation suggestion</label></div>
<textarea id="suggestion" rows="8" cols="60" name="suggestion"></textarea>
<div data-sk-error="suggestion" class="form-error"></div>
<input id="_page" type="hidden" name="_page">
diff --git a/docs/how-to/backup-your-databases.md b/docs/how-to/backup-your-databases.md
index 21504b2..0ea771f 100644
--- a/docs/how-to/backup-your-databases.md
+++ b/docs/how-to/backup-your-databases.md
@@ -11,31 +11,44 @@ consistent snapshot that is more suited for backups.
Fortunately, borgmatic includes built-in support for creating database dumps
prior to running backups. For example, here is everything you need to dump and
-backup a couple of local PostgreSQL databases:
+backup a couple of local PostgreSQL databases and a MySQL/MariaDB database:
```yaml
hooks:
postgresql_databases:
- name: users
- name: orders
+ mysql_databases:
+ - name: posts
```
Prior to each backup, borgmatic dumps each configured database to a file
-(located in `~/.borgmatic/`) and includes it in the backup. After the backup
-completes, borgmatic removes the database dump files to recover disk space.
+and includes it in the backup. After the backup completes, borgmatic removes
+the database dump files to recover disk space.
-Here's a more involved example that connects to a remote database:
+borgmatic creates these temporary dump files in `~/.borgmatic` by default. To
+customize this path, set the `borgmatic_source_directory` option in the
+`location` section of borgmatic's configuration.
+
+Here's a more involved example that connects to remote databases:
```yaml
hooks:
postgresql_databases:
- name: users
- hostname: database.example.org
+ hostname: database1.example.org
port: 5433
- username: dbuser
+ username: postgres
password: trustsome1
format: tar
options: "--role=someone"
+ mysql_databases:
+ - name: posts
+ hostname: database2.example.org
+ port: 3307
+ username: root
+ password: trustsome1
+ options: "--skip-comments"
```
If you want to dump all databases on a host, use `all` for the database name:
@@ -44,10 +57,12 @@ If you want to dump all databases on a host, use `all` for the database name:
hooks:
postgresql_databases:
- name: all
+ mysql_databases:
+ - name: all
```
Note that you may need to use a `username` of the `postgres` superuser for
-this to work.
+this to work with PostgreSQL.
### Configuration backups
@@ -61,9 +76,9 @@ bring back any missing configuration files in order to restore a database.
## Supported databases
-As of now, borgmatic only supports PostgreSQL databases directly. But see
-below about general-purpose preparation and cleanup hooks as a work-around
-with other database systems. Also, please [file a
+As of now, borgmatic supports PostgreSQL and MySQL/MariaDB databases
+directly. But see below about general-purpose preparation and cleanup hooks as
+a work-around with other database systems. Also, please [file a
ticket](https://torsion.org/borgmatic/#issues) for additional database systems
that you'd like supported.
@@ -148,7 +163,8 @@ databases that share the exact same name on different hosts.
If you prefer to restore a database without the help of borgmatic, first
[extract](https://torsion.org/borgmatic/docs/how-to/extract-a-backup/) an
archive containing a database dump, and then manually restore the dump file
-found within the extracted `~/.borgmatic/` path (e.g. with `pg_restore`).
+found within the extracted `~/.borgmatic/` path (e.g. with `pg_restore` or
+`mysql` commands).
## Preparation and cleanup hooks
@@ -161,6 +177,24 @@ after backups. So if necessary, you can use these hooks to create database
dumps with any database system.
+## Troubleshooting
+
+### MySQL table lock errors
+
+If you encounter table lock errors during a database dump with MySQL/MariaDB,
+you may need to [use a
+transaction](https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html#option_mysqldump_single-transaction).
+You can add any additional flags to the `options:` in your database
+configuration. Here's an example:
+
+```yaml
+hooks:
+ mysql_databases:
+ - name: posts
+ options: "--single-transaction --quick"
+```
+
+
## Related documentation
* [Set up backups with borgmatic](https://torsion.org/borgmatic/docs/how-to/set-up-backups/)
diff --git a/docs/how-to/develop-on-borgmatic.md b/docs/how-to/develop-on-borgmatic.md
index 5a52352..9e9fee7 100644
--- a/docs/how-to/develop-on-borgmatic.md
+++ b/docs/how-to/develop-on-borgmatic.md
@@ -75,14 +75,22 @@ tox -e isort
### End-to-end tests
borgmatic additionally includes some end-to-end tests that integration test
-with Borg for a few representative scenarios. These tests don't run by default
-because they're relatively slow and depend on Borg. If you would like to run
-them:
+with Borg and supported databases for a few representative scenarios. These
+tests don't run by default when running `tox`, because they're relatively slow
+and depend on Docker containers for runtime dependencies. These tests do
+run on the continuous integration (CI) server, and running them on your
+developer machine is the closest thing to CI test parity.
+
+If you would like to run the full test suite, first install Docker and [Docker
+Compose](https://docs.docker.com/compose/install/). Then run:
```bash
-tox -e end-to-end
+scripts/run-full-dev-tests
```
+Note that this script assumes you have permission to run Docker. If you
+don't, then you may need to run with `sudo`.
+
## Code style
Start with [PEP 8](https://www.python.org/dev/peps/pep-0008/). But then, apply
diff --git a/docs/how-to/extract-a-backup.md b/docs/how-to/extract-a-backup.md
index 554bf72..c45a3df 100644
--- a/docs/how-to/extract-a-backup.md
+++ b/docs/how-to/extract-a-backup.md
@@ -87,6 +87,41 @@ so that you can extract files from your archive without impacting your live
databases.
+## Mount a filesystem
+
+If instead of extracting files, you'd like to explore the files from an
+archive as a [FUSE](https://en.wikipedia.org/wiki/Filesystem_in_Userspace)
+filesystem, you can use the `borgmatic mount` action. Here's an example:
+
+```bash
+borgmatic mount --archive host-2019-... --mount-point /mnt
+```
+
+This mounts the entire archive on the given mount point `/mnt`, so that you
+can look in there for your files.
+
+Omit the `--archive` flag to mount all archives (lazy-loaded):
+
+```bash
+borgmatic mount --mount-point /mnt
+```
+
+If you'd like to restrict the mounted filesystem to only particular paths from
+your archive, use the `--path` flag, similar to the `extract` action above.
+For instance:
+
+```bash
+borgmatic mount --archive host-2019-... --mount-point /mnt --path var/lib
+```
+
+When you're all done exploring your files, unmount your mount point. No
+`--archive` flag is needed:
+
+```bash
+borgmatic umount --mount-point /mnt
+```
+
+
## Related documentation
* [Set up backups with borgmatic](https://torsion.org/borgmatic/docs/how-to/set-up-backups/)
diff --git a/docs/how-to/inspect-your-backups.md b/docs/how-to/inspect-your-backups.md
index 370eb6b..cb52808 100644
--- a/docs/how-to/inspect-your-backups.md
+++ b/docs/how-to/inspect-your-backups.md
@@ -70,6 +70,21 @@ Or to increase syslog logging to include debug spew:
borgmatic --syslog-verbosity 2
```
+### Rate limiting
+
+If you are using rsyslog or systemd's journal, be aware that by default they
+both throttle the rate at which logging occurs. So you may need to change
+either [the global rate
+limit](https://www.rootusers.com/how-to-change-log-rate-limiting-in-linux/) or
+[the per-service rate
+limit](https://www.freedesktop.org/software/systemd/man/journald.conf.html#RateLimitIntervalSec=)
+if you're finding that borgmatic logs are missing.
+
+Note that the [sample borgmatic systemd service
+file](https://torsion.org/borgmatic/docs/how-to/set-up-backups/#systemd)
+already has this rate limit disabled for systemd's journal.
+
+
### Logging to file
If you don't want to use syslog, and you'd rather borgmatic log to a plain
@@ -84,20 +99,6 @@ the log file so it doesn't grow too large. Also, there is a
`--log-file-verbosity` flag to customize the log file's log level.
-### systemd journal
-
-If your local syslog daemon is systemd's journal, be aware that journald by
-default throttles the rate at which a particular program can log. So you may
-need to [change the journald rate
-limit](https://www.freedesktop.org/software/systemd/man/journald.conf.html#RateLimitIntervalSec=)
-in `/etc/systemd/journald.conf` if you're finding that borgmatic journald logs
-are missing.
-
-Note that the [sample borgmatic systemd service
-file](https://torsion.org/borgmatic/docs/how-to/set-up-backups/#systemd)
-already has this rate limit disabled.
-
-
## Related documentation
* [Set up backups with borgmatic](https://torsion.org/borgmatic/docs/how-to/set-up-backups/)
diff --git a/docs/how-to/make-per-application-backups.md b/docs/how-to/make-per-application-backups.md
index f962b81..cd25746 100644
--- a/docs/how-to/make-per-application-backups.md
+++ b/docs/how-to/make-per-application-backups.md
@@ -22,6 +22,11 @@ When you set up multiple configuration files like this, borgmatic will run
each one in turn from a single borgmatic invocation. This includes, by
default, the traditional `/etc/borgmatic/config.yaml` as well.
+Each configuration file is interpreted independently, as if you ran borgmatic
+for each configuration file one at a time. In other words, borgmatic does not
+perform any merging of configuration files by default. If you'd like borgmatic
+to merge your configuration files, see below about configuration includes.
+
And if you need even more customizability, you can specify alternate
configuration paths on the command-line with borgmatic's `--config` option.
See `borgmatic --help` for more information.
@@ -110,6 +115,40 @@ Note that this `<<` include merging syntax is only for merging in mappings
directly, please see the section above about standard includes.
+## Configuration overrides
+
+In more complex multi-application setups, you may want to override particular
+borgmatic configuration file options at the time you run borgmatic. For
+instance, you could reuse a common configuration file for multiple
+applications, but then set the repository for each application at runtime. Or
+you might want to try a variant of an option for testing purposes without
+actually touching your configuration file.
+
+Whatever the reason, you can override borgmatic configuration options at the
+command-line via the `--override` flag. Here's an example:
+
+```bash
+borgmatic create --override location.remote_path=borg1
+```
+
+What this does is load your configuration files, and for each one, disregard
+the configured value for the `remote_path` option in the `location` section,
+and use the value of `borg1` instead.
+
+Note that the value is parsed as an actual YAML string, so you can even set
+list values by using brackets. For instance:
+
+```bash
+borgmatic create --override location.repositories=[test1.borg,test2.borg]
+```
+
+There is not currently a way to override a single element of a list without
+replacing the whole list.
+
+Be sure to quote your overrides if they contain spaces or other characters
+that your shell may interpret.
+
+
## Related documentation
* [Set up backups with borgmatic](https://torsion.org/borgmatic/docs/how-to/set-up-backups/)
diff --git a/docs/how-to/monitor-your-backups.md b/docs/how-to/monitor-your-backups.md
index e0307ec..6f6f150 100644
--- a/docs/how-to/monitor-your-backups.md
+++ b/docs/how-to/monitor-your-backups.md
@@ -57,10 +57,10 @@ tests](https://torsion.org/borgmatic/docs/how-to/extract-a-backup/).
## Error hooks
-When an error occurs during a backup, borgmatic can run configurable shell
-commands to fire off custom error notifications or take other actions, so you
-can get alerted as soon as something goes wrong. Here's a not-so-useful
-example:
+When an error occurs during a `prune`, `create`, or `check` action, borgmatic
+can run configurable shell commands to fire off custom error notifications or
+take other actions, so you can get alerted as soon as something goes wrong.
+Here's a not-so-useful example:
```yaml
hooks:
@@ -91,7 +91,9 @@ here:
* `output`: output of the command that failed (may be blank if an error
occurred without running a command)
-Note that borgmatic does not run `on_error` hooks if an error occurs within a
+Note that borgmatic runs the `on_error` hooks only for `prune`, `create`, or
+`check` actions or hooks in which an error occurs, and not other actions.
+borgmatic does not run `on_error` hooks if an error occurs within a
`before_everything` or `after_everything` hook. For more about hooks, see the
[borgmatic hooks
documentation](https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/),
@@ -115,10 +117,23 @@ hooks:
With this hook in place, borgmatic pings your Healthchecks project when a
backup begins, ends, or errors. Specifically, before the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
-hooks</a> run, borgmatic lets Healthchecks know that a backup has started.
-Then, if the backup completes successfully, borgmatic notifies Healthchecks of
-the success after the `after_backup` hooks run. And if an error occurs during
-the backup, borgmatic notifies Healthchecks after the `on_error` hooks run.
+hooks</a> run, borgmatic lets Healthchecks know that it has started if any of
+the `prune`, `create`, or `check` actions are run.
+
+Then, if the actions complete successfully, borgmatic notifies Healthchecks of
+the success after the `after_backup` hooks run, and includes borgmatic logs in
+the payload data sent to Healthchecks. This means that borgmatic logs show up
+in the Healthchecks UI, although be aware that Healthchecks currently has a
+10-kilobyte limit for the logs in each ping.
+
+If an error occurs during any action, borgmatic notifies Healthchecks after
+the `on_error` hooks run, also tacking on logs including the error itself. But
+the logs are only included for errors that occur when a `prune`, `create`, or
+`check` action is run.
+
+Note that borgmatic sends logs to Healthchecks by applying the maximum of any
+other borgmatic verbosity levels (`--verbosity`, `--syslog-verbosity`, etc.),
+as there is not currently a dedicated Healthchecks verbosity setting.
You can configure Healthchecks to notify you by a [variety of
mechanisms](https://healthchecks.io/#welcome-integrations) when backups fail
@@ -142,10 +157,11 @@ hooks:
With this hook in place, borgmatic pings your Cronitor monitor when a backup
begins, ends, or errors. Specifically, before the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
-hooks</a> run, borgmatic lets Cronitor know that a backup has started. Then,
-if the backup completes successfully, borgmatic notifies Cronitor of the
-success after the `after_backup` hooks run. And if an error occurs during the
-backup, borgmatic notifies Cronitor after the `on_error` hooks run.
+hooks</a> run, borgmatic lets Cronitor know that it has started if any of the
+`prune`, `create`, or `check` actions are run. Then, if the actions complete
+successfully, borgmatic notifies Cronitor of the success after the
+`after_backup` hooks run. And if an error occurs during any action, borgmatic
+notifies Cronitor after the `on_error` hooks run.
You can configure Cronitor to notify you by a [variety of
mechanisms](https://cronitor.io/docs/cron-job-notifications) when backups fail
@@ -169,10 +185,11 @@ hooks:
With this hook in place, borgmatic pings your Cronhub monitor when a backup
begins, ends, or errors. Specifically, before the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
-hooks</a> run, borgmatic lets Cronhub know that a backup has started. Then,
-if the backup completes successfully, borgmatic notifies Cronhub of the
-success after the `after_backup` hooks run. And if an error occurs during the
-backup, borgmatic notifies Cronhub after the `on_error` hooks run.
+hooks</a> run, borgmatic lets Cronhub know that it has started if any of the
+`prune`, `create`, or `check` actions are run. Then, if the actions complete
+successfully, borgmatic notifies Cronhub of the success after the
+`after_backup` hooks run. And if an error occurs during any action, borgmatic
+notifies Cronhub after the `on_error` hooks run.
Note that even though you configure borgmatic with the "start" variant of the
ping URL, borgmatic substitutes the correct state into the URL when pinging
diff --git a/docs/how-to/set-up-backups.md b/docs/how-to/set-up-backups.md
index bea0447..bc78237 100644
--- a/docs/how-to/set-up-backups.md
+++ b/docs/how-to/set-up-backups.md
@@ -3,15 +3,11 @@ title: How to set up backups with borgmatic
---
## Installation
-To get up and running, first [install
-Borg](https://borgbackup.readthedocs.io/en/stable/installation.html), at
-least version 1.1.
+First, [install
+Borg](https://borgbackup.readthedocs.io/en/stable/installation.html), at least
+version 1.1.
-By default, borgmatic looks for its configuration files in `/etc/borgmatic/`
-and `/etc/borgmatic.d/`, where the root user typically has read access.
-
-So, to download and install borgmatic as the root user, run the following
-commands:
+Then, download and install borgmatic by running the following command:
```bash
sudo pip3 install --user --upgrade borgmatic
@@ -19,8 +15,14 @@ sudo pip3 install --user --upgrade borgmatic
This is a [recommended user site
installation](https://packaging.python.org/tutorials/installing-packages/#installing-to-the-user-site).
-You will need to ensure that `/root/.local/bin` is available on your `$PATH` so
-that the borgmatic executable is available.
+You will need to ensure that `/root/.local/bin` is available on your `$PATH`
+so
+that the borgmatic executable is available. For instance, adding this to
+root's `~/.profile` or `~/.bash_profile` may do the trick:
+
+```bash
+export PATH="$PATH:$HOME/.local/bin"
+```
Note that your pip binary may have a different name than "pip3". Make sure
you're using Python 3, as borgmatic does not support Python 2.
@@ -30,11 +32,12 @@ you're using Python 3, as borgmatic does not support Python 2.
Along with the above process, you have several other options for installing
borgmatic:
+ * [Docker image with scheduled backups](https://hub.docker.com/r/b3vis/borgmatic/)
* [Docker base image](https://hub.docker.com/r/monachus/borgmatic/)
- * [Docker image with support for scheduled backups](https://hub.docker.com/r/b3vis/borgmatic/)
* [Debian](https://tracker.debian.org/pkg/borgmatic)
* [Ubuntu](https://launchpad.net/ubuntu/+source/borgmatic)
- * [Fedora](https://bodhi.fedoraproject.org/updates/?search=borgmatic)
+ * [Fedora official](https://bodhi.fedoraproject.org/updates/?search=borgmatic)
+ * [Fedora unofficial](https://copr.fedorainfracloud.org/coprs/heffer/borgmatic/)
* [Arch Linux](https://www.archlinux.org/packages/community/any/borgmatic/)
* [OpenBSD](http://ports.su/sysutils/borgmatic)
* [openSUSE](https://software.opensuse.org/package/borgmatic)
@@ -63,7 +66,7 @@ sudo generate-borgmatic-config
```
If that command is not found, then it may be installed in a location that's
-not in your system `PATH`. Try looking in `/usr/local/bin/`.
+not in your system `PATH` (see above). Try looking in `~/.local/bin/`.
This generates a sample configuration file at /etc/borgmatic/config.yaml (by
default). You should edit the file to suit your needs, as the values are
diff --git a/docs/how-to/upgrade.md b/docs/how-to/upgrade.md
index 18455e8..b99dea9 100644
--- a/docs/how-to/upgrade.md
+++ b/docs/how-to/upgrade.md
@@ -39,9 +39,9 @@ generate-borgmatic-config --source config.yaml --destination config-new.yaml
New options start as commented out, so you can edit the file and decide
whether you want to use each one.
-There are a few caveats to this process, however. First, when generating the
-new configuration file, `generate-borgmatic-config` replaces any comments
-you've written in your original configuration file with the newest generated
+There are a few caveats to this process. First, when generating the new
+configuration file, `generate-borgmatic-config` replaces any comments you've
+written in your original configuration file with the newest generated
comments. Second, the script adds back any options you had originally deleted,
although it does so with the options commented out. And finally, any YAML
includes you've used in the source configuration get flattened out into a
diff --git a/docs/static/borgbase.png b/docs/static/borgbase.png
new file mode 100644
index 0000000..0db0055
--- /dev/null
+++ b/docs/static/borgbase.png
Binary files differ
diff --git a/static/borgmatic.png b/docs/static/borgmatic.png
index e42d06d..e42d06d 100644
--- a/static/borgmatic.png
+++ b/docs/static/borgmatic.png
Binary files differ
diff --git a/static/borgmatic.svg b/docs/static/borgmatic.svg
index 1c30816..1c30816 100644
--- a/static/borgmatic.svg
+++ b/docs/static/borgmatic.svg
diff --git a/docs/static/cronhub.png b/docs/static/cronhub.png
new file mode 100644
index 0000000..a4be582
--- /dev/null
+++ b/docs/static/cronhub.png
Binary files differ
diff --git a/docs/static/cronitor.png b/docs/static/cronitor.png
new file mode 100644
index 0000000..4b87607
--- /dev/null
+++ b/docs/static/cronitor.png
Binary files differ
diff --git a/docs/static/healthchecks.png b/docs/static/healthchecks.png
new file mode 100644
index 0000000..7b5694a
--- /dev/null
+++ b/docs/static/healthchecks.png
Binary files differ
diff --git a/docs/static/mariadb.png b/docs/static/mariadb.png
new file mode 100644
index 0000000..c44bc48
--- /dev/null
+++ b/docs/static/mariadb.png
Binary files differ
diff --git a/docs/static/mysql.png b/docs/static/mysql.png
new file mode 100644
index 0000000..73b55bd
--- /dev/null
+++ b/docs/static/mysql.png
Binary files differ
diff --git a/docs/static/postgresql.png b/docs/static/postgresql.png
new file mode 100644
index 0000000..657b402
--- /dev/null
+++ b/docs/static/postgresql.png
Binary files differ
diff --git a/docs/static/rsyncnet.png b/docs/static/rsyncnet.png
new file mode 100644
index 0000000..3c027be
--- /dev/null
+++ b/docs/static/rsyncnet.png
Binary files differ
diff --git a/scripts/release b/scripts/release
index 0477b6b..afc99c3 100755
--- a/scripts/release
+++ b/scripts/release
@@ -23,10 +23,11 @@ git push github $version
rm -fr dist
python3 setup.py bdist_wheel
python3 setup.py sdist
+gpg --detach-sign --armor dist/*
twine upload -r pypi dist/borgmatic-*.tar.gz
twine upload -r pypi dist/borgmatic-*-py3-none-any.whl
-# Set release changelogs on projects.evoworx.org and GitHub.
+# Set release changelogs on projects.torsion.org and GitHub.
release_changelog="$(cat NEWS | sed '/^$/q' | grep -v '^\S')"
escaped_release_changelog="$(echo "$release_changelog" | sed -z 's/\n/\\n/g' | sed -z 's/\"/\\"/g')"
curl --silent --request POST \
diff --git a/scripts/run-full-dev-tests b/scripts/run-full-dev-tests
new file mode 100755
index 0000000..2c682f0
--- /dev/null
+++ b/scripts/run-full-dev-tests
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# This script is for running all tests, including end-to-end tests, on a developer machine. It sets
+# up database containers to run tests against, runs the tests, and then tears down the containers.
+#
+# Run this script from the root directory of the borgmatic source.
+#
+# For more information, see:
+# https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/
+
+set -e
+
+docker-compose --file tests/end-to-end/docker-compose.yaml up --force-recreate \
+ --abort-on-container-exit
diff --git a/scripts/run-full-tests b/scripts/run-full-tests
new file mode 100755
index 0000000..21035ee
--- /dev/null
+++ b/scripts/run-full-tests
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+# This script installs test dependencies and runs all tests, including end-to-end tests. It
+# is designed to run inside a test container, and presumes that other test infrastructure like
+# databases are already running. Therefore, on a developer machine, you should not run this script
+# directly. Instead, run scripts/run-full-dev-tests
+#
+# For more information, see:
+# https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/
+
+set -e
+
+python -m pip install --upgrade pip==19.3.1
+pip install tox==3.14.1
+export COVERAGE_FILE=/tmp/.coverage
+tox --workdir /tmp/.tox
+apk add --no-cache borgbackup postgresql-client mariadb-client
+tox --workdir /tmp/.tox -e end-to-end
diff --git a/scripts/run-tests b/scripts/run-tests
deleted file mode 100755
index d2a91c2..0000000
--- a/scripts/run-tests
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-
-# This script is intended to be run from the continuous integration build
-# server, and not on a developer machine. For that, see:
-# https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/
-
-set -e
-
-python -m pip install --upgrade pip==19.3.1
-pip install tox==3.14.0
-tox
-apk add --no-cache borgbackup
-tox -e end-to-end
diff --git a/setup.py b/setup.py
index 06e90aa..d3ae216 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,6 @@
from setuptools import find_packages, setup
-VERSION = '1.4.8'
+VERSION = '1.4.21'
setup(
diff --git a/tests/end-to-end/docker-compose.yaml b/tests/end-to-end/docker-compose.yaml
new file mode 100644
index 0000000..156596a
--- /dev/null
+++ b/tests/end-to-end/docker-compose.yaml
@@ -0,0 +1,25 @@
+version: '3'
+services:
+ postgresql:
+ image: postgres:11.6-alpine
+ environment:
+ POSTGRES_PASSWORD: test
+ POSTGRES_DB: test
+ mysql:
+ image: mariadb:10.4
+ environment:
+ MYSQL_ROOT_PASSWORD: test
+ MYSQL_DATABASE: test
+ tests:
+ image: python:3.7-alpine3.10
+ volumes:
+ - "../..:/app:ro"
+ tmpfs:
+ - "/app/borgmatic.egg-info"
+ tty: true
+ working_dir: /app
+ command:
+ - /app/scripts/run-full-tests
+ depends_on:
+ - postgresql
+ - mysql
diff --git a/tests/end-to-end/test_borgmatic.py b/tests/end-to-end/test_borgmatic.py
index 620e67a..0bfe7b7 100644
--- a/tests/end-to-end/test_borgmatic.py
+++ b/tests/end-to-end/test_borgmatic.py
@@ -44,13 +44,13 @@ def test_borgmatic_command():
generate_configuration(config_path, repository_path)
subprocess.check_call(
- 'borgmatic -v 2 --config {} --init --encryption repokey'.format(config_path).split(' ')
+ 'borgmatic -v 2 --config {} init --encryption repokey'.format(config_path).split(' ')
)
# Run borgmatic to generate a backup archive, and then list it to make sure it exists.
subprocess.check_call('borgmatic --config {}'.format(config_path).split(' '))
output = subprocess.check_output(
- 'borgmatic --config {} --list --json'.format(config_path).split(' ')
+ 'borgmatic --config {} list --json'.format(config_path).split(' ')
).decode(sys.stdout.encoding)
parsed_output = json.loads(output)
@@ -61,7 +61,7 @@ def test_borgmatic_command():
# Extract the created archive into the current (temporary) directory, and confirm that the
# extracted file looks right.
output = subprocess.check_output(
- 'borgmatic --config {} --extract --archive {}'.format(config_path, archive_name).split(
+ 'borgmatic --config {} extract --archive {}'.format(config_path, archive_name).split(
' '
)
).decode(sys.stdout.encoding)
@@ -70,7 +70,7 @@ def test_borgmatic_command():
# Exercise the info flag.
output = subprocess.check_output(
- 'borgmatic --config {} --info --json'.format(config_path).split(' ')
+ 'borgmatic --config {} info --json'.format(config_path).split(' ')
).decode(sys.stdout.encoding)
parsed_output = json.loads(output)
diff --git a/tests/end-to-end/test_database.py b/tests/end-to-end/test_database.py
new file mode 100644
index 0000000..a011c6f
--- /dev/null
+++ b/tests/end-to-end/test_database.py
@@ -0,0 +1,83 @@
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+def write_configuration(config_path, repository_path, borgmatic_source_directory):
+ '''
+ Write out borgmatic configuration into a file at the config path. Set the options so as to work
+ for testing. This includes injecting the given repository path, borgmatic source directory for
+ storing database dumps, and encryption passphrase.
+ '''
+ config = '''
+location:
+ source_directories:
+ - {}
+ repositories:
+ - {}
+ borgmatic_source_directory: {}
+
+storage:
+ encryption_passphrase: "test"
+
+hooks:
+ postgresql_databases:
+ - name: test
+ hostname: postgresql
+ username: postgres
+ password: test
+ mysql_databases:
+ - name: test
+ hostname: mysql
+ username: root
+ password: test
+'''.format(
+ config_path, repository_path, borgmatic_source_directory
+ )
+
+ config_file = open(config_path, 'w')
+ config_file.write(config)
+ config_file.close()
+
+
+def test_database_dump_and_restore():
+ # Create a Borg repository.
+ temporary_directory = tempfile.mkdtemp()
+ repository_path = os.path.join(temporary_directory, 'test.borg')
+ borgmatic_source_directory = os.path.join(temporary_directory, '.borgmatic')
+
+ original_working_directory = os.getcwd()
+
+ try:
+ config_path = os.path.join(temporary_directory, 'test.yaml')
+ write_configuration(config_path, repository_path, borgmatic_source_directory)
+
+ subprocess.check_call(
+ 'borgmatic -v 2 --config {} init --encryption repokey'.format(config_path).split(' ')
+ )
+
+ # Run borgmatic to generate a backup archive including a database dump
+ subprocess.check_call('borgmatic create --config {} -v 2'.format(config_path).split(' '))
+
+ # Get the created archive name.
+ output = subprocess.check_output(
+ 'borgmatic --config {} list --json'.format(config_path).split(' ')
+ ).decode(sys.stdout.encoding)
+ parsed_output = json.loads(output)
+
+ assert len(parsed_output) == 1
+ assert len(parsed_output[0]['archives']) == 1
+ archive_name = parsed_output[0]['archives'][0]['archive']
+
+ # Restore the database from the archive.
+ subprocess.check_call(
+ 'borgmatic --config {} restore --archive {}'.format(config_path, archive_name).split(
+ ' '
+ )
+ )
+ finally:
+ os.chdir(original_working_directory)
+ shutil.rmtree(temporary_directory)
diff --git a/tests/integration/commands/test_arguments.py b/tests/integration/commands/test_arguments.py
index 46203d5..9c5bb94 100644
--- a/tests/integration/commands/test_arguments.py
+++ b/tests/integration/commands/test_arguments.py
@@ -256,7 +256,7 @@ def test_parse_arguments_disallows_glob_archives_with_successful():
)
-def test_parse_arguments_disallows_repository_without_extract_or_list():
+def test_parse_arguments_disallows_repository_unless_action_consumes_it():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
with pytest.raises(SystemExit):
@@ -271,20 +271,36 @@ def test_parse_arguments_allows_repository_with_extract():
)
+def test_parse_arguments_allows_repository_with_mount():
+ flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+ module.parse_arguments(
+ '--config',
+ 'myconfig',
+ 'mount',
+ '--repository',
+ 'test.borg',
+ '--archive',
+ 'test',
+ '--mount-point',
+ '/mnt',
+ )
+
+
def test_parse_arguments_allows_repository_with_list():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
module.parse_arguments('--config', 'myconfig', 'list', '--repository', 'test.borg')
-def test_parse_arguments_disallows_archive_without_extract_restore_or_list():
+def test_parse_arguments_disallows_archive_unless_action_consumes_it():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
with pytest.raises(SystemExit):
module.parse_arguments('--config', 'myconfig', '--archive', 'test')
-def test_parse_arguments_disallows_paths_without_extract():
+def test_parse_arguments_disallows_paths_unless_action_consumes_it():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
with pytest.raises(SystemExit):
@@ -297,6 +313,14 @@ def test_parse_arguments_allows_archive_with_extract():
module.parse_arguments('--config', 'myconfig', 'extract', '--archive', 'test')
+def test_parse_arguments_allows_archive_with_mount():
+ flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+ module.parse_arguments(
+ '--config', 'myconfig', 'mount', '--archive', 'test', '--mount-point', '/mnt'
+ )
+
+
def test_parse_arguments_allows_archive_with_dashed_extract():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
@@ -335,6 +359,20 @@ def test_parse_arguments_requires_archive_with_restore():
module.parse_arguments('--config', 'myconfig', 'restore')
+def test_parse_arguments_requires_mount_point_with_mount():
+ flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+ with pytest.raises(SystemExit):
+ module.parse_arguments('--config', 'myconfig', 'mount', '--archive', 'test')
+
+
+def test_parse_arguments_requires_mount_point_with_umount():
+ flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+ with pytest.raises(SystemExit):
+ module.parse_arguments('--config', 'myconfig', 'umount')
+
+
def test_parse_arguments_allows_progress_before_create():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
diff --git a/tests/integration/config/test_override.py b/tests/integration/config/test_override.py
new file mode 100644
index 0000000..cfcd339
--- /dev/null
+++ b/tests/integration/config/test_override.py
@@ -0,0 +1,40 @@
+import pytest
+
+from borgmatic.config import override as module
+
+
+@pytest.mark.parametrize(
+ 'value,expected_result',
+ (
+ ('thing', 'thing'),
+ ('33', 33),
+ ('33b', '33b'),
+ ('true', True),
+ ('false', False),
+ ('[foo]', ['foo']),
+ ('[foo, bar]', ['foo', 'bar']),
+ ),
+)
+def test_convert_value_type_coerces_values(value, expected_result):
+ assert module.convert_value_type(value) == expected_result
+
+
+def test_apply_overrides_updates_config():
+ raw_overrides = [
+ 'section.key=value1',
+ 'other_section.thing=value2',
+ 'section.nested.key=value3',
+ 'new.foo=bar',
+ ]
+ config = {
+ 'section': {'key': 'value', 'other': 'other_value'},
+ 'other_section': {'thing': 'thing_value'},
+ }
+
+ module.apply_overrides(config, raw_overrides)
+
+ assert config == {
+ 'section': {'key': 'value1', 'other': 'other_value', 'nested': {'key': 'value3'}},
+ 'other_section': {'thing': 'value2'},
+ 'new': {'foo': 'bar'},
+ }
diff --git a/tests/integration/config/test_validate.py b/tests/integration/config/test_validate.py
index 706743b..cbd4f5b 100644
--- a/tests/integration/config/test_validate.py
+++ b/tests/integration/config/test_validate.py
@@ -212,3 +212,30 @@ def test_parse_configuration_raises_for_validation_error():
with pytest.raises(module.Validation_error):
module.parse_configuration('config.yaml', 'schema.yaml')
+
+
+def test_parse_configuration_applies_overrides():
+ mock_config_and_schema(
+ '''
+ location:
+ source_directories:
+ - /home
+
+ repositories:
+ - hostname.borg
+
+ local_path: borg1
+ '''
+ )
+
+ result = module.parse_configuration(
+ 'config.yaml', 'schema.yaml', overrides=['location.local_path=borg2']
+ )
+
+ assert result == {
+ 'location': {
+ 'source_directories': ['/home'],
+ 'repositories': ['hostname.borg'],
+ 'local_path': 'borg2',
+ }
+ }
diff --git a/tests/integration/test_execute.py b/tests/integration/test_execute.py
index 04788e8..c4c62c9 100644
--- a/tests/integration/test_execute.py
+++ b/tests/integration/test_execute.py
@@ -7,40 +7,42 @@ from flexmock import flexmock
from borgmatic import execute as module
-def test_execute_and_log_output_logs_each_line_separately():
+def test_log_output_logs_each_line_separately():
flexmock(module.logger).should_receive('log').with_args(logging.INFO, 'hi').once()
flexmock(module.logger).should_receive('log').with_args(logging.INFO, 'there').once()
flexmock(module).should_receive('exit_code_indicates_error').and_return(False)
- module.execute_and_log_output(
+ hi_process = subprocess.Popen(['echo', 'hi'], stdout=subprocess.PIPE)
+ module.log_output(
['echo', 'hi'],
+ hi_process,
+ hi_process.stdout,
output_log_level=logging.INFO,
- shell=False,
- environment=None,
- working_directory=None,
error_on_warnings=False,
)
- module.execute_and_log_output(
+
+ there_process = subprocess.Popen(['echo', 'there'], stdout=subprocess.PIPE)
+ module.log_output(
['echo', 'there'],
+ there_process,
+ there_process.stdout,
output_log_level=logging.INFO,
- shell=False,
- environment=None,
- working_directory=None,
error_on_warnings=False,
)
-def test_execute_and_log_output_includes_error_output_in_exception():
+def test_log_output_includes_error_output_in_exception():
flexmock(module.logger).should_receive('log')
flexmock(module).should_receive('exit_code_indicates_error').and_return(True)
+ process = subprocess.Popen(['grep'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+
with pytest.raises(subprocess.CalledProcessError) as error:
- module.execute_and_log_output(
+ module.log_output(
['grep'],
+ process,
+ process.stdout,
output_log_level=logging.INFO,
- shell=False,
- environment=None,
- working_directory=None,
error_on_warnings=False,
)
@@ -48,18 +50,19 @@ def test_execute_and_log_output_includes_error_output_in_exception():
assert error.value.output
-def test_execute_and_log_output_truncates_long_error_output():
+def test_log_output_truncates_long_error_output():
flexmock(module).ERROR_OUTPUT_MAX_LINE_COUNT = 0
flexmock(module.logger).should_receive('log')
flexmock(module).should_receive('exit_code_indicates_error').and_return(True)
+ process = subprocess.Popen(['grep'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+
with pytest.raises(subprocess.CalledProcessError) as error:
- module.execute_and_log_output(
+ module.log_output(
['grep'],
+ process,
+ process.stdout,
output_log_level=logging.INFO,
- shell=False,
- environment=None,
- working_directory=None,
error_on_warnings=False,
)
@@ -67,15 +70,11 @@ def test_execute_and_log_output_truncates_long_error_output():
assert error.value.output.startswith('...')
-def test_execute_and_log_output_with_no_output_logs_nothing():
+def test_log_output_with_no_output_logs_nothing():
flexmock(module.logger).should_receive('log').never()
flexmock(module).should_receive('exit_code_indicates_error').and_return(False)
- module.execute_and_log_output(
- ['true'],
- output_log_level=logging.INFO,
- shell=False,
- environment=None,
- working_directory=None,
- error_on_warnings=False,
+ process = subprocess.Popen(['true'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ module.log_output(
+ ['true'], process, process.stdout, output_log_level=logging.INFO, error_on_warnings=False
)
diff --git a/tests/unit/borg/test_check.py b/tests/unit/borg/test_check.py
index 4675a5e..b2506aa 100644
--- a/tests/unit/borg/test_check.py
+++ b/tests/unit/borg/test_check.py
@@ -9,7 +9,9 @@ from ..test_verbosity import insert_logging_mock
def insert_execute_command_mock(command):
- flexmock(module).should_receive('execute_command').with_args(command).once()
+ flexmock(module).should_receive('execute_command').with_args(
+ command, error_on_warnings=True
+ ).once()
def insert_execute_command_never():
@@ -156,6 +158,21 @@ def test_make_check_flags_with_default_checks_and_prefix_includes_prefix_flag():
assert flags == ('--prefix', 'foo-')
+def test_check_archives_with_repair_calls_borg_with_repair_parameter():
+ checks = ('repository',)
+ consistency_config = {'check_last': None}
+ flexmock(module).should_receive('_parse_checks').and_return(checks)
+ flexmock(module).should_receive('_make_check_flags').and_return(())
+ flexmock(module).should_receive('execute_command').never()
+ flexmock(module).should_receive('execute_command_without_capture').with_args(
+ ('borg', 'check', '--repair', 'repo'), error_on_warnings=True
+ ).once()
+
+ module.check_archives(
+ repository='repo', storage_config={}, consistency_config=consistency_config, repair=True
+ )
+
+
@pytest.mark.parametrize(
'checks',
(
@@ -294,3 +311,17 @@ def test_check_archives_with_retention_prefix():
module.check_archives(
repository='repo', storage_config={}, consistency_config=consistency_config
)
+
+
+def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options():
+ checks = ('repository',)
+ consistency_config = {'check_last': None}
+ flexmock(module).should_receive('_parse_checks').and_return(checks)
+ flexmock(module).should_receive('_make_check_flags').and_return(())
+ insert_execute_command_mock(('borg', 'check', '--extra', '--options', 'repo'))
+
+ module.check_archives(
+ repository='repo',
+ storage_config={'extra_borg_options': {'check': '--extra --options'}},
+ consistency_config=consistency_config,
+ )
diff --git a/tests/unit/borg/test_create.py b/tests/unit/borg/test_create.py
index 156be57..f904414 100644
--- a/tests/unit/borg/test_create.py
+++ b/tests/unit/borg/test_create.py
@@ -184,14 +184,21 @@ def test_borgmatic_source_directories_set_when_directory_exists():
flexmock(module.os.path).should_receive('exists').and_return(True)
flexmock(module.os.path).should_receive('expanduser')
- assert module.borgmatic_source_directories() == [module.BORGMATIC_SOURCE_DIRECTORY]
+ assert module.borgmatic_source_directories('/tmp') == ['/tmp']
def test_borgmatic_source_directories_empty_when_directory_does_not_exist():
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.os.path).should_receive('expanduser')
- assert module.borgmatic_source_directories() == []
+ assert module.borgmatic_source_directories('/tmp') == []
+
+
+def test_borgmatic_source_directories_defaults_when_directory_not_given():
+ flexmock(module.os.path).should_receive('exists').and_return(True)
+ flexmock(module.os.path).should_receive('expanduser')
+
+ assert module.borgmatic_source_directories(None) == [module.DEFAULT_BORGMATIC_SOURCE_DIRECTORY]
DEFAULT_ARCHIVE_NAME = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
@@ -206,7 +213,9 @@ def test_create_archive_calls_borg_with_parameters():
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create') + ARCHIVE_WITH_PATHS, output_log_level=logging.INFO
+ ('borg', 'create') + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -232,7 +241,9 @@ def test_create_archive_with_patterns_calls_borg_with_patterns():
flexmock(module).should_receive('_make_pattern_flags').and_return(pattern_flags)
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create') + pattern_flags + ARCHIVE_WITH_PATHS, output_log_level=logging.INFO
+ ('borg', 'create') + pattern_flags + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -258,7 +269,9 @@ def test_create_archive_with_exclude_patterns_calls_borg_with_excludes():
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(exclude_flags)
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create') + exclude_flags + ARCHIVE_WITH_PATHS, output_log_level=logging.INFO
+ ('borg', 'create') + exclude_flags + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -284,6 +297,7 @@ def test_create_archive_with_log_info_calls_borg_with_info_parameter():
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', '--list', '--filter', 'AME-', '--info', '--stats') + ARCHIVE_WITH_PATHS,
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
insert_logging_mock(logging.INFO)
@@ -308,7 +322,9 @@ def test_create_archive_with_log_info_and_json_suppresses_most_borg_output():
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create', '--json') + ARCHIVE_WITH_PATHS, output_log_level=None
+ ('borg', 'create', '--json') + ARCHIVE_WITH_PATHS,
+ output_log_level=None,
+ error_on_warnings=False,
)
insert_logging_mock(logging.INFO)
@@ -336,6 +352,7 @@ def test_create_archive_with_log_debug_calls_borg_with_debug_parameter():
('borg', 'create', '--list', '--filter', 'AME-', '--stats', '--debug', '--show-rc')
+ ARCHIVE_WITH_PATHS,
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
insert_logging_mock(logging.DEBUG)
@@ -359,7 +376,9 @@ def test_create_archive_with_log_debug_and_json_suppresses_most_borg_output():
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create', '--json') + ARCHIVE_WITH_PATHS, output_log_level=None
+ ('borg', 'create', '--json') + ARCHIVE_WITH_PATHS,
+ output_log_level=None,
+ error_on_warnings=False,
)
insert_logging_mock(logging.DEBUG)
@@ -385,7 +404,9 @@ def test_create_archive_with_dry_run_calls_borg_with_dry_run_parameter():
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create', '--dry-run') + ARCHIVE_WITH_PATHS, output_log_level=logging.INFO
+ ('borg', 'create', '--dry-run') + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -414,6 +435,7 @@ def test_create_archive_with_dry_run_and_log_info_calls_borg_without_stats_param
('borg', 'create', '--list', '--filter', 'AME-', '--info', '--dry-run')
+ ARCHIVE_WITH_PATHS,
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
insert_logging_mock(logging.INFO)
@@ -443,6 +465,7 @@ def test_create_archive_with_dry_run_and_log_debug_calls_borg_without_stats_para
('borg', 'create', '--list', '--filter', 'AME-', '--debug', '--show-rc', '--dry-run')
+ ARCHIVE_WITH_PATHS,
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
insert_logging_mock(logging.DEBUG)
@@ -468,6 +491,7 @@ def test_create_archive_with_checkpoint_interval_calls_borg_with_checkpoint_inte
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', '--checkpoint-interval', '600') + ARCHIVE_WITH_PATHS,
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -492,6 +516,7 @@ def test_create_archive_with_chunker_params_calls_borg_with_chunker_params_param
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', '--chunker-params', '1,2,3,4') + ARCHIVE_WITH_PATHS,
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -516,6 +541,7 @@ def test_create_archive_with_compression_calls_borg_with_compression_parameters(
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', '--compression', 'rle') + ARCHIVE_WITH_PATHS,
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -540,6 +566,7 @@ def test_create_archive_with_remote_rate_limit_calls_borg_with_remote_ratelimit_
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', '--remote-ratelimit', '100') + ARCHIVE_WITH_PATHS,
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -562,7 +589,9 @@ def test_create_archive_with_one_file_system_calls_borg_with_one_file_system_par
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create', '--one-file-system') + ARCHIVE_WITH_PATHS, output_log_level=logging.INFO
+ ('borg', 'create', '--one-file-system') + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -586,7 +615,9 @@ def test_create_archive_with_numeric_owner_calls_borg_with_numeric_owner_paramet
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create', '--numeric-owner') + ARCHIVE_WITH_PATHS, output_log_level=logging.INFO
+ ('borg', 'create', '--numeric-owner') + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -610,7 +641,9 @@ def test_create_archive_with_read_special_calls_borg_with_read_special_parameter
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create', '--read-special') + ARCHIVE_WITH_PATHS, output_log_level=logging.INFO
+ ('borg', 'create', '--read-special') + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -635,7 +668,9 @@ def test_create_archive_with_option_true_calls_borg_without_corresponding_parame
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create') + ARCHIVE_WITH_PATHS, output_log_level=logging.INFO
+ ('borg', 'create') + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -662,6 +697,7 @@ def test_create_archive_with_option_false_calls_borg_with_corresponding_paramete
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', '--no' + option_name.replace('_', '')) + ARCHIVE_WITH_PATHS,
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -687,6 +723,7 @@ def test_create_archive_with_files_cache_calls_borg_with_files_cache_parameters(
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', '--files-cache', 'ctime,size') + ARCHIVE_WITH_PATHS,
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -710,7 +747,9 @@ def test_create_archive_with_local_path_calls_borg_via_local_path():
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg1', 'create') + ARCHIVE_WITH_PATHS, output_log_level=logging.INFO
+ ('borg1', 'create') + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -736,6 +775,7 @@ def test_create_archive_with_remote_path_calls_borg_with_remote_path_parameters(
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', '--remote-path', 'borg1') + ARCHIVE_WITH_PATHS,
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -759,7 +799,9 @@ def test_create_archive_with_umask_calls_borg_with_umask_parameters():
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create', '--umask', '740') + ARCHIVE_WITH_PATHS, output_log_level=logging.INFO
+ ('borg', 'create', '--umask', '740') + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -782,7 +824,9 @@ def test_create_archive_with_lock_wait_calls_borg_with_lock_wait_parameters():
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create', '--lock-wait', '5') + ARCHIVE_WITH_PATHS, output_log_level=logging.INFO
+ ('borg', 'create', '--lock-wait', '5') + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -805,7 +849,9 @@ def test_create_archive_with_stats_calls_borg_with_stats_parameter():
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create', '--stats') + ARCHIVE_WITH_PATHS, output_log_level=logging.WARNING
+ ('borg', 'create', '--stats') + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.WARNING,
+ error_on_warnings=False,
)
module.create_archive(
@@ -821,6 +867,32 @@ def test_create_archive_with_stats_calls_borg_with_stats_parameter():
)
+def test_create_archive_with_progress_and_log_info_calls_borg_with_progress_parameter_and_no_list():
+ flexmock(module).should_receive('borgmatic_source_directories').and_return([])
+ flexmock(module).should_receive('_expand_directories').and_return(('foo', 'bar'))
+ flexmock(module).should_receive('_expand_home_directories').and_return(())
+ flexmock(module).should_receive('_write_pattern_file').and_return(None)
+ flexmock(module).should_receive('_make_pattern_flags').and_return(())
+ flexmock(module).should_receive('_make_exclude_flags').and_return(())
+ flexmock(module).should_receive('execute_command_without_capture').with_args(
+ ('borg', 'create', '--info', '--stats', '--progress') + ARCHIVE_WITH_PATHS,
+ error_on_warnings=False,
+ )
+ insert_logging_mock(logging.INFO)
+
+ module.create_archive(
+ dry_run=False,
+ repository='repo',
+ location_config={
+ 'source_directories': ['foo', 'bar'],
+ 'repositories': ['repo'],
+ 'exclude_patterns': None,
+ },
+ storage_config={},
+ progress=True,
+ )
+
+
def test_create_archive_with_progress_calls_borg_with_progress_parameter():
flexmock(module).should_receive('borgmatic_source_directories').and_return([])
flexmock(module).should_receive('_expand_directories').and_return(('foo', 'bar'))
@@ -829,7 +901,7 @@ def test_create_archive_with_progress_calls_borg_with_progress_parameter():
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command_without_capture').with_args(
- ('borg', 'create', '--progress') + ARCHIVE_WITH_PATHS
+ ('borg', 'create', '--progress') + ARCHIVE_WITH_PATHS, error_on_warnings=False
)
module.create_archive(
@@ -853,7 +925,9 @@ def test_create_archive_with_json_calls_borg_with_json_parameter():
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create', '--json') + ARCHIVE_WITH_PATHS, output_log_level=None
+ ('borg', 'create', '--json') + ARCHIVE_WITH_PATHS,
+ output_log_level=None,
+ error_on_warnings=False,
).and_return('[]')
json_output = module.create_archive(
@@ -879,7 +953,9 @@ def test_create_archive_with_stats_and_json_calls_borg_without_stats_parameter()
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create', '--json') + ARCHIVE_WITH_PATHS, output_log_level=None
+ ('borg', 'create', '--json') + ARCHIVE_WITH_PATHS,
+ output_log_level=None,
+ error_on_warnings=False,
).and_return('[]')
json_output = module.create_archive(
@@ -908,6 +984,7 @@ def test_create_archive_with_source_directories_glob_expands():
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo', 'food'),
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
flexmock(module.glob).should_receive('glob').with_args('foo*').and_return(['foo', 'food'])
@@ -933,6 +1010,7 @@ def test_create_archive_with_non_matching_source_directories_glob_passes_through
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo*'),
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
flexmock(module.glob).should_receive('glob').with_args('foo*').and_return([])
@@ -958,6 +1036,7 @@ def test_create_archive_with_glob_calls_borg_with_expanded_directories():
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo', 'food'),
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -980,7 +1059,9 @@ def test_create_archive_with_archive_name_format_calls_borg_with_archive_name():
flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(())
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'create', 'repo::ARCHIVE_NAME', 'foo', 'bar'), output_log_level=logging.INFO
+ ('borg', 'create', 'repo::ARCHIVE_NAME', 'foo', 'bar'),
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -1005,6 +1086,7 @@ def test_create_archive_with_archive_name_format_accepts_borg_placeholders():
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', 'repo::Documents_{hostname}-{now}', 'foo', 'bar'),
output_log_level=logging.INFO,
+ error_on_warnings=False,
)
module.create_archive(
@@ -1017,3 +1099,28 @@ def test_create_archive_with_archive_name_format_accepts_borg_placeholders():
},
storage_config={'archive_name_format': 'Documents_{hostname}-{now}'},
)
+
+
+def test_create_archive_with_extra_borg_options_calls_borg_with_extra_options():
+ flexmock(module).should_receive('borgmatic_source_directories').and_return([])
+ flexmock(module).should_receive('_expand_directories').and_return(('foo', 'bar'))
+ flexmock(module).should_receive('_expand_home_directories').and_return(())
+ flexmock(module).should_receive('_write_pattern_file').and_return(None)
+ flexmock(module).should_receive('_make_pattern_flags').and_return(())
+ flexmock(module).should_receive('_make_exclude_flags').and_return(())
+ flexmock(module).should_receive('execute_command').with_args(
+ ('borg', 'create', '--extra', '--options') + ARCHIVE_WITH_PATHS,
+ output_log_level=logging.INFO,
+ error_on_warnings=False,
+ )
+
+ module.create_archive(
+ dry_run=False,
+ repository='repo',
+ location_config={
+ 'source_directories': ['foo', 'bar'],
+ 'repositories': ['repo'],
+ 'exclude_patterns': None,
+ },
+ storage_config={'extra_borg_options': {'create': '--extra --options'}},
+ )
diff --git a/tests/unit/borg/test_extract.py b/tests/unit/borg/test_extract.py
index 1459d53..f6c904e 100644
--- a/tests/unit/borg/test_extract.py
+++ b/tests/unit/borg/test_extract.py
@@ -15,7 +15,7 @@ def insert_execute_command_mock(command, working_directory=None, error_on_warnin
def insert_execute_command_output_mock(command, result):
flexmock(module).should_receive('execute_command').with_args(
- command, output_log_level=None
+ command, output_log_level=None, error_on_warnings=False
).and_return(result).once()
@@ -87,7 +87,7 @@ def test_extract_last_archive_dry_run_calls_borg_with_lock_wait_parameters():
module.extract_last_archive_dry_run(repository='repo', lock_wait=5)
-def test_extract_archive_calls_borg_with_restore_path_parameters():
+def test_extract_archive_calls_borg_with_path_parameters():
flexmock(module.os.path).should_receive('abspath').and_return('repo')
insert_execute_command_mock(('borg', 'extract', 'repo::archive', 'path1', 'path2'))
@@ -236,3 +236,19 @@ def test_extract_archive_calls_borg_with_progress_parameter():
storage_config={},
progress=True,
)
+
+
+def test_extract_archive_skips_abspath_for_remote_repository():
+ flexmock(module.os.path).should_receive('abspath').never()
+ flexmock(module).should_receive('execute_command').with_args(
+ ('borg', 'extract', 'server:repo::archive'), working_directory=None, error_on_warnings=True
+ ).once()
+
+ module.extract_archive(
+ dry_run=False,
+ repository='server:repo',
+ archive='archive',
+ paths=None,
+ location_config={},
+ storage_config={},
+ )
diff --git a/tests/unit/borg/test_info.py b/tests/unit/borg/test_info.py
index 93ff843..f09d4e8 100644
--- a/tests/unit/borg/test_info.py
+++ b/tests/unit/borg/test_info.py
@@ -10,7 +10,7 @@ from ..test_verbosity import insert_logging_mock
def test_display_archives_info_calls_borg_with_parameters():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'info', 'repo'), output_log_level=logging.WARNING
+ ('borg', 'info', 'repo'), output_log_level=logging.WARNING, error_on_warnings=False
)
module.display_archives_info(
@@ -20,7 +20,9 @@ def test_display_archives_info_calls_borg_with_parameters():
def test_display_archives_info_with_log_info_calls_borg_with_info_parameter():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'info', '--info', 'repo'), output_log_level=logging.WARNING
+ ('borg', 'info', '--info', 'repo'),
+ output_log_level=logging.WARNING,
+ error_on_warnings=False,
)
insert_logging_mock(logging.INFO)
module.display_archives_info(
@@ -30,7 +32,7 @@ def test_display_archives_info_with_log_info_calls_borg_with_info_parameter():
def test_display_archives_info_with_log_info_and_json_suppresses_most_borg_output():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'info', '--json', 'repo'), output_log_level=None
+ ('borg', 'info', '--json', 'repo'), output_log_level=None, error_on_warnings=False
).and_return('[]')
insert_logging_mock(logging.INFO)
@@ -43,7 +45,9 @@ def test_display_archives_info_with_log_info_and_json_suppresses_most_borg_outpu
def test_display_archives_info_with_log_debug_calls_borg_with_debug_parameter():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'info', '--debug', '--show-rc', 'repo'), output_log_level=logging.WARNING
+ ('borg', 'info', '--debug', '--show-rc', 'repo'),
+ output_log_level=logging.WARNING,
+ error_on_warnings=False,
)
insert_logging_mock(logging.DEBUG)
@@ -54,7 +58,7 @@ def test_display_archives_info_with_log_debug_calls_borg_with_debug_parameter():
def test_display_archives_info_with_log_debug_and_json_suppresses_most_borg_output():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'info', '--json', 'repo'), output_log_level=None
+ ('borg', 'info', '--json', 'repo'), output_log_level=None, error_on_warnings=False
).and_return('[]')
insert_logging_mock(logging.DEBUG)
@@ -67,7 +71,7 @@ def test_display_archives_info_with_log_debug_and_json_suppresses_most_borg_outp
def test_display_archives_info_with_json_calls_borg_with_json_parameter():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'info', '--json', 'repo'), output_log_level=None
+ ('borg', 'info', '--json', 'repo'), output_log_level=None, error_on_warnings=False
).and_return('[]')
json_output = module.display_archives_info(
@@ -79,7 +83,7 @@ def test_display_archives_info_with_json_calls_borg_with_json_parameter():
def test_display_archives_info_with_archive_calls_borg_with_archive_parameter():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'info', 'repo::archive'), output_log_level=logging.WARNING
+ ('borg', 'info', 'repo::archive'), output_log_level=logging.WARNING, error_on_warnings=False
)
module.display_archives_info(
@@ -89,7 +93,7 @@ def test_display_archives_info_with_archive_calls_borg_with_archive_parameter():
def test_display_archives_info_with_local_path_calls_borg_via_local_path():
flexmock(module).should_receive('execute_command').with_args(
- ('borg1', 'info', 'repo'), output_log_level=logging.WARNING
+ ('borg1', 'info', 'repo'), output_log_level=logging.WARNING, error_on_warnings=False
)
module.display_archives_info(
@@ -102,7 +106,9 @@ def test_display_archives_info_with_local_path_calls_borg_via_local_path():
def test_display_archives_info_with_remote_path_calls_borg_with_remote_path_parameters():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'info', '--remote-path', 'borg1', 'repo'), output_log_level=logging.WARNING
+ ('borg', 'info', '--remote-path', 'borg1', 'repo'),
+ output_log_level=logging.WARNING,
+ error_on_warnings=False,
)
module.display_archives_info(
@@ -116,7 +122,9 @@ def test_display_archives_info_with_remote_path_calls_borg_with_remote_path_para
def test_display_archives_info_with_lock_wait_calls_borg_with_lock_wait_parameters():
storage_config = {'lock_wait': 5}
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'info', '--lock-wait', '5', 'repo'), output_log_level=logging.WARNING
+ ('borg', 'info', '--lock-wait', '5', 'repo'),
+ output_log_level=logging.WARNING,
+ error_on_warnings=False,
)
module.display_archives_info(
@@ -131,6 +139,7 @@ def test_display_archives_info_passes_through_arguments_to_borg(argument_name):
flexmock(module).should_receive('execute_command').with_args(
('borg', 'info', '--' + argument_name.replace('_', '-'), 'value', 'repo'),
output_log_level=logging.WARNING,
+ error_on_warnings=False,
)
module.display_archives_info(
diff --git a/tests/unit/borg/test_init.py b/tests/unit/borg/test_init.py
index 33c40b3..49e9933 100644
--- a/tests/unit/borg/test_init.py
+++ b/tests/unit/borg/test_init.py
@@ -24,7 +24,7 @@ def insert_info_command_not_found_mock():
def insert_init_command_mock(init_command, **kwargs):
flexmock(module).should_receive('execute_command_without_capture').with_args(
- init_command
+ init_command, error_on_warnings=False
).once()
@@ -32,7 +32,7 @@ def test_initialize_repository_calls_borg_with_parameters():
insert_info_command_not_found_mock()
insert_init_command_mock(INIT_COMMAND + ('repo',))
- module.initialize_repository(repository='repo', encryption_mode='repokey')
+ module.initialize_repository(repository='repo', storage_config={}, encryption_mode='repokey')
def test_initialize_repository_raises_for_borg_init_error():
@@ -42,14 +42,16 @@ def test_initialize_repository_raises_for_borg_init_error():
)
with pytest.raises(subprocess.CalledProcessError):
- module.initialize_repository(repository='repo', encryption_mode='repokey')
+ module.initialize_repository(
+ repository='repo', storage_config={}, encryption_mode='repokey'
+ )
def test_initialize_repository_skips_initialization_when_repository_already_exists():
insert_info_command_found_mock()
flexmock(module).should_receive('execute_command_without_capture').never()
- module.initialize_repository(repository='repo', encryption_mode='repokey')
+ module.initialize_repository(repository='repo', storage_config={}, encryption_mode='repokey')
def test_initialize_repository_raises_for_unknown_info_command_error():
@@ -58,21 +60,27 @@ def test_initialize_repository_raises_for_unknown_info_command_error():
)
with pytest.raises(subprocess.CalledProcessError):
- module.initialize_repository(repository='repo', encryption_mode='repokey')
+ module.initialize_repository(
+ repository='repo', storage_config={}, encryption_mode='repokey'
+ )
def test_initialize_repository_with_append_only_calls_borg_with_append_only_parameter():
insert_info_command_not_found_mock()
insert_init_command_mock(INIT_COMMAND + ('--append-only', 'repo'))
- module.initialize_repository(repository='repo', encryption_mode='repokey', append_only=True)
+ module.initialize_repository(
+ repository='repo', storage_config={}, encryption_mode='repokey', append_only=True
+ )
def test_initialize_repository_with_storage_quota_calls_borg_with_storage_quota_parameter():
insert_info_command_not_found_mock()
insert_init_command_mock(INIT_COMMAND + ('--storage-quota', '5G', 'repo'))
- module.initialize_repository(repository='repo', encryption_mode='repokey', storage_quota='5G')
+ module.initialize_repository(
+ repository='repo', storage_config={}, encryption_mode='repokey', storage_quota='5G'
+ )
def test_initialize_repository_with_log_info_calls_borg_with_info_parameter():
@@ -80,7 +88,7 @@ def test_initialize_repository_with_log_info_calls_borg_with_info_parameter():
insert_init_command_mock(INIT_COMMAND + ('--info', 'repo'))
insert_logging_mock(logging.INFO)
- module.initialize_repository(repository='repo', encryption_mode='repokey')
+ module.initialize_repository(repository='repo', storage_config={}, encryption_mode='repokey')
def test_initialize_repository_with_log_debug_calls_borg_with_debug_parameter():
@@ -88,18 +96,33 @@ def test_initialize_repository_with_log_debug_calls_borg_with_debug_parameter():
insert_init_command_mock(INIT_COMMAND + ('--debug', 'repo'))
insert_logging_mock(logging.DEBUG)
- module.initialize_repository(repository='repo', encryption_mode='repokey')
+ module.initialize_repository(repository='repo', storage_config={}, encryption_mode='repokey')
def test_initialize_repository_with_local_path_calls_borg_via_local_path():
insert_info_command_not_found_mock()
insert_init_command_mock(('borg1',) + INIT_COMMAND[1:] + ('repo',))
- module.initialize_repository(repository='repo', encryption_mode='repokey', local_path='borg1')
+ module.initialize_repository(
+ repository='repo', storage_config={}, encryption_mode='repokey', local_path='borg1'
+ )
def test_initialize_repository_with_remote_path_calls_borg_with_remote_path_parameter():
insert_info_command_not_found_mock()
insert_init_command_mock(INIT_COMMAND + ('--remote-path', 'borg1', 'repo'))
- module.initialize_repository(repository='repo', encryption_mode='repokey', remote_path='borg1')
+ module.initialize_repository(
+ repository='repo', storage_config={}, encryption_mode='repokey', remote_path='borg1'
+ )
+
+
+def test_initialize_repository_with_extra_borg_options_calls_borg_with_extra_options():
+ insert_info_command_not_found_mock()
+ insert_init_command_mock(INIT_COMMAND + ('--extra', '--options', 'repo'))
+
+ module.initialize_repository(
+ repository='repo',
+ storage_config={'extra_borg_options': {'init': '--extra --options'}},
+ encryption_mode='repokey',
+ )
diff --git a/tests/unit/borg/test_list.py b/tests/unit/borg/test_list.py
index 2b84d0e..40ad122 100644
--- a/tests/unit/borg/test_list.py
+++ b/tests/unit/borg/test_list.py
@@ -10,129 +10,154 @@ from ..test_verbosity import insert_logging_mock
def test_list_archives_calls_borg_with_parameters():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'list', 'repo'), output_log_level=logging.WARNING
+ ('borg', 'list', 'repo'), output_log_level=logging.WARNING, error_on_warnings=False
)
module.list_archives(
repository='repo',
storage_config={},
- list_arguments=flexmock(archive=None, json=False, successful=False),
+ list_arguments=flexmock(archive=None, paths=None, json=False, successful=False),
)
def test_list_archives_with_log_info_calls_borg_with_info_parameter():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'list', '--info', 'repo'), output_log_level=logging.WARNING
+ ('borg', 'list', '--info', 'repo'),
+ output_log_level=logging.WARNING,
+ error_on_warnings=False,
)
insert_logging_mock(logging.INFO)
module.list_archives(
repository='repo',
storage_config={},
- list_arguments=flexmock(archive=None, json=False, successful=False),
+ list_arguments=flexmock(archive=None, paths=None, json=False, successful=False),
)
def test_list_archives_with_log_info_and_json_suppresses_most_borg_output():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'list', '--json', 'repo'), output_log_level=None
+ ('borg', 'list', '--json', 'repo'), output_log_level=None, error_on_warnings=False
)
insert_logging_mock(logging.INFO)
module.list_archives(
repository='repo',
storage_config={},
- list_arguments=flexmock(archive=None, json=True, successful=False),
+ list_arguments=flexmock(archive=None, paths=None, json=True, successful=False),
)
def test_list_archives_with_log_debug_calls_borg_with_debug_parameter():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'list', '--debug', '--show-rc', 'repo'), output_log_level=logging.WARNING
+ ('borg', 'list', '--debug', '--show-rc', 'repo'),
+ output_log_level=logging.WARNING,
+ error_on_warnings=False,
)
insert_logging_mock(logging.DEBUG)
module.list_archives(
repository='repo',
storage_config={},
- list_arguments=flexmock(archive=None, json=False, successful=False),
+ list_arguments=flexmock(archive=None, paths=None, json=False, successful=False),
)
def test_list_archives_with_log_debug_and_json_suppresses_most_borg_output():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'list', '--json', 'repo'), output_log_level=None
+ ('borg', 'list', '--json', 'repo'), output_log_level=None, error_on_warnings=False
)
insert_logging_mock(logging.DEBUG)
module.list_archives(
repository='repo',
storage_config={},
- list_arguments=flexmock(archive=None, json=True, successful=False),
+ list_arguments=flexmock(archive=None, paths=None, json=True, successful=False),
)
def test_list_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
storage_config = {'lock_wait': 5}
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'list', '--lock-wait', '5', 'repo'), output_log_level=logging.WARNING
+ ('borg', 'list', '--lock-wait', '5', 'repo'),
+ output_log_level=logging.WARNING,
+ error_on_warnings=False,
)
module.list_archives(
repository='repo',
storage_config=storage_config,
- list_arguments=flexmock(archive=None, json=False, successful=False),
+ list_arguments=flexmock(archive=None, paths=None, json=False, successful=False),
)
def test_list_archives_with_archive_calls_borg_with_archive_parameter():
storage_config = {}
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'list', 'repo::archive'), output_log_level=logging.WARNING
+ ('borg', 'list', 'repo::archive'), output_log_level=logging.WARNING, error_on_warnings=False
+ )
+
+ module.list_archives(
+ repository='repo',
+ storage_config=storage_config,
+ list_arguments=flexmock(archive='archive', paths=None, json=False, successful=False),
+ )
+
+
+def test_list_archives_with_path_calls_borg_with_path_parameter():
+ storage_config = {}
+ flexmock(module).should_receive('execute_command').with_args(
+ ('borg', 'list', 'repo::archive', 'var/lib'),
+ output_log_level=logging.WARNING,
+ error_on_warnings=False,
)
module.list_archives(
repository='repo',
storage_config=storage_config,
- list_arguments=flexmock(archive='archive', json=False, successful=False),
+ list_arguments=flexmock(archive='archive', paths=['var/lib'], json=False, successful=False),
)
def test_list_archives_with_local_path_calls_borg_via_local_path():
flexmock(module).should_receive('execute_command').with_args(
- ('borg1', 'list', 'repo'), output_log_level=logging.WARNING
+ ('borg1', 'list', 'repo'), output_log_level=logging.WARNING, error_on_warnings=False
)
module.list_archives(
repository='repo',
storage_config={},
- list_arguments=flexmock(archive=None, json=False, successful=False),
+ list_arguments=flexmock(archive=None, paths=None, json=False, successful=False),
local_path='borg1',
)
def test_list_archives_with_remote_path_calls_borg_with_remote_path_parameters():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'list', '--remote-path', 'borg1', 'repo'), output_log_level=logging.WARNING
+ ('borg', 'list', '--remote-path', 'borg1', 'repo'),
+ output_log_level=logging.WARNING,
+ error_on_warnings=False,
)
module.list_archives(
repository='repo',
storage_config={},
- list_arguments=flexmock(archive=None, json=False, successful=False),
+ list_arguments=flexmock(archive=None, paths=None, json=False, successful=False),
remote_path='borg1',
)
def test_list_archives_with_short_calls_borg_with_short_parameter():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'list', '--short', 'repo'), output_log_level=logging.WARNING
+ ('borg', 'list', '--short', 'repo'),
+ output_log_level=logging.WARNING,
+ error_on_warnings=False,
).and_return('[]')
module.list_archives(
repository='repo',
storage_config={},
- list_arguments=flexmock(archive=None, json=False, successful=False, short=True),
+ list_arguments=flexmock(archive=None, paths=None, json=False, successful=False, short=True),
)
@@ -154,13 +179,14 @@ def test_list_archives_passes_through_arguments_to_borg(argument_name):
flexmock(module).should_receive('execute_command').with_args(
('borg', 'list', '--' + argument_name.replace('_', '-'), 'value', 'repo'),
output_log_level=logging.WARNING,
+ error_on_warnings=False,
).and_return('[]')
module.list_archives(
repository='repo',
storage_config={},
list_arguments=flexmock(
- archive=None, json=False, successful=False, **{argument_name: 'value'}
+ archive=None, paths=None, json=False, successful=False, **{argument_name: 'value'}
),
)
@@ -169,24 +195,25 @@ def test_list_archives_with_successful_calls_borg_to_exclude_checkpoints():
flexmock(module).should_receive('execute_command').with_args(
('borg', 'list', '--glob-archives', module.BORG_EXCLUDE_CHECKPOINTS_GLOB, 'repo'),
output_log_level=logging.WARNING,
+ error_on_warnings=False,
).and_return('[]')
module.list_archives(
repository='repo',
storage_config={},
- list_arguments=flexmock(archive=None, json=False, successful=True),
+ list_arguments=flexmock(archive=None, paths=None, json=False, successful=True),
)
def test_list_archives_with_json_calls_borg_with_json_parameter():
flexmock(module).should_receive('execute_command').with_args(
- ('borg', 'list', '--json', 'repo'), output_log_level=None
+ ('borg', 'list', '--json', 'repo'), output_log_level=None, error_on_warnings=False
).and_return('[]')
json_output = module.list_archives(
repository='repo',
storage_config={},
- list_arguments=flexmock(archive=None, json=True, successful=False),
+ list_arguments=flexmock(archive=None, paths=None, json=True, successful=False),
)
assert json_output == '[]'
diff --git a/tests/unit/borg/test_mount.py b/tests/unit/borg/test_mount.py
new file mode 100644
index 0000000..1091406
--- /dev/null
+++ b/tests/unit/borg/test_mount.py
@@ -0,0 +1,146 @@
+import logging
+
+from flexmock import flexmock
+
+from borgmatic.borg import mount as module
+
+from ..test_verbosity import insert_logging_mock
+
+
+def insert_execute_command_mock(command):
+ flexmock(module).should_receive('execute_command').with_args(
+ command, error_on_warnings=False
+ ).once()
+
+
+def test_mount_archive_calls_borg_with_required_parameters():
+ insert_execute_command_mock(('borg', 'mount', 'repo::archive', '/mnt'))
+
+ module.mount_archive(
+ repository='repo',
+ archive='archive',
+ mount_point='/mnt',
+ paths=None,
+ foreground=False,
+ options=None,
+ storage_config={},
+ )
+
+
+def test_mount_archive_calls_borg_with_path_parameters():
+ insert_execute_command_mock(('borg', 'mount', 'repo::archive', '/mnt', 'path1', 'path2'))
+
+ module.mount_archive(
+ repository='repo',
+ archive='archive',
+ mount_point='/mnt',
+ paths=['path1', 'path2'],
+ foreground=False,
+ options=None,
+ storage_config={},
+ )
+
+
+def test_mount_archive_calls_borg_with_remote_path_parameters():
+ insert_execute_command_mock(
+ ('borg', 'mount', '--remote-path', 'borg1', 'repo::archive', '/mnt')
+ )
+
+ module.mount_archive(
+ repository='repo',
+ archive='archive',
+ mount_point='/mnt',
+ paths=None,
+ foreground=False,
+ options=None,
+ storage_config={},
+ remote_path='borg1',
+ )
+
+
+def test_mount_archive_calls_borg_with_umask_parameters():
+ insert_execute_command_mock(('borg', 'mount', '--umask', '0770', 'repo::archive', '/mnt'))
+
+ module.mount_archive(
+ repository='repo',
+ archive='archive',
+ mount_point='/mnt',
+ paths=None,
+ foreground=False,
+ options=None,
+ storage_config={'umask': '0770'},
+ )
+
+
+def test_mount_archive_calls_borg_with_lock_wait_parameters():
+ insert_execute_command_mock(('borg', 'mount', '--lock-wait', '5', 'repo::archive', '/mnt'))
+
+ module.mount_archive(
+ repository='repo',
+ archive='archive',
+ mount_point='/mnt',
+ paths=None,
+ foreground=False,
+ options=None,
+ storage_config={'lock_wait': '5'},
+ )
+
+
+def test_mount_archive_with_log_info_calls_borg_with_info_parameter():
+ insert_execute_command_mock(('borg', 'mount', '--info', 'repo::archive', '/mnt'))
+ insert_logging_mock(logging.INFO)
+
+ module.mount_archive(
+ repository='repo',
+ archive='archive',
+ mount_point='/mnt',
+ paths=None,
+ foreground=False,
+ options=None,
+ storage_config={},
+ )
+
+
+def test_mount_archive_with_log_debug_calls_borg_with_debug_parameters():
+ insert_execute_command_mock(('borg', 'mount', '--debug', '--show-rc', 'repo::archive', '/mnt'))
+ insert_logging_mock(logging.DEBUG)
+
+ module.mount_archive(
+ repository='repo',
+ archive='archive',
+ mount_point='/mnt',
+ paths=None,
+ foreground=False,
+ options=None,
+ storage_config={},
+ )
+
+
+def test_mount_archive_calls_borg_with_foreground_parameter():
+ flexmock(module).should_receive('execute_command_without_capture').with_args(
+ ('borg', 'mount', '--foreground', 'repo::archive', '/mnt'), error_on_warnings=False
+ ).once()
+
+ module.mount_archive(
+ repository='repo',
+ archive='archive',
+ mount_point='/mnt',
+ paths=None,
+ foreground=True,
+ options=None,
+ storage_config={},
+ )
+
+
+def test_mount_archive_calls_borg_with_options_parameters():
+ insert_execute_command_mock(('borg', 'mount', '-o', 'super_mount', 'repo::archive', '/mnt'))
+
+ module.mount_archive(
+ repository='repo',
+ archive='archive',
+ mount_point='/mnt',
+ paths=None,
+ foreground=False,
+ options='super_mount',
+ storage_config={},
+ )
diff --git a/tests/unit/borg/test_prune.py b/tests/unit/borg/test_prune.py
index d05e8c0..b2b4785 100644
--- a/tests/unit/borg/test_prune.py
+++ b/tests/unit/borg/test_prune.py
@@ -10,7 +10,7 @@ from ..test_verbosity import insert_logging_mock
def insert_execute_command_mock(prune_command, output_log_level):
flexmock(module).should_receive('execute_command').with_args(
- prune_command, output_log_level=output_log_level
+ prune_command, output_log_level=output_log_level, error_on_warnings=False
).once()
@@ -75,7 +75,9 @@ def test_prune_archives_with_log_info_calls_borg_with_info_parameter():
flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return(
BASE_PRUNE_FLAGS
)
- insert_execute_command_mock(PRUNE_COMMAND + ('--stats', '--info', 'repo'), logging.INFO)
+ insert_execute_command_mock(
+ PRUNE_COMMAND + ('--stats', '--info', '--list', 'repo'), logging.INFO
+ )
insert_logging_mock(logging.INFO)
module.prune_archives(
@@ -188,3 +190,18 @@ def test_prune_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
storage_config=storage_config,
retention_config=retention_config,
)
+
+
+def test_prune_archives_with_extra_borg_options_calls_borg_with_extra_options():
+ retention_config = flexmock()
+ flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return(
+ BASE_PRUNE_FLAGS
+ )
+ insert_execute_command_mock(PRUNE_COMMAND + ('--extra', '--options', 'repo'), logging.INFO)
+
+ module.prune_archives(
+ dry_run=False,
+ repository='repo',
+ storage_config={'extra_borg_options': {'prune': '--extra --options'}},
+ retention_config=retention_config,
+ )
diff --git a/tests/unit/borg/test_umount.py b/tests/unit/borg/test_umount.py
new file mode 100644
index 0000000..78942ac
--- /dev/null
+++ b/tests/unit/borg/test_umount.py
@@ -0,0 +1,33 @@
+import logging
+
+from flexmock import flexmock
+
+from borgmatic.borg import umount as module
+
+from ..test_verbosity import insert_logging_mock
+
+
+def insert_execute_command_mock(command):
+ flexmock(module).should_receive('execute_command').with_args(
+ command, error_on_warnings=True
+ ).once()
+
+
+def test_unmount_archive_calls_borg_with_required_parameters():
+ insert_execute_command_mock(('borg', 'umount', '/mnt'))
+
+ module.unmount_archive(mount_point='/mnt')
+
+
+def test_unmount_archive_with_log_info_calls_borg_with_info_parameter():
+ insert_execute_command_mock(('borg', 'umount', '--info', '/mnt'))
+ insert_logging_mock(logging.INFO)
+
+ module.unmount_archive(mount_point='/mnt')
+
+
+def test_unmount_archive_with_log_debug_calls_borg_with_debug_parameters():
+ insert_execute_command_mock(('borg', 'umount', '--debug', '--show-rc', '/mnt'))
+ insert_logging_mock(logging.DEBUG)
+
+ module.unmount_archive(mount_point='/mnt')
diff --git a/tests/unit/commands/test_borgmatic.py b/tests/unit/commands/test_borgmatic.py
index 4712407..ccc6375 100644
--- a/tests/unit/commands/test_borgmatic.py
+++ b/tests/unit/commands/test_borgmatic.py
@@ -20,12 +20,21 @@ def test_run_configuration_runs_actions_for_each_repository():
assert results == expected_results
-def test_run_configuration_executes_hooks_for_create_action():
+def test_run_configuration_calls_hooks_for_prune_action():
+ flexmock(module.borg_environment).should_receive('initialize')
+ flexmock(module.command).should_receive('execute_hook').never()
+ flexmock(module.dispatch).should_receive('call_hooks').at_least().twice()
+ flexmock(module).should_receive('run_actions').and_return([])
+ config = {'location': {'repositories': ['foo']}}
+ arguments = {'global': flexmock(dry_run=False), 'prune': flexmock()}
+
+ list(module.run_configuration('test.yaml', config, arguments))
+
+
+def test_run_configuration_executes_and_calls_hooks_for_create_action():
flexmock(module.borg_environment).should_receive('initialize')
flexmock(module.command).should_receive('execute_hook').twice()
- flexmock(module.postgresql).should_receive('dump_databases').once()
- flexmock(module.healthchecks).should_receive('ping_healthchecks').twice()
- flexmock(module.postgresql).should_receive('remove_database_dumps').once()
+ flexmock(module.dispatch).should_receive('call_hooks').at_least().twice()
flexmock(module).should_receive('run_actions').and_return([])
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(dry_run=False), 'create': flexmock()}
@@ -33,11 +42,32 @@ def test_run_configuration_executes_hooks_for_create_action():
list(module.run_configuration('test.yaml', config, arguments))
+def test_run_configuration_calls_hooks_for_check_action():
+ flexmock(module.borg_environment).should_receive('initialize')
+ flexmock(module.command).should_receive('execute_hook').never()
+ flexmock(module.dispatch).should_receive('call_hooks').at_least().twice()
+ flexmock(module).should_receive('run_actions').and_return([])
+ config = {'location': {'repositories': ['foo']}}
+ arguments = {'global': flexmock(dry_run=False), 'check': flexmock()}
+
+ list(module.run_configuration('test.yaml', config, arguments))
+
+
+def test_run_configuration_does_not_trigger_hooks_for_list_action():
+ flexmock(module.borg_environment).should_receive('initialize')
+ flexmock(module.command).should_receive('execute_hook').never()
+ flexmock(module.dispatch).should_receive('call_hooks').never()
+ flexmock(module).should_receive('run_actions').and_return([])
+ config = {'location': {'repositories': ['foo']}}
+ arguments = {'global': flexmock(dry_run=False), 'list': flexmock()}
+
+ list(module.run_configuration('test.yaml', config, arguments))
+
+
def test_run_configuration_logs_actions_error():
flexmock(module.borg_environment).should_receive('initialize')
flexmock(module.command).should_receive('execute_hook')
- flexmock(module.postgresql).should_receive('dump_databases')
- flexmock(module.healthchecks).should_receive('ping_healthchecks')
+ flexmock(module.dispatch).should_receive('call_hooks')
expected_results = [flexmock()]
flexmock(module).should_receive('make_error_log_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').and_raise(OSError)
@@ -68,6 +98,7 @@ def test_run_configuration_logs_post_hook_error():
flexmock(module.command).should_receive('execute_hook').and_return(None).and_raise(
OSError
).and_return(None)
+ flexmock(module.dispatch).should_receive('call_hooks')
expected_results = [flexmock()]
flexmock(module).should_receive('make_error_log_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').and_return([])
@@ -88,7 +119,7 @@ def test_run_configuration_logs_on_error_hook_error():
).and_return(expected_results[1:])
flexmock(module).should_receive('run_actions').and_raise(OSError)
config = {'location': {'repositories': ['foo']}}
- arguments = {'global': flexmock(dry_run=False)}
+ arguments = {'global': flexmock(dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
@@ -117,41 +148,72 @@ def test_load_configurations_logs_critical_for_parse_error():
assert {log.levelno for log in logs} == {logging.CRITICAL}
+def test_log_record_does_not_raise():
+ module.log_record(levelno=1, foo='bar', baz='quux')
+
+
+def test_log_record_with_suppress_does_not_raise():
+ module.log_record(levelno=1, foo='bar', baz='quux', suppress_log=True)
+
+
def test_make_error_log_records_generates_output_logs_for_message_only():
+ flexmock(module).should_receive('log_record').replace_with(dict)
+
logs = tuple(module.make_error_log_records('Error'))
- assert {log.levelno for log in logs} == {logging.CRITICAL}
+ assert {log['levelno'] for log in logs} == {logging.CRITICAL}
def test_make_error_log_records_generates_output_logs_for_called_process_error():
+ flexmock(module).should_receive('log_record').replace_with(dict)
+ flexmock(module.logger).should_receive('getEffectiveLevel').and_return(logging.WARNING)
+
logs = tuple(
module.make_error_log_records(
'Error', subprocess.CalledProcessError(1, 'ls', 'error output')
)
)
- assert {log.levelno for log in logs} == {logging.CRITICAL}
+ assert {log['levelno'] for log in logs} == {logging.CRITICAL}
assert any(log for log in logs if 'error output' in str(log))
def test_make_error_log_records_generates_logs_for_value_error():
+ flexmock(module).should_receive('log_record').replace_with(dict)
+
logs = tuple(module.make_error_log_records('Error', ValueError()))
- assert {log.levelno for log in logs} == {logging.CRITICAL}
+ assert {log['levelno'] for log in logs} == {logging.CRITICAL}
def test_make_error_log_records_generates_logs_for_os_error():
+ flexmock(module).should_receive('log_record').replace_with(dict)
+
logs = tuple(module.make_error_log_records('Error', OSError()))
- assert {log.levelno for log in logs} == {logging.CRITICAL}
+ assert {log['levelno'] for log in logs} == {logging.CRITICAL}
def test_make_error_log_records_generates_nothing_for_other_error():
+ flexmock(module).should_receive('log_record').replace_with(dict)
+
logs = tuple(module.make_error_log_records('Error', KeyError()))
assert logs == ()
+def test_get_local_path_uses_configuration_value():
+ assert module.get_local_path({'test.yaml': {'location': {'local_path': 'borg1'}}}) == 'borg1'
+
+
+def test_get_local_path_without_location_defaults_to_borg():
+ assert module.get_local_path({'test.yaml': {}}) == 'borg'
+
+
+def test_get_local_path_without_local_path_defaults_to_borg():
+ assert module.get_local_path({'test.yaml': {'location': {}}}) == 'borg'
+
+
def test_collect_configuration_run_summary_logs_info_for_success():
flexmock(module.command).should_receive('execute_hook').never()
flexmock(module).should_receive('run_configuration').and_return([])
@@ -202,6 +264,33 @@ def test_collect_configuration_run_summary_logs_extract_with_repository_error():
assert logs == expected_logs
+def test_collect_configuration_run_summary_logs_info_for_success_with_mount():
+ flexmock(module.validate).should_receive('guard_configuration_contains_repository')
+ flexmock(module).should_receive('run_configuration').and_return([])
+ arguments = {'mount': flexmock(repository='repo')}
+
+ logs = tuple(
+ module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments)
+ )
+
+ assert {log.levelno for log in logs} == {logging.INFO}
+
+
+def test_collect_configuration_run_summary_logs_mount_with_repository_error():
+ flexmock(module.validate).should_receive('guard_configuration_contains_repository').and_raise(
+ ValueError
+ )
+ expected_logs = (flexmock(),)
+ flexmock(module).should_receive('make_error_log_records').and_return(expected_logs)
+ arguments = {'mount': flexmock(repository='repo')}
+
+ logs = tuple(
+ module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments)
+ )
+
+ assert logs == expected_logs
+
+
def test_collect_configuration_run_summary_logs_missing_configs_error():
arguments = {'global': flexmock(config_paths=[])}
expected_logs = (flexmock(),)
@@ -270,6 +359,7 @@ def test_collect_configuration_run_summary_logs_run_configuration_error():
flexmock(module).should_receive('run_configuration').and_return(
[logging.makeLogRecord(dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg='Error'))]
)
+ flexmock(module).should_receive('make_error_log_records').and_return([])
arguments = {}
logs = tuple(
@@ -279,6 +369,22 @@ def test_collect_configuration_run_summary_logs_run_configuration_error():
assert {log.levelno for log in logs} == {logging.CRITICAL}
+def test_collect_configuration_run_summary_logs_run_umount_error():
+ flexmock(module.validate).should_receive('guard_configuration_contains_repository')
+ flexmock(module).should_receive('run_configuration').and_return([])
+ flexmock(module.borg_umount).should_receive('unmount_archive').and_raise(OSError)
+ flexmock(module).should_receive('make_error_log_records').and_return(
+ [logging.makeLogRecord(dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg='Error'))]
+ )
+ arguments = {'umount': flexmock(mount_point='/mnt')}
+
+ logs = tuple(
+ module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments)
+ )
+
+ assert {log.levelno for log in logs} == {logging.INFO, logging.CRITICAL}
+
+
def test_collect_configuration_run_summary_logs_outputs_merged_json_results():
flexmock(module).should_receive('run_configuration').and_return(['foo', 'bar']).and_return(
['baz']
diff --git a/tests/unit/config/test_override.py b/tests/unit/config/test_override.py
new file mode 100644
index 0000000..6925ca4
--- /dev/null
+++ b/tests/unit/config/test_override.py
@@ -0,0 +1,82 @@
+import pytest
+from flexmock import flexmock
+
+from borgmatic.config import override as module
+
+
+def test_set_values_with_empty_keys_bails():
+ config = {}
+
+ module.set_values(config, keys=(), value='value')
+
+ assert config == {}
+
+
+def test_set_values_with_one_key_sets_it_into_config():
+ config = {}
+
+ module.set_values(config, keys=('key',), value='value')
+
+ assert config == {'key': 'value'}
+
+
+def test_set_values_with_one_key_overwrites_existing_key():
+ config = {'key': 'old_value', 'other': 'other_value'}
+
+ module.set_values(config, keys=('key',), value='value')
+
+ assert config == {'key': 'value', 'other': 'other_value'}
+
+
+def test_set_values_with_multiple_keys_creates_hierarchy():
+ config = {}
+
+ module.set_values(config, ('section', 'key'), 'value')
+
+ assert config == {'section': {'key': 'value'}}
+
+
+def test_set_values_with_multiple_keys_updates_hierarchy():
+ config = {'section': {'other': 'other_value'}}
+ module.set_values(config, ('section', 'key'), 'value')
+
+ assert config == {'section': {'key': 'value', 'other': 'other_value'}}
+
+
+def test_parse_overrides_splits_keys_and_values():
+ flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value)
+ raw_overrides = ['section.my_option=value1', 'section.other_option=value2']
+ expected_result = (
+ (('section', 'my_option'), 'value1'),
+ (('section', 'other_option'), 'value2'),
+ )
+
+ module.parse_overrides(raw_overrides) == expected_result
+
+
+def test_parse_overrides_allows_value_with_equal_sign():
+ flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value)
+ raw_overrides = ['section.option=this===value']
+ expected_result = ((('section', 'option'), 'this===value'),)
+
+ module.parse_overrides(raw_overrides) == expected_result
+
+
+def test_parse_overrides_raises_on_missing_equal_sign():
+ flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value)
+ raw_overrides = ['section.option']
+
+ with pytest.raises(ValueError):
+ module.parse_overrides(raw_overrides)
+
+
+def test_parse_overrides_allows_value_with_single_key():
+ flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value)
+ raw_overrides = ['option=value']
+ expected_result = ((('option',), 'value'),)
+
+ module.parse_overrides(raw_overrides) == expected_result
+
+
+def test_parse_overrides_handles_empty_overrides():
+ module.parse_overrides(raw_overrides=None) == ()
diff --git a/tests/unit/config/test_validate.py b/tests/unit/config/test_validate.py
index bdf30be..4fc4a62 100644
--- a/tests/unit/config/test_validate.py
+++ b/tests/unit/config/test_validate.py
@@ -1,4 +1,5 @@
import pytest
+from flexmock import flexmock
from borgmatic.config import validate as module
@@ -95,7 +96,38 @@ def test_remove_examples_strips_examples_from_sequence_of_maps():
assert schema == {'seq': [{'map': {'foo': {'desc': 'thing'}}}]}
+def test_normalize_repository_path_passes_through_remote_repository():
+ repository = 'example.org:test.borg'
+
+ module.normalize_repository_path(repository) == repository
+
+
+def test_normalize_repository_path_passes_through_absolute_repository():
+ repository = '/foo/bar/test.borg'
+ flexmock(module.os.path).should_receive('abspath').and_return(repository)
+
+ module.normalize_repository_path(repository) == repository
+
+
+def test_normalize_repository_path_resolves_relative_repository():
+ repository = 'test.borg'
+ absolute = '/foo/bar/test.borg'
+ flexmock(module.os.path).should_receive('abspath').and_return(absolute)
+
+ module.normalize_repository_path(repository) == absolute
+
+
+def test_repositories_match_does_not_raise():
+ flexmock(module).should_receive('normalize_repository_path')
+
+ module.repositories_match('foo', 'bar')
+
+
def test_guard_configuration_contains_repository_does_not_raise_when_repository_in_config():
+ flexmock(module).should_receive('repositories_match').replace_with(
+ lambda first, second: first == second
+ )
+
module.guard_configuration_contains_repository(
repository='repo', configurations={'config.yaml': {'location': {'repositories': ['repo']}}}
)
@@ -116,6 +148,10 @@ def test_guard_configuration_contains_repository_errors_when_repository_assumed_
def test_guard_configuration_contains_repository_errors_when_repository_missing_from_config():
+ flexmock(module).should_receive('repositories_match').replace_with(
+ lambda first, second: first == second
+ )
+
with pytest.raises(ValueError):
module.guard_configuration_contains_repository(
repository='nope',
@@ -124,6 +160,10 @@ def test_guard_configuration_contains_repository_errors_when_repository_missing_
def test_guard_configuration_contains_repository_errors_when_repository_matches_config_twice():
+ flexmock(module).should_receive('repositories_match').replace_with(
+ lambda first, second: first == second
+ )
+
with pytest.raises(ValueError):
module.guard_configuration_contains_repository(
repository='repo',
diff --git a/tests/unit/hooks/test_cronhub.py b/tests/unit/hooks/test_cronhub.py
index dc02387..3201811 100644
--- a/tests/unit/hooks/test_cronhub.py
+++ b/tests/unit/hooks/test_cronhub.py
@@ -3,30 +3,36 @@ from flexmock import flexmock
from borgmatic.hooks import cronhub as module
-def test_ping_cronhub_hits_ping_url_with_start_state():
+def test_ping_monitor_rewrites_ping_url_for_start_state():
ping_url = 'https://example.com/start/abcdef'
- state = 'bork'
- flexmock(module.requests).should_receive('get').with_args('https://example.com/bork/abcdef')
+ flexmock(module.requests).should_receive('get').with_args('https://example.com/start/abcdef')
- module.ping_cronhub(ping_url, 'config.yaml', dry_run=False, state=state)
+ module.ping_monitor(ping_url, 'config.yaml', module.monitor.State.START, dry_run=False)
-def test_ping_cronhub_hits_ping_url_with_ping_state():
+def test_ping_monitor_rewrites_ping_url_and_state_for_start_state():
ping_url = 'https://example.com/ping/abcdef'
- state = 'bork'
- flexmock(module.requests).should_receive('get').with_args('https://example.com/bork/abcdef')
+ flexmock(module.requests).should_receive('get').with_args('https://example.com/start/abcdef')
- module.ping_cronhub(ping_url, 'config.yaml', dry_run=False, state=state)
+ module.ping_monitor(ping_url, 'config.yaml', module.monitor.State.START, dry_run=False)
-def test_ping_cronhub_without_ping_url_does_not_raise():
- flexmock(module.requests).should_receive('get').never()
+def test_ping_monitor_rewrites_ping_url_for_finish_state():
+ ping_url = 'https://example.com/start/abcdef'
+ flexmock(module.requests).should_receive('get').with_args('https://example.com/finish/abcdef')
+
+ module.ping_monitor(ping_url, 'config.yaml', module.monitor.State.FINISH, dry_run=False)
+
+
+def test_ping_monitor_rewrites_ping_url_for_fail_state():
+ ping_url = 'https://example.com/start/abcdef'
+ flexmock(module.requests).should_receive('get').with_args('https://example.com/fail/abcdef')
- module.ping_cronhub(ping_url=None, config_filename='config.yaml', dry_run=False, state='oops')
+ module.ping_monitor(ping_url, 'config.yaml', module.monitor.State.FAIL, dry_run=False)
-def test_ping_cronhub_dry_run_does_not_hit_ping_url():
+def test_ping_monitor_dry_run_does_not_hit_ping_url():
ping_url = 'https://example.com'
flexmock(module.requests).should_receive('get').never()
- module.ping_cronhub(ping_url, 'config.yaml', dry_run=True, state='yay')
+ module.ping_monitor(ping_url, 'config.yaml', module.monitor.State.START, dry_run=True)
diff --git a/tests/unit/hooks/test_cronitor.py b/tests/unit/hooks/test_cronitor.py
index 19a2bc4..73bcffa 100644
--- a/tests/unit/hooks/test_cronitor.py
+++ b/tests/unit/hooks/test_cronitor.py
@@ -3,22 +3,29 @@ from flexmock import flexmock
from borgmatic.hooks import cronitor as module
-def test_ping_cronitor_hits_ping_url():
+def test_ping_monitor_hits_ping_url_for_start_state():
ping_url = 'https://example.com'
- append = 'failed-so-hard'
- flexmock(module.requests).should_receive('get').with_args('{}/{}'.format(ping_url, append))
+ flexmock(module.requests).should_receive('get').with_args('{}/{}'.format(ping_url, 'run'))
- module.ping_cronitor(ping_url, 'config.yaml', dry_run=False, append=append)
+ module.ping_monitor(ping_url, 'config.yaml', module.monitor.State.START, dry_run=False)
-def test_ping_cronitor_without_ping_url_does_not_raise():
- flexmock(module.requests).should_receive('get').never()
+def test_ping_monitor_hits_ping_url_for_finish_state():
+ ping_url = 'https://example.com'
+ flexmock(module.requests).should_receive('get').with_args('{}/{}'.format(ping_url, 'complete'))
+
+ module.ping_monitor(ping_url, 'config.yaml', module.monitor.State.FINISH, dry_run=False)
+
+
+def test_ping_monitor_hits_ping_url_for_fail_state():
+ ping_url = 'https://example.com'
+ flexmock(module.requests).should_receive('get').with_args('{}/{}'.format(ping_url, 'fail'))
- module.ping_cronitor(ping_url=None, config_filename='config.yaml', dry_run=False, append='oops')
+ module.ping_monitor(ping_url, 'config.yaml', module.monitor.State.FAIL, dry_run=False)
-def test_ping_cronitor_dry_run_does_not_hit_ping_url():
+def test_ping_monitor_dry_run_does_not_hit_ping_url():
ping_url = 'https://example.com'
flexmock(module.requests).should_receive('get').never()
- module.ping_cronitor(ping_url, 'config.yaml', dry_run=True, append='yay')
+ module.ping_monitor(ping_url, 'config.yaml', module.monitor.State.START, dry_run=True)
diff --git a/tests/unit/hooks/test_dispatch.py b/tests/unit/hooks/test_dispatch.py
new file mode 100644
index 0000000..ec163d8
--- /dev/null
+++ b/tests/unit/hooks/test_dispatch.py
@@ -0,0 +1,68 @@
+import sys
+
+import pytest
+from flexmock import flexmock
+
+from borgmatic.hooks import dispatch as module
+
+
+def hook_function(config, log_prefix, thing, value):
+ '''
+ This test function gets mocked out below.
+ '''
+ pass
+
+
+def test_call_hook_invokes_module_function_with_arguments_and_returns_value():
+ hooks = {'super_hook': flexmock(), 'other_hook': flexmock()}
+ expected_return_value = flexmock()
+ test_module = sys.modules[__name__]
+ flexmock(module).HOOK_NAME_TO_MODULE = {'super_hook': test_module}
+ flexmock(test_module).should_receive('hook_function').with_args(
+ hooks['super_hook'], 'prefix', 55, value=66
+ ).and_return(expected_return_value).once()
+
+ return_value = module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66)
+
+ assert return_value == expected_return_value
+
+
+def test_call_hook_without_hook_config_skips_call():
+ hooks = {'other_hook': flexmock()}
+ test_module = sys.modules[__name__]
+ flexmock(module).HOOK_NAME_TO_MODULE = {'super_hook': test_module}
+ flexmock(test_module).should_receive('hook_function').never()
+
+ module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66)
+
+
+def test_call_hook_without_corresponding_module_raises():
+ hooks = {'super_hook': flexmock(), 'other_hook': flexmock()}
+ test_module = sys.modules[__name__]
+ flexmock(module).HOOK_NAME_TO_MODULE = {'other_hook': test_module}
+ flexmock(test_module).should_receive('hook_function').never()
+
+ with pytest.raises(ValueError):
+ module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66)
+
+
+def test_call_hooks_calls_each_hook_and_collects_return_values():
+ hooks = {'super_hook': flexmock(), 'other_hook': flexmock()}
+ expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()}
+ flexmock(module).should_receive('call_hook').and_return(
+ expected_return_values['super_hook']
+ ).and_return(expected_return_values['other_hook'])
+
+ return_values = module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55)
+
+ assert return_values == expected_return_values
+
+
+def test_call_hooks_calls_skips_return_values_for_unconfigured_hooks():
+ hooks = {'super_hook': flexmock()}
+ expected_return_values = {'super_hook': flexmock()}
+ flexmock(module).should_receive('call_hook').and_return(expected_return_values['super_hook'])
+
+ return_values = module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55)
+
+ assert return_values == expected_return_values
diff --git a/tests/unit/hooks/test_dump.py b/tests/unit/hooks/test_dump.py
new file mode 100644
index 0000000..d36f809
--- /dev/null
+++ b/tests/unit/hooks/test_dump.py
@@ -0,0 +1,183 @@
+import pytest
+from flexmock import flexmock
+
+from borgmatic.hooks import dump as module
+
+
+def test_make_database_dump_path_joins_arguments():
+ assert module.make_database_dump_path('/tmp', 'super_databases') == '/tmp/super_databases'
+
+
+def test_make_database_dump_path_defaults_without_source_directory():
+ assert module.make_database_dump_path(None, 'super_databases') == '~/.borgmatic/super_databases'
+
+
+def test_make_database_dump_filename_uses_name_and_hostname():
+ flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+
+ assert (
+ module.make_database_dump_filename('databases', 'test', 'hostname')
+ == 'databases/hostname/test'
+ )
+
+
+def test_make_database_dump_filename_without_hostname_defaults_to_localhost():
+ flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+
+ assert module.make_database_dump_filename('databases', 'test') == 'databases/localhost/test'
+
+
+def test_make_database_dump_filename_with_invalid_name_raises():
+ flexmock(module.os.path).should_receive('expanduser').and_return('databases')
+
+ with pytest.raises(ValueError):
+ module.make_database_dump_filename('databases', 'invalid/name')
+
+
+def test_flatten_dump_patterns_produces_list_of_all_patterns():
+ dump_patterns = {'postgresql_databases': ['*/glob', 'glob/*'], 'mysql_databases': ['*/*/*']}
+ expected_patterns = sorted(
+ dump_patterns['postgresql_databases'] + dump_patterns['mysql_databases']
+ )
+
+ assert sorted(module.flatten_dump_patterns(dump_patterns, ('bob',))) == expected_patterns
+
+
+def test_flatten_dump_patterns_with_no_patterns_errors():
+ dump_patterns = {'postgresql_databases': [], 'mysql_databases': []}
+
+ with pytest.raises(ValueError):
+ assert module.flatten_dump_patterns(dump_patterns, ('bob',))
+
+
+def test_flatten_dump_patterns_with_no_hooks_errors():
+ dump_patterns = {}
+
+ with pytest.raises(ValueError):
+ assert module.flatten_dump_patterns(dump_patterns, ('bob',))
+
+
+def test_remove_database_dumps_removes_dump_for_each_database():
+ databases = [{'name': 'foo'}, {'name': 'bar'}]
+ flexmock(module).should_receive('make_database_dump_filename').with_args(
+ 'databases', 'foo', None
+ ).and_return('databases/localhost/foo')
+ flexmock(module).should_receive('make_database_dump_filename').with_args(
+ 'databases', 'bar', None
+ ).and_return('databases/localhost/bar')
+
+ flexmock(module.os).should_receive('remove').with_args('databases/localhost/foo').once()
+ flexmock(module.os).should_receive('remove').with_args('databases/localhost/bar').once()
+ flexmock(module.os).should_receive('listdir').with_args('databases/localhost').and_return(
+ ['bar']
+ ).and_return([])
+
+ flexmock(module.os).should_receive('rmdir').with_args('databases/localhost').once()
+
+ module.remove_database_dumps('databases', databases, 'SuperDB', 'test.yaml', dry_run=False)
+
+
+def test_remove_database_dumps_with_dry_run_skips_removal():
+ databases = [{'name': 'foo'}, {'name': 'bar'}]
+ flexmock(module.os).should_receive('rmdir').never()
+ flexmock(module.os).should_receive('remove').never()
+
+ module.remove_database_dumps('databases', databases, 'SuperDB', 'test.yaml', dry_run=True)
+
+
+def test_remove_database_dumps_without_databases_does_not_raise():
+ module.remove_database_dumps('databases', [], 'SuperDB', 'test.yaml', dry_run=False)
+
+
+def test_convert_glob_patterns_to_borg_patterns_removes_leading_slash():
+ assert module.convert_glob_patterns_to_borg_patterns(('/etc/foo/bar',)) == ['sh:etc/foo/bar']
+
+
+def test_get_database_names_from_dumps_gets_names_from_filenames_matching_globs():
+ flexmock(module.glob).should_receive('glob').and_return(
+ ('databases/localhost/foo',)
+ ).and_return(('databases/localhost/bar',)).and_return(())
+
+ assert module.get_database_names_from_dumps(
+ ('databases/*/foo', 'databases/*/bar', 'databases/*/baz')
+ ) == ['foo', 'bar']
+
+
+def test_get_database_configurations_only_produces_named_databases():
+ databases = [
+ {'name': 'foo', 'hostname': 'example.org'},
+ {'name': 'bar', 'hostname': 'example.com'},
+ {'name': 'baz', 'hostname': 'example.org'},
+ ]
+
+ assert list(module.get_database_configurations(databases, ('foo', 'baz'))) == [
+ {'name': 'foo', 'hostname': 'example.org'},
+ {'name': 'baz', 'hostname': 'example.org'},
+ ]
+
+
+def test_get_database_configurations_matches_all_database():
+ databases = [
+ {'name': 'foo', 'hostname': 'example.org'},
+ {'name': 'all', 'hostname': 'example.com'},
+ ]
+
+ assert list(module.get_database_configurations(databases, ('foo', 'bar', 'baz'))) == [
+ {'name': 'foo', 'hostname': 'example.org'},
+ {'name': 'bar', 'hostname': 'example.com'},
+ {'name': 'baz', 'hostname': 'example.com'},
+ ]
+
+
+def test_get_per_hook_database_configurations_partitions_by_hook():
+ hooks = {'postgresql_databases': [flexmock()]}
+ names = ('foo', 'bar')
+ dump_patterns = flexmock()
+ expected_config = {'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]}
+ flexmock(module).should_receive('get_database_configurations').with_args(
+ hooks['postgresql_databases'], names
+ ).and_return(expected_config['postgresql_databases'])
+
+ config = module.get_per_hook_database_configurations(hooks, names, dump_patterns)
+
+ assert config == expected_config
+
+
+def test_get_per_hook_database_configurations_defaults_to_detected_database_names():
+ hooks = {'postgresql_databases': [flexmock()]}
+ names = ()
+ detected_names = flexmock()
+ dump_patterns = {'postgresql_databases': [flexmock()]}
+ expected_config = {'postgresql_databases': [flexmock()]}
+ flexmock(module).should_receive('get_database_names_from_dumps').and_return(detected_names)
+ flexmock(module).should_receive('get_database_configurations').with_args(
+ hooks['postgresql_databases'], detected_names
+ ).and_return(expected_config['postgresql_databases'])
+
+ config = module.get_per_hook_database_configurations(hooks, names, dump_patterns)
+
+ assert config == expected_config
+
+
+def test_get_per_hook_database_configurations_with_unknown_database_name_raises():
+ hooks = {'postgresql_databases': [flexmock()]}
+ names = ('foo', 'bar')
+ dump_patterns = flexmock()
+ flexmock(module).should_receive('get_database_configurations').with_args(
+ hooks['postgresql_databases'], names
+ ).and_return([])
+
+ with pytest.raises(ValueError):
+ module.get_per_hook_database_configurations(hooks, names, dump_patterns)
+
+
+def test_get_per_hook_database_configurations_with_all_and_no_archive_dumps_raises():
+ hooks = {'postgresql_databases': [flexmock()]}
+ names = ('foo', 'all')
+ dump_patterns = flexmock()
+ flexmock(module).should_receive('get_database_configurations').with_args(
+ hooks['postgresql_databases'], names
+ ).and_return([])
+
+ with pytest.raises(ValueError):
+ module.get_per_hook_database_configurations(hooks, names, dump_patterns)
diff --git a/tests/unit/hooks/test_healthchecks.py b/tests/unit/hooks/test_healthchecks.py
index 79eb621..24e8fca 100644
--- a/tests/unit/hooks/test_healthchecks.py
+++ b/tests/unit/hooks/test_healthchecks.py
@@ -3,38 +3,105 @@ from flexmock import flexmock
from borgmatic.hooks import healthchecks as module
-def test_ping_healthchecks_hits_ping_url():
- ping_url = 'https://example.com'
- flexmock(module.requests).should_receive('get').with_args(ping_url)
+def test_forgetful_buffering_handler_emit_collects_log_records():
+ handler = module.Forgetful_buffering_handler(byte_capacity=100)
+ handler.emit(flexmock(getMessage=lambda: 'foo'))
+ handler.emit(flexmock(getMessage=lambda: 'bar'))
- module.ping_healthchecks(ping_url, 'config.yaml', dry_run=False)
+ assert handler.buffer == ['foo\n', 'bar\n']
+ assert not handler.forgot
-def test_ping_healthchecks_without_ping_url_does_not_raise():
- flexmock(module.requests).should_receive('get').never()
+def test_forgetful_buffering_handler_emit_forgets_log_records_when_capacity_reached():
+ handler = module.Forgetful_buffering_handler(byte_capacity=len('foo\nbar\n'))
+ handler.emit(flexmock(getMessage=lambda: 'foo'))
+ assert handler.buffer == ['foo\n']
+ handler.emit(flexmock(getMessage=lambda: 'bar'))
+ assert handler.buffer == ['foo\n', 'bar\n']
+ handler.emit(flexmock(getMessage=lambda: 'baz'))
+ assert handler.buffer == ['bar\n', 'baz\n']
+ handler.emit(flexmock(getMessage=lambda: 'quux'))
+ assert handler.buffer == ['quux\n']
+ assert handler.forgot
- module.ping_healthchecks(ping_url_or_uuid=None, config_filename='config.yaml', dry_run=False)
+def test_format_buffered_logs_for_payload_flattens_log_buffer():
+ handler = module.Forgetful_buffering_handler(byte_capacity=100)
+ handler.buffer = ['foo\n', 'bar\n']
+ flexmock(module.logging).should_receive('getLogger').and_return(flexmock(handlers=[handler]))
-def test_ping_healthchecks_with_ping_uuid_hits_corresponding_url():
- ping_uuid = 'abcd-efgh-ijkl-mnop'
- flexmock(module.requests).should_receive('get').with_args(
- 'https://hc-ping.com/{}'.format(ping_uuid)
+ payload = module.format_buffered_logs_for_payload()
+
+ assert payload == 'foo\nbar\n'
+
+
+def test_format_buffered_logs_for_payload_inserts_truncation_indicator_when_logs_forgotten():
+ handler = module.Forgetful_buffering_handler(byte_capacity=100)
+ handler.buffer = ['foo\n', 'bar\n']
+ handler.forgot = True
+ flexmock(module.logging).should_receive('getLogger').and_return(flexmock(handlers=[handler]))
+
+ payload = module.format_buffered_logs_for_payload()
+
+ assert payload == '...\nfoo\nbar\n'
+
+
+def test_format_buffered_logs_for_payload_without_handler_produces_empty_payload():
+ flexmock(module.logging).should_receive('getLogger').and_return(
+ flexmock(handlers=[module.logging.Handler()])
+ )
+
+ payload = module.format_buffered_logs_for_payload()
+
+ assert payload == ''
+
+
+def test_ping_monitor_hits_ping_url_for_start_state():
+ flexmock(module).should_receive('Forgetful_buffering_handler')
+ ping_url = 'https://example.com'
+ flexmock(module.requests).should_receive('post').with_args(
+ '{}/{}'.format(ping_url, 'start'), data=''.encode('utf-8')
+ )
+
+ module.ping_monitor(ping_url, 'config.yaml', state=module.monitor.State.START, dry_run=False)
+
+
+def test_ping_monitor_hits_ping_url_for_finish_state():
+ ping_url = 'https://example.com'
+ payload = 'data'
+ flexmock(module).should_receive('format_buffered_logs_for_payload').and_return(payload)
+ flexmock(module.requests).should_receive('post').with_args(
+ ping_url, data=payload.encode('utf-8')
)
- module.ping_healthchecks(ping_uuid, 'config.yaml', dry_run=False)
+ module.ping_monitor(ping_url, 'config.yaml', state=module.monitor.State.FINISH, dry_run=False)
-def test_ping_healthchecks_hits_ping_url_with_append():
+def test_ping_monitor_hits_ping_url_for_fail_state():
ping_url = 'https://example.com'
- append = 'failed-so-hard'
- flexmock(module.requests).should_receive('get').with_args('{}/{}'.format(ping_url, append))
+ payload = 'data'
+ flexmock(module).should_receive('format_buffered_logs_for_payload').and_return(payload)
+ flexmock(module.requests).should_receive('post').with_args(
+ '{}/{}'.format(ping_url, 'fail'), data=payload.encode('utf')
+ )
+
+ module.ping_monitor(ping_url, 'config.yaml', state=module.monitor.State.FAIL, dry_run=False)
+
+
+def test_ping_monitor_with_ping_uuid_hits_corresponding_url():
+ ping_uuid = 'abcd-efgh-ijkl-mnop'
+ payload = 'data'
+ flexmock(module).should_receive('format_buffered_logs_for_payload').and_return(payload)
+ flexmock(module.requests).should_receive('post').with_args(
+ 'https://hc-ping.com/{}'.format(ping_uuid), data=payload.encode('utf-8')
+ )
- module.ping_healthchecks(ping_url, 'config.yaml', dry_run=False, append=append)
+ module.ping_monitor(ping_uuid, 'config.yaml', state=module.monitor.State.FINISH, dry_run=False)
-def test_ping_healthchecks_dry_run_does_not_hit_ping_url():
+def test_ping_monitor_dry_run_does_not_hit_ping_url():
+ flexmock(module).should_receive('Forgetful_buffering_handler')
ping_url = 'https://example.com'
- flexmock(module.requests).should_receive('get').never()
+ flexmock(module.requests).should_receive('post').never()
- module.ping_healthchecks(ping_url, 'config.yaml', dry_run=True)
+ module.ping_monitor(ping_url, 'config.yaml', state=module.monitor.State.START, dry_run=True)
diff --git a/tests/unit/hooks/test_mysql.py b/tests/unit/hooks/test_mysql.py
new file mode 100644
index 0000000..6465d71
--- /dev/null
+++ b/tests/unit/hooks/test_mysql.py
@@ -0,0 +1,216 @@
+import sys
+
+from flexmock import flexmock
+
+from borgmatic.hooks import mysql as module
+
+
+def test_dump_databases_runs_mysqldump_for_each_database():
+ databases = [{'name': 'foo'}, {'name': 'bar'}]
+ output_file = flexmock()
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
+ 'databases/localhost/foo'
+ ).and_return('databases/localhost/bar')
+ flexmock(module.os).should_receive('makedirs')
+ flexmock(sys.modules['builtins']).should_receive('open').and_return(output_file)
+
+ for name in ('foo', 'bar'):
+ flexmock(module).should_receive('execute_command').with_args(
+ ('mysqldump', '--add-drop-database', '--databases', name),
+ output_file=output_file,
+ extra_environment=None,
+ ).once()
+
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
+
+
+def test_dump_databases_with_dry_run_skips_mysqldump():
+ databases = [{'name': 'foo'}, {'name': 'bar'}]
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
+ 'databases/localhost/foo'
+ ).and_return('databases/localhost/bar')
+ flexmock(module.os).should_receive('makedirs').never()
+ flexmock(module).should_receive('execute_command').never()
+
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=True)
+
+
+def test_dump_databases_runs_mysqldump_with_hostname_and_port():
+ databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
+ output_file = flexmock()
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
+ 'databases/database.example.org/foo'
+ )
+ flexmock(module.os).should_receive('makedirs')
+ flexmock(sys.modules['builtins']).should_receive('open').and_return(output_file)
+
+ flexmock(module).should_receive('execute_command').with_args(
+ (
+ 'mysqldump',
+ '--add-drop-database',
+ '--host',
+ 'database.example.org',
+ '--port',
+ '5433',
+ '--protocol',
+ 'tcp',
+ '--databases',
+ 'foo',
+ ),
+ output_file=output_file,
+ extra_environment=None,
+ ).once()
+
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
+
+
+def test_dump_databases_runs_mysqldump_with_username_and_password():
+ databases = [{'name': 'foo', 'username': 'root', 'password': 'trustsome1'}]
+ output_file = flexmock()
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
+ 'databases/localhost/foo'
+ )
+ flexmock(module.os).should_receive('makedirs')
+ flexmock(sys.modules['builtins']).should_receive('open').and_return(output_file)
+
+ flexmock(module).should_receive('execute_command').with_args(
+ ('mysqldump', '--add-drop-database', '--user', 'root', '--databases', 'foo'),
+ output_file=output_file,
+ extra_environment={'MYSQL_PWD': 'trustsome1'},
+ ).once()
+
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
+
+
+def test_dump_databases_runs_mysqldump_with_options():
+ databases = [{'name': 'foo', 'options': '--stuff=such'}]
+ output_file = flexmock()
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
+ 'databases/localhost/foo'
+ )
+ flexmock(module.os).should_receive('makedirs')
+ flexmock(sys.modules['builtins']).should_receive('open').and_return(output_file)
+
+ flexmock(module).should_receive('execute_command').with_args(
+ ('mysqldump', '--add-drop-database', '--stuff=such', '--databases', 'foo'),
+ output_file=output_file,
+ extra_environment=None,
+ ).once()
+
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
+
+
+def test_dump_databases_runs_mysqldump_for_all_databases():
+ databases = [{'name': 'all'}]
+ output_file = flexmock()
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
+ 'databases/localhost/all'
+ )
+ flexmock(module.os).should_receive('makedirs')
+ flexmock(sys.modules['builtins']).should_receive('open').and_return(output_file)
+
+ flexmock(module).should_receive('execute_command').with_args(
+ ('mysqldump', '--add-drop-database', '--all-databases'),
+ output_file=output_file,
+ extra_environment=None,
+ ).once()
+
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
+
+
+def test_make_database_dump_patterns_converts_names_to_glob_paths():
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
+ 'databases/*/foo'
+ ).and_return('databases/*/bar')
+
+ assert module.make_database_dump_patterns(flexmock(), flexmock(), {}, ('foo', 'bar')) == [
+ 'databases/*/foo',
+ 'databases/*/bar',
+ ]
+
+
+def test_make_database_dump_patterns_treats_empty_names_as_matching_all_databases():
+ flexmock(module).should_receive('make_dump_path').and_return('/dump/path')
+ flexmock(module.dump).should_receive('make_database_dump_filename').with_args(
+ '/dump/path', '*', '*'
+ ).and_return('databases/*/*')
+
+ assert module.make_database_dump_patterns(flexmock(), flexmock(), {}, ()) == ['databases/*/*']
+
+
+def test_restore_database_dumps_restores_each_database():
+ databases = [{'name': 'foo'}, {'name': 'bar'}]
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
+ 'databases/localhost/foo'
+ ).and_return('databases/localhost/bar')
+
+ for name in ('foo', 'bar'):
+ dump_filename = 'databases/localhost/{}'.format(name)
+ input_file = flexmock()
+ flexmock(sys.modules['builtins']).should_receive('open').with_args(
+ dump_filename
+ ).and_return(input_file)
+ flexmock(module).should_receive('execute_command').with_args(
+ ('mysql', '--batch'), input_file=input_file, extra_environment=None
+ ).once()
+
+ module.restore_database_dumps(databases, 'test.yaml', {}, dry_run=False)
+
+
+def test_restore_database_dumps_runs_mysql_with_hostname_and_port():
+ databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
+ 'databases/localhost/foo'
+ )
+ dump_filename = 'databases/localhost/foo'
+ input_file = flexmock()
+ flexmock(sys.modules['builtins']).should_receive('open').with_args(dump_filename).and_return(
+ input_file
+ )
+
+ flexmock(module).should_receive('execute_command').with_args(
+ (
+ 'mysql',
+ '--batch',
+ '--host',
+ 'database.example.org',
+ '--port',
+ '5433',
+ '--protocol',
+ 'tcp',
+ ),
+ input_file=input_file,
+ extra_environment=None,
+ ).once()
+
+ module.restore_database_dumps(databases, 'test.yaml', {}, dry_run=False)
+
+
+def test_restore_database_dumps_runs_mysql_with_username_and_password():
+ databases = [{'name': 'foo', 'username': 'root', 'password': 'trustsome1'}]
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
+ 'databases/localhost/foo'
+ )
+ dump_filename = 'databases/localhost/foo'
+ input_file = flexmock()
+ flexmock(sys.modules['builtins']).should_receive('open').with_args(dump_filename).and_return(
+ input_file
+ )
+
+ flexmock(module).should_receive('execute_command').with_args(
+ ('mysql', '--batch', '--user', 'root'),
+ input_file=input_file,
+ extra_environment={'MYSQL_PWD': 'trustsome1'},
+ ).once()
+
+ module.restore_database_dumps(databases, 'test.yaml', {}, dry_run=False)
diff --git a/tests/unit/hooks/test_postgresql.py b/tests/unit/hooks/test_postgresql.py
index 7bf783e..a7c2302 100644
--- a/tests/unit/hooks/test_postgresql.py
+++ b/tests/unit/hooks/test_postgresql.py
@@ -1,31 +1,12 @@
-import pytest
from flexmock import flexmock
from borgmatic.hooks import postgresql as module
-def test_make_database_dump_filename_uses_name_and_hostname():
- flexmock(module.os.path).should_receive('expanduser').and_return('databases')
-
- assert module.make_database_dump_filename('test', 'hostname') == 'databases/hostname/test'
-
-
-def test_make_database_dump_filename_without_hostname_defaults_to_localhost():
- flexmock(module.os.path).should_receive('expanduser').and_return('databases')
-
- assert module.make_database_dump_filename('test') == 'databases/localhost/test'
-
-
-def test_make_database_dump_filename_with_invalid_name_raises():
- flexmock(module.os.path).should_receive('expanduser').and_return('databases')
-
- with pytest.raises(ValueError):
- module.make_database_dump_filename('invalid/name')
-
-
def test_dump_databases_runs_pg_dump_for_each_database():
databases = [{'name': 'foo'}, {'name': 'bar'}]
- flexmock(module).should_receive('make_database_dump_filename').and_return(
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module.os).should_receive('makedirs')
@@ -45,27 +26,25 @@ def test_dump_databases_runs_pg_dump_for_each_database():
extra_environment=None,
).once()
- module.dump_databases(databases, 'test.yaml', dry_run=False)
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
def test_dump_databases_with_dry_run_skips_pg_dump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
- flexmock(module).should_receive('make_database_dump_filename').and_return(
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
- flexmock(module.os).should_receive('makedirs')
+ flexmock(module.os).should_receive('makedirs').never()
flexmock(module).should_receive('execute_command').never()
- module.dump_databases(databases, 'test.yaml', dry_run=True)
-
-
-def test_dump_databases_without_databases_does_not_raise():
- module.dump_databases([], 'test.yaml', dry_run=False)
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=True)
def test_dump_databases_runs_pg_dump_with_hostname_and_port():
databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
- flexmock(module).should_receive('make_database_dump_filename').and_return(
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/database.example.org/foo'
)
flexmock(module.os).should_receive('makedirs')
@@ -88,12 +67,13 @@ def test_dump_databases_runs_pg_dump_with_hostname_and_port():
extra_environment=None,
).once()
- module.dump_databases(databases, 'test.yaml', dry_run=False)
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
def test_dump_databases_runs_pg_dump_with_username_and_password():
databases = [{'name': 'foo', 'username': 'postgres', 'password': 'trustsome1'}]
- flexmock(module).should_receive('make_database_dump_filename').and_return(
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.os).should_receive('makedirs')
@@ -114,12 +94,13 @@ def test_dump_databases_runs_pg_dump_with_username_and_password():
extra_environment={'PGPASSWORD': 'trustsome1'},
).once()
- module.dump_databases(databases, 'test.yaml', dry_run=False)
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
def test_dump_databases_runs_pg_dump_with_format():
databases = [{'name': 'foo', 'format': 'tar'}]
- flexmock(module).should_receive('make_database_dump_filename').and_return(
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.os).should_receive('makedirs')
@@ -138,12 +119,13 @@ def test_dump_databases_runs_pg_dump_with_format():
extra_environment=None,
).once()
- module.dump_databases(databases, 'test.yaml', dry_run=False)
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
def test_dump_databases_runs_pg_dump_with_options():
databases = [{'name': 'foo', 'options': '--stuff=such'}]
- flexmock(module).should_receive('make_database_dump_filename').and_return(
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.os).should_receive('makedirs')
@@ -163,12 +145,13 @@ def test_dump_databases_runs_pg_dump_with_options():
extra_environment=None,
).once()
- module.dump_databases(databases, 'test.yaml', dry_run=False)
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
def test_dump_databases_runs_pg_dumpall_for_all_databases():
databases = [{'name': 'all'}]
- flexmock(module).should_receive('make_database_dump_filename').and_return(
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/all'
)
flexmock(module.os).should_receive('makedirs')
@@ -178,105 +161,34 @@ def test_dump_databases_runs_pg_dumpall_for_all_databases():
extra_environment=None,
).once()
- module.dump_databases(databases, 'test.yaml', dry_run=False)
-
-
-def test_remove_database_dumps_removes_dump_for_each_database():
- databases = [{'name': 'foo'}, {'name': 'bar'}]
- flexmock(module).should_receive('make_database_dump_filename').and_return(
- 'databases/localhost/foo'
- ).and_return('databases/localhost/bar')
- flexmock(module.os).should_receive('listdir').and_return([])
- flexmock(module.os).should_receive('rmdir')
-
- for name in ('foo', 'bar'):
- flexmock(module.os).should_receive('remove').with_args(
- 'databases/localhost/{}'.format(name)
- ).once()
-
- module.remove_database_dumps(databases, 'test.yaml', dry_run=False)
-
-
-def test_remove_database_dumps_with_dry_run_skips_removal():
- databases = [{'name': 'foo'}, {'name': 'bar'}]
- flexmock(module.os).should_receive('remove').never()
-
- module.remove_database_dumps(databases, 'test.yaml', dry_run=True)
-
-
-def test_remove_database_dumps_without_databases_does_not_raise():
- module.remove_database_dumps([], 'test.yaml', dry_run=False)
+ module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
def test_make_database_dump_patterns_converts_names_to_glob_paths():
- flexmock(module).should_receive('make_database_dump_filename').and_return(
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/*/foo'
).and_return('databases/*/bar')
- assert module.make_database_dump_patterns(('foo', 'bar')) == [
+ assert module.make_database_dump_patterns(flexmock(), flexmock(), {}, ('foo', 'bar')) == [
'databases/*/foo',
'databases/*/bar',
]
def test_make_database_dump_patterns_treats_empty_names_as_matching_all_databases():
- flexmock(module).should_receive('make_database_dump_filename').with_args('*', '*').and_return(
- 'databases/*/*'
- )
-
- assert module.make_database_dump_patterns(()) == ['databases/*/*']
-
-
-def test_convert_glob_patterns_to_borg_patterns_removes_leading_slash():
- assert module.convert_glob_patterns_to_borg_patterns(('/etc/foo/bar',)) == ['sh:etc/foo/bar']
-
-
-def test_get_database_names_from_dumps_gets_names_from_filenames_matching_globs():
- flexmock(module.glob).should_receive('glob').and_return(
- ('databases/localhost/foo',)
- ).and_return(('databases/localhost/bar',)).and_return(())
-
- assert module.get_database_names_from_dumps(
- ('databases/*/foo', 'databases/*/bar', 'databases/*/baz')
- ) == ['foo', 'bar']
-
-
-def test_get_database_configurations_only_produces_named_databases():
- databases = [
- {'name': 'foo', 'hostname': 'example.org'},
- {'name': 'bar', 'hostname': 'example.com'},
- {'name': 'baz', 'hostname': 'example.org'},
- ]
-
- assert list(module.get_database_configurations(databases, ('foo', 'baz'))) == [
- {'name': 'foo', 'hostname': 'example.org'},
- {'name': 'baz', 'hostname': 'example.org'},
- ]
+ flexmock(module).should_receive('make_dump_path').and_return('/dump/path')
+ flexmock(module.dump).should_receive('make_database_dump_filename').with_args(
+ '/dump/path', '*', '*'
+ ).and_return('databases/*/*')
-
-def test_get_database_configurations_matches_all_database():
- databases = [
- {'name': 'foo', 'hostname': 'example.org'},
- {'name': 'all', 'hostname': 'example.com'},
- ]
-
- assert list(module.get_database_configurations(databases, ('foo', 'bar', 'baz'))) == [
- {'name': 'foo', 'hostname': 'example.org'},
- {'name': 'bar', 'hostname': 'example.com'},
- {'name': 'baz', 'hostname': 'example.com'},
- ]
-
-
-def test_get_database_configurations_with_unknown_database_name_raises():
- databases = [{'name': 'foo', 'hostname': 'example.org'}]
-
- with pytest.raises(ValueError):
- list(module.get_database_configurations(databases, ('foo', 'bar')))
+ assert module.make_database_dump_patterns(flexmock(), flexmock(), {}, ()) == ['databases/*/*']
def test_restore_database_dumps_restores_each_database():
databases = [{'name': 'foo'}, {'name': 'bar'}]
- flexmock(module).should_receive('make_database_dump_filename').and_return(
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
@@ -299,18 +211,15 @@ def test_restore_database_dumps_restores_each_database():
extra_environment=None,
).once()
- module.restore_database_dumps(databases, 'test.yaml', dry_run=False)
-
-
-def test_restore_database_dumps_without_databases_does_not_raise():
- module.restore_database_dumps({}, 'test.yaml', dry_run=False)
+ module.restore_database_dumps(databases, 'test.yaml', {}, dry_run=False)
def test_restore_database_dumps_runs_pg_restore_with_hostname_and_port():
databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
- flexmock(module).should_receive('make_database_dump_filename').and_return(
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
- ).and_return('databases/localhost/bar')
+ )
flexmock(module).should_receive('execute_command').with_args(
(
@@ -346,14 +255,15 @@ def test_restore_database_dumps_runs_pg_restore_with_hostname_and_port():
extra_environment=None,
).once()
- module.restore_database_dumps(databases, 'test.yaml', dry_run=False)
+ module.restore_database_dumps(databases, 'test.yaml', {}, dry_run=False)
def test_restore_database_dumps_runs_pg_restore_with_username_and_password():
databases = [{'name': 'foo', 'username': 'postgres', 'password': 'trustsome1'}]
- flexmock(module).should_receive('make_database_dump_filename').and_return(
+ flexmock(module).should_receive('make_dump_path').and_return('')
+ flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
- ).and_return('databases/localhost/bar')
+ )
flexmock(module).should_receive('execute_command').with_args(
(
@@ -385,4 +295,4 @@ def test_restore_database_dumps_runs_pg_restore_with_username_and_password():
extra_environment={'PGPASSWORD': 'trustsome1'},
).once()
- module.restore_database_dumps(databases, 'test.yaml', dry_run=False)
+ module.restore_database_dumps(databases, 'test.yaml', {}, dry_run=False)
diff --git a/tests/unit/test_execute.py b/tests/unit/test_execute.py
index aba73c9..b6fcb04 100644
--- a/tests/unit/test_execute.py
+++ b/tests/unit/test_execute.py
@@ -1,62 +1,86 @@
-import logging
-
import pytest
from flexmock import flexmock
from borgmatic import execute as module
-def test_exit_code_indicates_error_with_borg_error_is_true():
- assert module.exit_code_indicates_error(('/usr/bin/borg1', 'init'), 2)
-
-
-def test_exit_code_indicates_error_with_borg_warning_is_false():
- assert not module.exit_code_indicates_error(('/usr/bin/borg1', 'init'), 1)
-
-
-def test_exit_code_indicates_error_with_borg_success_is_false():
- assert not module.exit_code_indicates_error(('/usr/bin/borg1', 'init'), 0)
-
-
-def test_exit_code_indicates_error_with_borg_error_and_error_on_warnings_is_true():
- assert module.exit_code_indicates_error(('/usr/bin/borg1', 'init'), 2, error_on_warnings=True)
-
-
-def test_exit_code_indicates_error_with_borg_warning_and_error_on_warnings_is_true():
- assert module.exit_code_indicates_error(('/usr/bin/borg1', 'init'), 1, error_on_warnings=True)
+@pytest.mark.parametrize(
+ 'exit_code,error_on_warnings,expected_result',
+ (
+ (2, True, True),
+ (2, False, True),
+ (1, True, True),
+ (1, False, False),
+ (0, True, False),
+ (0, False, False),
+ ),
+)
+def test_exit_code_indicates_error_respects_exit_code_and_error_on_warnings(
+ exit_code, error_on_warnings, expected_result
+):
+ assert (
+ module.exit_code_indicates_error(
+ ('command',), exit_code, error_on_warnings=error_on_warnings
+ )
+ is expected_result
+ )
-def test_exit_code_indicates_error_with_borg_success_and_error_on_warnings_is_false():
- assert not module.exit_code_indicates_error(
- ('/usr/bin/borg1', 'init'), 0, error_on_warnings=True
- )
+def test_execute_command_calls_full_command():
+ full_command = ['foo', 'bar']
+ flexmock(module.os, environ={'a': 'b'})
+ flexmock(module.subprocess).should_receive('Popen').with_args(
+ full_command,
+ stdin=None,
+ stdout=module.subprocess.PIPE,
+ stderr=module.subprocess.STDOUT,
+ shell=False,
+ env=None,
+ cwd=None,
+ ).and_return(flexmock(stdout=None)).once()
+ flexmock(module).should_receive('log_output')
+ output = module.execute_command(full_command)
-def test_exit_code_indicates_error_with_non_borg_error_is_true():
- assert module.exit_code_indicates_error(('/usr/bin/command',), 2)
+ assert output is None
-def test_exit_code_indicates_error_with_non_borg_warning_is_true():
- assert module.exit_code_indicates_error(('/usr/bin/command',), 1)
+def test_execute_command_calls_full_command_with_output_file():
+ full_command = ['foo', 'bar']
+ output_file = flexmock(name='test')
+ flexmock(module.os, environ={'a': 'b'})
+ flexmock(module.subprocess).should_receive('Popen').with_args(
+ full_command,
+ stdin=None,
+ stdout=output_file,
+ stderr=module.subprocess.PIPE,
+ shell=False,
+ env=None,
+ cwd=None,
+ ).and_return(flexmock(stderr=None)).once()
+ flexmock(module).should_receive('log_output')
+ output = module.execute_command(full_command, output_file=output_file)
-def test_exit_code_indicates_error_with_non_borg_success_is_false():
- assert not module.exit_code_indicates_error(('/usr/bin/command',), 0)
+ assert output is None
-def test_execute_command_calls_full_command():
+def test_execute_command_calls_full_command_with_input_file():
full_command = ['foo', 'bar']
+ input_file = flexmock(name='test')
flexmock(module.os, environ={'a': 'b'})
- flexmock(module).should_receive('execute_and_log_output').with_args(
+ flexmock(module.subprocess).should_receive('Popen').with_args(
full_command,
- output_log_level=logging.INFO,
+ stdin=input_file,
+ stdout=module.subprocess.PIPE,
+ stderr=module.subprocess.STDOUT,
shell=False,
- environment=None,
- working_directory=None,
- error_on_warnings=False,
- ).once()
+ env=None,
+ cwd=None,
+ ).and_return(flexmock(stdout=None)).once()
+ flexmock(module).should_receive('log_output')
- output = module.execute_command(full_command)
+ output = module.execute_command(full_command, input_file=input_file)
assert output is None
@@ -64,14 +88,16 @@ def test_execute_command_calls_full_command():
def test_execute_command_calls_full_command_with_shell():
full_command = ['foo', 'bar']
flexmock(module.os, environ={'a': 'b'})
- flexmock(module).should_receive('execute_and_log_output').with_args(
+ flexmock(module.subprocess).should_receive('Popen').with_args(
full_command,
- output_log_level=logging.INFO,
+ stdin=None,
+ stdout=module.subprocess.PIPE,
+ stderr=module.subprocess.STDOUT,
shell=True,
- environment=None,
- working_directory=None,
- error_on_warnings=False,
- ).once()
+ env=None,
+ cwd=None,
+ ).and_return(flexmock(stdout=None)).once()
+ flexmock(module).should_receive('log_output')
output = module.execute_command(full_command, shell=True)
@@ -81,14 +107,16 @@ def test_execute_command_calls_full_command_with_shell():
def test_execute_command_calls_full_command_with_extra_environment():
full_command = ['foo', 'bar']
flexmock(module.os, environ={'a': 'b'})
- flexmock(module).should_receive('execute_and_log_output').with_args(
+ flexmock(module.subprocess).should_receive('Popen').with_args(
full_command,
- output_log_level=logging.INFO,
+ stdin=None,
+ stdout=module.subprocess.PIPE,
+ stderr=module.subprocess.STDOUT,
shell=False,
- environment={'a': 'b', 'c': 'd'},
- working_directory=None,
- error_on_warnings=False,
- ).once()
+ env={'a': 'b', 'c': 'd'},
+ cwd=None,
+ ).and_return(flexmock(stdout=None)).once()
+ flexmock(module).should_receive('log_output')
output = module.execute_command(full_command, extra_environment={'c': 'd'})
@@ -98,37 +126,22 @@ def test_execute_command_calls_full_command_with_extra_environment():
def test_execute_command_calls_full_command_with_working_directory():
full_command = ['foo', 'bar']
flexmock(module.os, environ={'a': 'b'})
- flexmock(module).should_receive('execute_and_log_output').with_args(
+ flexmock(module.subprocess).should_receive('Popen').with_args(
full_command,
- output_log_level=logging.INFO,
+ stdin=None,
+ stdout=module.subprocess.PIPE,
+ stderr=module.subprocess.STDOUT,
shell=False,
- environment=None,
- working_directory='/working',
- error_on_warnings=False,
- ).once()
+ env=None,
+ cwd='/working',
+ ).and_return(flexmock(stdout=None)).once()
+ flexmock(module).should_receive('log_output')
output = module.execute_command(full_command, working_directory='/working')
assert output is None
-def test_execute_command_calls_full_command_with_error_on_warnings():
- full_command = ['foo', 'bar']
- flexmock(module.os, environ={'a': 'b'})
- flexmock(module).should_receive('execute_and_log_output').with_args(
- full_command,
- output_log_level=logging.INFO,
- shell=False,
- environment=None,
- working_directory=None,
- error_on_warnings=True,
- ).once()
-
- output = module.execute_command(full_command, error_on_warnings=True)
-
- assert output is None
-
-
def test_execute_command_captures_output():
full_command = ['foo', 'bar']
expected_output = '[]'
diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py
index 91b2554..707376b 100644
--- a/tests/unit/test_logger.py
+++ b/tests/unit/test_logger.py
@@ -22,14 +22,14 @@ def test_to_bool_passes_none_through():
def test_interactive_console_false_when_not_isatty(capsys):
with capsys.disabled():
- flexmock(module.sys.stdout).should_receive('isatty').and_return(False)
+ flexmock(module.sys.stderr).should_receive('isatty').and_return(False)
assert module.interactive_console() is False
def test_interactive_console_false_when_TERM_is_dumb(capsys):
with capsys.disabled():
- flexmock(module.sys.stdout).should_receive('isatty').and_return(True)
+ flexmock(module.sys.stderr).should_receive('isatty').and_return(True)
flexmock(module.os.environ).should_receive('get').with_args('TERM').and_return('dumb')
assert module.interactive_console() is False
@@ -37,7 +37,7 @@ def test_interactive_console_false_when_TERM_is_dumb(capsys):
def test_interactive_console_true_when_isatty_and_TERM_is_not_dumb(capsys):
with capsys.disabled():
- flexmock(module.sys.stdout).should_receive('isatty').and_return(True)
+ flexmock(module.sys.stderr).should_receive('isatty').and_return(True)
flexmock(module.os.environ).should_receive('get').with_args('TERM').and_return('smart')
assert module.interactive_console() is True
@@ -113,6 +113,17 @@ def test_should_do_markup_prefers_PY_COLORS_to_interactive_console_value():
assert module.should_do_markup(no_color=False, configs={}) is True
+def test_multi_stream_handler_logs_to_handler_for_log_level():
+ error_handler = flexmock()
+ error_handler.should_receive('emit').once()
+ info_handler = flexmock()
+
+ multi_handler = module.Multi_stream_handler(
+ {module.logging.ERROR: error_handler, module.logging.INFO: info_handler}
+ )
+ multi_handler.emit(flexmock(levelno=module.logging.ERROR))
+
+
def test_console_color_formatter_format_includes_log_message():
plain_message = 'uh oh'
record = flexmock(levelno=logging.CRITICAL, msg=plain_message)
@@ -132,6 +143,9 @@ def test_color_text_without_color_does_not_raise():
def test_configure_logging_probes_for_log_socket_on_linux():
+ flexmock(module).should_receive('Multi_stream_handler').and_return(
+ flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None)
+ )
flexmock(module).should_receive('Console_color_formatter')
flexmock(module).should_receive('interactive_console').and_return(False)
flexmock(module.logging).should_receive('basicConfig').with_args(
@@ -147,6 +161,9 @@ def test_configure_logging_probes_for_log_socket_on_linux():
def test_configure_logging_probes_for_log_socket_on_macos():
+ flexmock(module).should_receive('Multi_stream_handler').and_return(
+ flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None)
+ )
flexmock(module).should_receive('Console_color_formatter')
flexmock(module).should_receive('interactive_console').and_return(False)
flexmock(module.logging).should_receive('basicConfig').with_args(
@@ -163,6 +180,9 @@ def test_configure_logging_probes_for_log_socket_on_macos():
def test_configure_logging_sets_global_logger_to_most_verbose_log_level():
+ flexmock(module).should_receive('Multi_stream_handler').and_return(
+ flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None)
+ )
flexmock(module).should_receive('Console_color_formatter')
flexmock(module.logging).should_receive('basicConfig').with_args(
level=logging.DEBUG, handlers=tuple
@@ -173,6 +193,9 @@ def test_configure_logging_sets_global_logger_to_most_verbose_log_level():
def test_configure_logging_skips_syslog_if_not_found():
+ flexmock(module).should_receive('Multi_stream_handler').and_return(
+ flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None)
+ )
flexmock(module).should_receive('Console_color_formatter')
flexmock(module.logging).should_receive('basicConfig').with_args(
level=logging.INFO, handlers=tuple
@@ -184,6 +207,9 @@ def test_configure_logging_skips_syslog_if_not_found():
def test_configure_logging_skips_syslog_if_interactive_console():
+ flexmock(module).should_receive('Multi_stream_handler').and_return(
+ flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None)
+ )
flexmock(module).should_receive('Console_color_formatter')
flexmock(module).should_receive('interactive_console').and_return(True)
flexmock(module.logging).should_receive('basicConfig').with_args(
@@ -196,6 +222,10 @@ def test_configure_logging_skips_syslog_if_interactive_console():
def test_configure_logging_to_logfile_instead_of_syslog():
+ flexmock(module).should_receive('Multi_stream_handler').and_return(
+ flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None)
+ )
+
# syslog skipped in non-interactive console if --log-file argument provided
flexmock(module).should_receive('interactive_console').and_return(False)
flexmock(module.logging).should_receive('basicConfig').with_args(
@@ -203,10 +233,10 @@ def test_configure_logging_to_logfile_instead_of_syslog():
)
flexmock(module.os.path).should_receive('exists').with_args('/dev/log').and_return(True)
flexmock(module.logging.handlers).should_receive('SysLogHandler').never()
- file_handler = logging.FileHandler('/tmp/logfile')
- flexmock(module.logging).should_receive('FileHandler').with_args('/tmp/logfile').and_return(
- file_handler
- ).once()
+ file_handler = logging.handlers.WatchedFileHandler('/tmp/logfile')
+ flexmock(module.logging.handlers).should_receive('WatchedFileHandler').with_args(
+ '/tmp/logfile'
+ ).and_return(file_handler).once()
module.configure_logging(
console_log_level=logging.INFO, log_file_log_level=logging.DEBUG, log_file='/tmp/logfile'
@@ -214,12 +244,16 @@ def test_configure_logging_to_logfile_instead_of_syslog():
def test_configure_logging_skips_logfile_if_argument_is_none():
- # No FileHandler added if argument --log-file is None
+ flexmock(module).should_receive('Multi_stream_handler').and_return(
+ flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None)
+ )
+
+ # No WatchedFileHandler added if argument --log-file is None
flexmock(module).should_receive('interactive_console').and_return(False)
flexmock(module.logging).should_receive('basicConfig').with_args(
level=logging.INFO, handlers=tuple
)
flexmock(module.os.path).should_receive('exists').and_return(False)
- flexmock(module.logging).should_receive('FileHandler').never()
+ flexmock(module.logging.handlers).should_receive('WatchedFileHandler').never()
module.configure_logging(console_log_level=logging.INFO, log_file=None)
diff --git a/tox.ini b/tox.ini
index 92c3a33..b6b0537 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,7 +2,7 @@
envlist = py35,py36,py37,py38
skip_missing_interpreters = True
skipsdist = True
-minversion = 3.14.0
+minversion = 3.14.1
[testenv]
usedevelop = True
@@ -10,8 +10,7 @@ deps = -rtest_requirements.txt
whitelist_externals =
find
sh
-commands_pre =
- find {toxinidir} -type f -not -path '{toxinidir}/.tox/*' -path '*/__pycache__/*' -name '*.py[c|o]' -delete
+passenv = COVERAGE_FILE
commands =
pytest {posargs}
py36,py37,py38: black --check .
@@ -28,6 +27,7 @@ commands =
[testenv:end-to-end]
deps = -rtest_requirements.txt
+passenv = COVERAGE_FILE
commands =
pytest {posargs} --no-cov tests/end-to-end