# ZFS Automatic Snapshot Daemon
from sys import argv, executable
from os import environ, getcwd, sep as psep
from os.path import dirname, abspath, join as joinpath, \
    expanduser, splitdrive, isfile
from time import sleep
from copy import deepcopy
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger
import logging
import pprint

# Star imports supply the shared module logger (zasd.logging) and the
# ifenv(), uniq() and find_file() helpers (zasd.util) used below
from zasd.logging import *
from zasd.util import *
#
# Constants
CONFIG_BASENAME = 'zasd.conf.py'
CONFIG_FILENAMES = [CONFIG_BASENAME, '.' + CONFIG_BASENAME]

# Default configuration
DEFAULT_CONFIG = {
    'zfs_path': '/usr/local/bin/zfs',
    'fswatch_path': '/usr/local/bin/fswatch',
    'tab_size': 2,

    'log_level': logging.INFO,
    'log_format': '%(asctime)s %(name)s [%(levelname)-8s]: %(message)s',
    'log_date_format': '%a, %d %b %Y, %H:%M:%S',

    'separator': ':',

    'destroy_trigger': CronTrigger.from_crontab('* * * * *'),

    # Defaults will take a snapshot every 12 hours
    # and keep them for a week, so if the config file
    # should disappear for some reason, there will
    # at least be _some_ recoverable snapshots
    'defaults': {
        'disabled': False,
        'filesystems': ['tank'],
        'recursive': True,
        'tag': 'zasd',
        'trigger': IntervalTrigger(hours=12),
        'priority': 1,
        'keep': 14
    },

    'schedules': [{}]
}
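
# Illustrative sketch of what a user configuration file (zasd.conf.py)
# might look like. The file is plain Python executed by load_config()
# below; helpers such as crontab()/cron() and interval()/every() are
# injected into its scope, and its top-level assignments are merged
# over DEFAULT_CONFIG. The filesystem names and schedule values here
# are made-up examples, not recommendations:
#
#     log_level = DEBUG
#
#     defaults = {
#         'filesystems': ['tank/home'],
#         'keep': 24
#     }
#
#     schedules = [
#         {'trigger': every(hours=1), 'keep': 24},
#         {'trigger': crontab('0 3 * * *'), 'tag': 'daily', 'keep': 7}
#     ]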

# Load configuration
def load_config():
    global logger

    if len(argv) > 1:
        # Configuration pathname given as first argument
        if isfile(argv[1]):
            config_pathname = argv[1]
        else:
            logger.warning('Could not find configuration file %s', argv[1])
            return _warn_load_default()
    else:
        # No configuration pathname given; attempt to find it:

        # Get root of system partition
        sys_root_path = environ.get('SystemDrive', splitdrive(executable)[0] + psep)

        # Get system configuration directory; ifenv() presumably invokes the
        # first lambda with the value of 'SystemRoot' when that variable is
        # set, and the second lambda otherwise
        sys_conf_path = joinpath(*ifenv('SystemRoot',
            lambda path: [path, 'System32', 'drivers', 'etc'],
            lambda: [sys_root_path, 'etc']))

        # Get user home directory
        user_home_path = expanduser('~')

        # Get path of this Python file
        if '__file__' in globals():
            script_path = dirname(abspath(__file__))
        else:
            script_path = dirname(abspath(argv[0]))

        # Build list of configuration file pathnames to search
        config_paths = uniq([
            getcwd(),
            user_home_path,
            joinpath(user_home_path, '.config'),
            joinpath(user_home_path, '.local', 'share'),
            sys_conf_path,
            script_path])

        config_pathnames = list(joinpath(p, f) for p in config_paths for f in CONFIG_FILENAMES)
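
        # On a POSIX system the candidates typically end up being
        # ./zasd.conf.py, ~/zasd.conf.py, ~/.config/zasd.conf.py,
        # ~/.local/share/zasd.conf.py, /etc/zasd.conf.py and the script
        # directory, each also checked for its dot-prefixed variant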

        # Attempt to find a config file
        config_pathname = find_file(config_pathnames)

        if config_pathname is None:
            logger.warning('Unable to find a config file at:')
            for pathname in config_pathnames:
                logger.warning(' ' + pathname)
            return _warn_load_default()

    with open(config_pathname, 'rt', encoding='utf-8') as f:
        config_source = f.read()

    # Create configuration file scopes
    global_scope = dict(
        CRITICAL = logging.CRITICAL,
        ERROR = logging.ERROR,
        WARNING = logging.WARNING,
        INFO = logging.INFO,
        DEBUG = logging.DEBUG,
        NOTSET = logging.NOTSET,

        crontab = CronTrigger.from_crontab,
        cron = CronTrigger.from_crontab,
        interval = IntervalTrigger,
        every = IntervalTrigger)
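
    # With these bindings a configuration file can, for example, specify
    # cron('*/30 * * * *') or every(minutes=30) as a trigger; crontab/cron
    # and interval/every are aliases for the same APScheduler trigger
    # factories (example values only)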
    local_scope = dict()

    # Execute configuration file
    exec(config_source, global_scope, local_scope)

    # Merge configuration with default configuration
    config = merge_configs(DEFAULT_CONFIG, local_scope)

    logger.debug('Loaded configuration')
    if config['log_level'] <= logging.DEBUG:
        logger.debug('')
        for line in pprint.pformat(config).split('\n'):
            logger.debug(config['tab_size'] * ' ' + line)
        logger.debug('')

    return config

def _warn_load_default():
    global DEFAULT_CONFIG

    logger.warning('')
    logger.warning('Waiting 10 seconds before loading default configuration...')
    logger.warning('')

    sleep(10)

    logger.warning('Loading default configuration')
    logger.warning('')

    return deepcopy(DEFAULT_CONFIG)

def merge_configs(base, diff, path=[]):
    # Copy the base once at the top level so that DEFAULT_CONFIG is never
    # mutated; recursive calls then operate directly on parts of that copy
    base = deepcopy(base) if len(path) == 0 else base

    for key, value in diff.items():
        if key not in base:
            base[key] = value
        elif not isinstance(value, type(base[key])):
            logger.error('Cannot merge diff type %s with base type %s at %s.%s',
                type(value), type(base[key]), '.'.join(path), key)
            return None
        elif isinstance(value, dict):
            merged = merge_configs(base[key], value, path + [key])
            if merged is None:
                return None
            base[key] = merged
        else:
            base[key] = value

    return base
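
# Minimal sketch of the merge semantics above, with hypothetical values:
# nested dicts are merged key by key, scalars and lists in the diff
# replace the corresponding base values, and a type mismatch aborts the
# merge with None.
#
#     base = {'defaults': {'keep': 14, 'tag': 'zasd'}, 'schedules': [{}]}
#     diff = {'defaults': {'keep': 7}, 'schedules': [{'keep': 24}]}
#     merge_configs(base, diff)
#     # -> {'defaults': {'keep': 7, 'tag': 'zasd'}, 'schedules': [{'keep': 24}]}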