Commit 894b1163 authored by Chris Snijder's avatar Chris Snijder 🏅
Browse files

Merge branch '62-allow-running-one-off-for-debugging-purposes' into 'master'

Resolve "Allow running one-off for debugging purposes"

Closes #62

See merge request !44
parents ea51be2c f5996909
Pipeline #6268 passed with stages
in 7 minutes and 13 seconds
......@@ -86,9 +86,7 @@ test:stretch:
- openssl version
- apt-get install -y -q ./dist/stapled_*all.deb
- /refresh_testdata.sh
- stapled -p /tmp/testdata/ --recursive --interactive --no-haproxy-sockets -vvvv &
- sleep 15
- ls /tmp/testdata/**/chain.pem.ocsp
- stapled -p /tmp/testdata/ --recursive --interactive --no-haproxy-sockets -vvvv --one-off
dependencies:
- build:package
......@@ -102,6 +100,4 @@ source:dev-setup:
- openssl version
- pip3 install -e .
- ./refresh_testdata.sh
- stapled -p /tmp/testdata/ --recursive --interactive --no-haproxy-sockets -vvvv &
- sleep 15
- ls /tmp/testdata/**/chain.pem.ocsp
- stapled -p /tmp/testdata/ --recursive --interactive --no-haproxy-sockets -vvvv --one-off
......@@ -92,11 +92,11 @@ haproxy-sockets=[/var/run/haproxy/admin.sock]
;; merged in the path to socket mapping.
; haproxy-config=/etc/haproxy/haproxy.cfg
;; Set a keep alive time in seconds after wich the connection to the HAProxy
;; sockets is terminated. The minimum allowed value is 10 seconds, because
;; Set a keep alive time in seconds after which the connection to the HAProxy
;; sockets is terminated. The minimum allowed value is 1 second, because
;; stapled will take at least a bit of time to communicate with HAProxy, and
;; either process could be "busy".
; haproxy-socket-keepalive=3600
; haproxy-socket-keepalive=10
;; Don't output anything to stdout, can be used together with `logdir`
......
# -*- coding: utf-8 -*-
"""
Initialise the stapled module.
......
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Parse command line arguments and starts the OCSP Staple daemon.
......@@ -39,12 +38,12 @@ import daemon
import stapled
import stapled.core.daemon
import stapled.core.excepthandler
from stapled.core.excepthandler import handle_file_error
from stapled.core.exceptions import ArgumentError
from stapled.util.haproxy import parse_haproxy_config
from stapled.colourlog import ColourFormatter
from stapled.version import __version__, __app_name__
from stapled.util.functions import unique
from stapled.util.exitcode import ExitCodeTracker
#: :attr:`logging.format` format string for log files and syslog
LOGFORMAT = (
......@@ -228,13 +227,13 @@ def get_cli_arg_parser():
parser.add(
'--haproxy-socket-keepalive',
type=int,
default=3600,
default=10,
metavar="KEEP-ALIVE <seconds, minimum: 10>",
help=(
"HAProxy sockets are kept open for performance reasons, you can "
"set the amount of seconds sockets should remain open "
"(default=3600). Note that a short amount of time is required to "
"to pass messages to HAProxy, so 10 seconds if the minimum "
"(default=10). Note that a short amount of time is required to "
"to pass messages to HAProxy, so 1 second if the minimum "
"accepted value."
)
)
......@@ -316,6 +315,16 @@ def get_cli_arg_parser():
"DEPRECATED, please see ``--cert-paths``."
)
)
parser.add(
'--one-off',
action='store_true',
default=False,
help=(
"Index cert_paths and fetch staples only once and then exit. "
"This overrides the --refresh-interval argument. The --daemon and "
"--no-daemon arguments are also ignored."
)
)
return parser
......@@ -329,7 +338,7 @@ def init():
"""
args = __get_validated_args()
log_file_handles = __init_logging(args)
log_file_handles, exit_code_tracker = __init_logging(args)
# Get a mapping of configured sockets and certificate directories from:
# haproxy config, stapled config and command line arguments
......@@ -350,10 +359,12 @@ def init():
file_extensions=args.file_extensions,
renewal_threads=args.renewal_threads,
refresh_interval=args.refresh_interval,
one_off=args.one_off,
minimum_validity=args.minimum_validity,
recursive=args.recursive,
no_recycle=args.no_recycle,
ignore=args.ignore
ignore=args.ignore,
exit_code_tracker=exit_code_tracker
)
if stapled.LOCAL_LIB_MODE:
......@@ -449,12 +460,19 @@ def __init_logging(args):
logging.Formatter(LOGFORMAT, TIMESTAMP_FORMAT)
)
logger.addHandler(syslog_handler)
return log_file_handles
if args.one_off:
# Keep track of errors so we can return a greater than 0 exit code when
# errors occurred.
exit_code_tracker = ExitCodeTracker(logging.WARN)
logger.addHandler(exit_code_tracker)
else:
exit_code_tracker = None
return log_file_handles, exit_code_tracker
def __get_haproxy_socket_mapping(args):
"""
Get mapping of configured sockets and certificate directories.
Get a mapping of configured sockets and certificate directories.
From: haproxy config, stapled config and command line arguments.
......@@ -473,8 +491,8 @@ def __get_haproxy_socket_mapping(args):
conf_cert_paths, conf_haproxy_sockets = parse_haproxy_config(
args.haproxy_config
)
except (OSError, IOError) as exc:
logger.critical(handle_file_error(exc))
except (OSError) as exc:
logger.critical(exc)
exit(1)
# Combine the socket and certificate paths of the arguments and config
......@@ -495,7 +513,7 @@ def __get_haproxy_socket_mapping(args):
def __get_validated_args():
"""
Parse and validate CLI arguments and configuration.
Check that arguments make sense.
Checks should match the restrictions in the usage help messages.
......@@ -504,17 +522,62 @@ def __get_validated_args():
parser = get_cli_arg_parser()
args = parser.parse_args()
try:
if args.haproxy_socket_keepalive < 10:
if args.haproxy_socket_keepalive < 1:
raise ArgumentError(
"`--haproxy-socket-keepalive` should be higher than 10."
"`--haproxy-socket-keepalive` should be 1 or higher."
)
except ArgumentError as exc:
parser.print_usage(sys.stderr)
logger.critical("Invalid command line argument or value: %s", exc)
exit(1)
# Run in one-off mode, run once then exit.
if args.one_off:
args.refresh_interval = None
args.daemon = False
return args
def __get_haproxy_socket_mapping(args):
    """
    Build a mapping of certificate paths to HAProxy sockets.

    Combines: haproxy config, stapled config and command line arguments.

    :param Namespace args: Argparser argument list.
    :return dict: Certificate paths mapped to the sockets to inform of
        changes.
    """
    # Seed the mapping with the paths and sockets from the CLI/config
    # arguments.
    socket_mapping = dict(
        zip(__get_arg_cert_paths(args), __get_arg_haproxy_sockets(args))
    )
    # Parse the HAProxy config files and merge their paths/sockets in.
    try:
        conf_cert_paths, conf_haproxy_sockets = parse_haproxy_config(
            args.haproxy_config
        )
    except OSError as exc:
        logger.critical(exc)
        exit(1)
    for path_group, sockets in zip(conf_cert_paths, conf_haproxy_sockets):
        for cert_path in path_group:
            if cert_path in socket_mapping:
                # Already known from the arguments: merge, dropping
                # duplicate sockets.
                socket_mapping[cert_path] = unique(
                    socket_mapping[cert_path] + sockets,
                    preserve_order=False
                )
            else:
                socket_mapping[cert_path] = sockets
    logger.debug("Paths to socket mapping: %s", str(socket_mapping))
    return socket_mapping
if __name__ == '__main__':
try:
init()
......
# -*- coding: utf-8 -*-
"""
ANSI colourise the logging stream (works on LINUX/UNIX based systems).
......
# -*- coding: utf-8 -*-
"""
Test the ColourFormatter class when run directly.
"""
......
# -*- coding: utf-8 -*-
"""
This module locates certificate files in the supplied paths and parses
them. It then keeps track of the following:
Locate certificate files in the supplied paths and parse them.
It also keeps track of the following:
- If cert is found for the first time (thus also when the daemon is started),
the cert is added to the :attr:`stapled.core.certfinder.CertFinder.scheduler`
......@@ -28,7 +28,6 @@ import logging
import fnmatch
import os
import errno
import stapled
from stapled.core.excepthandler import stapled_except_handle
from stapled.core.taskcontext import StapleTaskContext
from stapled.core.certmodel import CertModel
......@@ -39,7 +38,8 @@ LOG = logging.getLogger(__name__)
class CertFinderThread(threading.Thread):
"""
This searches paths for certificate files.
A thread that searches paths for certificate files.
When found, models are created for the certificate files, which are wrapped
in a :class:`stapled.core.taskcontext.StapleTaskContext` which are then
scheduled to be processed by the
......@@ -48,19 +48,18 @@ class CertFinderThread(threading.Thread):
Pass ``refresh_interval=None`` if you want to run it only once (e.g. for
testing)
"""
# pylint: disable=too-many-instance-attributes
def __init__(self, *args, **kwargs):
"""
Initialise the thread with its parent :class:`threading.Thread` and its
arguments.
Initialise with parent :class:`threading.Thread` and its arguments.
:kwarg dict models: A dict to maintain a model cache **(required)**.
:kwarg iter cert_paths: The paths to index **(required)**.
:kwarg stapled.scheduling.SchedulerThread scheduler: The scheduler
object where we add new parse tasks to. **(required)**.
:kwarg int refresh_interval: The minimum amount of time (s)
between search runs, defaults to 10 seconds. Set to None to run
only once **(optional)**.
:kwarg int refresh_interval: The minimum amount of time (s) between
search runs. Set to None (default) to run once **(optional)**.
:kwarg array file_extensions: An array containing the file extensions
of file types to check for certificate content **(optional)**.
"""
......@@ -68,12 +67,8 @@ class CertFinderThread(threading.Thread):
self.models = kwargs.pop('models', None)
self.cert_paths = kwargs.pop('cert_paths', None)
self.scheduler = kwargs.pop('scheduler', None)
self.refresh_interval = kwargs.pop(
'refresh_interval', stapled.DEFAULT_REFRESH_INTERVAL
)
self.file_extensions = kwargs.pop(
'file_extensions', stapled.FILE_EXTENSIONS_DEFAULT
)
self.refresh_interval = kwargs.pop('refresh_interval', None)
self.file_extensions = kwargs.pop('file_extensions', None)
self.last_refresh = None
self.ignore = kwargs.pop('ignore', []) or []
self.recursive = kwargs.pop('recursive', False)
......@@ -84,13 +79,21 @@ class CertFinderThread(threading.Thread):
assert self.cert_paths is not None, \
"At least one path should be passed for indexing."
assert self.file_extensions is not None, \
"Please specify file extensions to search for certificates."
assert self.scheduler is not None, \
"Please pass a scheduler to get tasks from and add tasks to."
super(CertFinderThread, self).__init__(*args, **kwargs)
def run(self):
"""Start the certificate finder thread."""
"""
Start the certificate finder thread.
The "scheduling" mentioned in this method does not use the scheduler.
It will sleep instead, only because it is simpler.
"""
LOG.info("Scanning paths: '%s'", "', '".join(self.cert_paths))
while not self.stop:
# Catch any exceptions within this context to protect the thread.
......@@ -103,7 +106,7 @@ class CertFinderThread(threading.Thread):
since_last = time.time() - self.last_refresh
# Check if the last refresh took longer than the interval..
if since_last > self.refresh_interval:
# It did so start right now..
# It did take longer than the interval, so start right now
LOG.info(
"Starting a new refresh immediately because the last "
"refresh took %0.3f seconds while the minimum "
......@@ -131,6 +134,8 @@ class CertFinderThread(threading.Thread):
def refresh(self):
"""
Refresh the index.
Wrap up the internal :meth:`CertFinder._update_cached_certs()` and
:meth:`CertFinder._find_new_certs()` functions.
......@@ -167,7 +172,7 @@ class CertFinderThread(threading.Thread):
dirs = []
try:
dirs = os.listdir(path)
except (OSError, IOError) as exc:
except (OSError) as exc:
# If a path is actually a file we can still use it..
if exc.errno == errno.ENOTDIR and os.path.isfile(path):
LOG.debug("%s may be a single file", path)
......@@ -206,7 +211,7 @@ class CertFinderThread(threading.Thread):
sched_time=None
)
self.scheduler.add_task(context)
except (IOError, OSError) as exc:
except (OSError) as exc:
# If the directory is unreadable this gets printed at every
# refresh until the directory is readable. We catch this here
# so any readable directory can still be scanned.
......
# -*- coding: utf-8 -*-
"""
This module defines the :class:`stapled.core.certmodel.CertModel` class which is
used to keep track of certificates that are found by the
......
# -*- coding: utf-8 -*-
"""
This module parses certificate in a queue so the data contained in the
certificate can be used to request OCSP responses. After parsing a new
......
# -*- coding: utf-8 -*-
"""
This module bootstraps the stapled process by starting threads for:
......@@ -57,7 +56,7 @@ from stapled.core.certfinder import CertFinderThread
from stapled.core.certparser import CertParserThread
from stapled.core.staplerenewer import StapleRenewerThread
from stapled.core.stapleadder import StapleAdder
from stapled.scheduling import SchedulerThread
from stapled.scheduling import SchedulerThread, QueueError
from stapled import MAX_RESTART_THREADS
LOG = logging.getLogger(__name__)
......@@ -79,7 +78,7 @@ class Stapledaemon(object):
:kwarg list file_extensions: List of file extensions to search for
certificates.
:kwarg int renewal_threads: Amount of staple renewal threads.
:kwarg int refresh_interval: Interval between re-indexing of
:kwarg NoneType|int refresh_interval: Interval between re-indexing of
certificate paths.
:kwarg int minimum_validity: Minimum validity of stapled before
renewing.
......@@ -97,9 +96,11 @@ class Stapledaemon(object):
self.file_extensions = self.file_extensions.replace(" ", "").split(",")
self.renewal_threads = kwargs.pop('renewal_threads')
self.refresh_interval = kwargs.pop('refresh_interval')
self.one_off = kwargs.pop('one_off')
self.minimum_validity = kwargs.pop('minimum_validity')
self.recursive = kwargs.pop('recursive')
self.no_recycle = kwargs.pop('no_recycle')
self.exit_code_tracker = kwargs.pop('exit_code_tracker')
self.ignore = []
rel_path_re = re.compile(r'^\.+\/')
......@@ -136,21 +137,24 @@ class Stapledaemon(object):
# Scheduler thread
self.scheduler = self.start_scheduler_thread()
self.staple_adder = None
# Start proxy adder thread if sockets were supplied
if self.haproxy_socket_mapping:
self.start_staple_adder_thread()
self.staple_adder = self.start_staple_adder_thread()
# Start ocsp response gathering threads
threads_list = []
self.renewers = []
for tid in range(0, self.renewal_threads):
threads_list.append(self.start_renewer_thread(tid))
self.renewers.append(self.start_renewer_thread(tid))
# Start certificate parser thread
self.parser = self.start_parser_thread()
# Start certificate finding thread
self.finder = self.start_finder_thread()
self.monitor_threads()
if self.one_off:
self.handle_one_off()
else:
self.monitor_threads()
def exit_gracefully(self, signum, _frame):
"""Set self.stop so the main thread stops."""
......@@ -261,6 +265,65 @@ class Stapledaemon(object):
pass # cannot join current thread
LOG.info("Stopping daemon thread")
def handle_one_off(self):
    """
    Stop threads that are done so we can do a one-off run.

    - When the certfinder is done and the parsing queue is empty, we can
      end the certparser thread.
    - When the certparser is done and the renewal queue is empty, we
      can end the staplerenewers.
    - When the staplerenewers are done and the stapleadder queue is empty,
      we can end the stapleadder.
    - When all of the above are done, we end the scheduler.

    Exits the process with status 1 if the exit code tracker recorded any
    errors, with status 0 otherwise.
    """
    # Queue and worker thread mapping, note: these queues MUST exist.
    stop_threads = [
        ('parse', [self.parser]),
        ('renew', self.renewers)
    ]
    # Check if enabled before adding stapleadder queue.
    if self.staple_adder:
        stop_threads.append(('adder', [self.staple_adder]))

    def one_off_generator():
        """Yield False while work remains, True once everything ended."""
        LOG.debug("START ENDING THREADS")
        # Wait until the finder has finished indexing; until then the
        # downstream queues can still receive new tasks. (Fix: this was a
        # single `if` check, which proceeded while the finder could still
        # be adding parse tasks.)
        while self.finder.is_alive():
            yield False
        for queue, threads in stop_threads:
            # Queue must exist, if it doesn't it wasn't yet created.
            # NOTE(review): this waits only one tick for the queue to
            # appear — presumably it is always created by then; confirm.
            if not self.scheduler.queue_exists(queue):
                yield False
            # Wait until queues are empty..
            while not self.scheduler.is_empty_queue(queue):
                yield False
            # Stop threads that have empty queues..
            for thread in threads:
                thread.stop = True
            for thread in threads:
                while thread.is_alive():
                    yield False
        # End the scheduler
        self.scheduler.stop = True
        yield True

    for done in one_off_generator():
        if done:
            # Everything shut down cleanly — don't log/sleep once more.
            break
        LOG.debug("Waiting for threads to complete their queued tasks..")
        time.sleep(.25)
    # exit_code_tracker is set whenever one-off mode is enabled (see
    # __init_logging), so it is safe to dereference here.
    if self.exit_code_tracker.errors_occurred > 0:
        LOG.error("One off completed with errors: %s", self.exit_code_tracker)
        exit(1)
    else:
        LOG.info("One off completed without errors: %s", self.exit_code_tracker)
        exit(0)
def __spawn_thread(self, name, thread_object, restarted=0, **kwargs):
"""
Spawns threads based on objects and registers them in a dictionary.
......@@ -283,6 +346,7 @@ class Stapledaemon(object):
'kwargs': kwargs,
'thread': thread_obj,
'name': name,
'restarted': restarted
'restarted': restarted,
'stopped': False
})
return thread_obj
# -*- coding: utf-8 -*-
"""
This module defines a context in which we can run actions that are likely to
fail because they have intricate dependencies e.g. network connections,
......@@ -57,9 +56,7 @@ STACK_TRACE_FILENAME = "stapled_exception{:%Y%m%d-%H%M%s%f}.trace"
@contextmanager
def stapled_except_handle(ctx=None):
"""
Handle lots of potential errors and reschedule failed action contexts.
"""
"""Handle potential errors and reschedule failed action contexts."""
# pylint: disable=too-many-branches,too-many-statements
try:
yield # do the "with stapled_except_handle(ctx):" code block
......@@ -77,8 +74,8 @@ def stapled_except_handle(ctx=None):
LOG.critical("%s, giving up..", exc)
except (SocketError, BrokenPipeError) as exc:
# This is a fatal exception that can occur during initialisation of a
# StapleAdder or when an StapleAdder uses a socket that consistently has a
# broken pipe
# StapleAdder or when an StapleAdder uses a socket that consistently
# has a broken pipe
LOG.critical(exc)
except (RenewalRequirementMissing,
CertValidationError,
......@@ -143,37 +140,18 @@ def stapled_except_handle(ctx=None):
"entries",
err_count, len_ocsp_urls
)
except (IOError, OSError) as exc:
LOG.critical(handle_file_error(exc))
except OSError as exc:
LOG.critical(exc)
# the show must go on..
except Exception as exc: # pylint: disable=broad-except
dump_stack_trace(ctx, exc)
def handle_file_error(exc):
    """
    Turn an IOError or OSError into a human readable reason string.

    Can't use FileNotFoundError and PermissionError because they don't
    exist in Python 2.7.x yet. This won't be required after we remove
    Python 2.7.x support.

    :param Exception exc: OSError or IOError to handle logging for.
    :return str: Reason for OSError/IOError.
    """
    if exc.errno in (errno.EPERM, errno.EACCES):
        reason = "Permission error"
    elif exc.errno == errno.ENOENT:
        reason = "File not found error"
    elif isinstance(exc, IOError):
        reason = "I/O Error"
    else:
        reason = "OS Error"
    return "{}: {}".format(reason, exc)
def delete_ocsp_for_context(ctx):
"""
Delete OSCP staple for a context.
When something bad happens, sometimes it is good to delete a related bad
OCSP file so it can't be served any more.
......@@ -185,16 +163,18 @@ def delete_ocsp_for_context(ctx):
ocsp_file = "{}.ocsp".format(ctx.model.filename)
with open(ocsp_file, 'w') as ocsp_file_obj:
ocsp_file_obj.write("")
except (IOError, OSError) as exc:
except (OSError) as exc:
LOG.debug(
"Can't replace OCSP staple \"%s\" by an empty stub, reason: %s",
ocsp_file,
handle_file_error(exc)
exc
)
def dump_stack_trace(ctx, exc):
"""
Dump a stack trace to a file so stapled can be debugged.
Examine the last exception and dump a stack trace to a file, if it fails
due to an IOError or OSError, log that it failed so that a sysadmin
may make the directory writeable.
......
# -*- coding: utf-8 -*-
"""
This module holds the application specific exceptions.
"""
......
# -*- coding: utf-8 -*-
"""
Module for adding OCSP Staples to a running HAProxy instance.
"""
......
# -*- coding: utf-8 -*-
"""
This module takes renew task contexts from the scheduler which contain
certificate models that consist of parsed certificates. It then generates an
......
# -*- coding: utf-8 -*-
"""
This module defines an extended version of the general purpose
:class:`scheduling.ScheduledTaskContext` for use in the OCSP daemon.
......
# -*- coding: utf-8 -*-
"""
This is a general purpose scheduler. It does best effort scheduling and
execution of expired items in the order they are added. This also means that
there is no guarantee the tasks will be executed on time every time, in fact
they will always be late, even if just by milliseconds. If you need it to be
done on time, you schedule it early, but remember that it will still be best
effort.