#!/usr/bin/env python3
"""
Collect systemd-journal log entries around time of daemon exit and coredumps.
"""
import datetime
import json
import logging
import pathlib
import shutil
import subprocess
import sys
TIMESPAN_BEFORE = 600  # seconds of journal context captured before an event
TIMESPAN_AFTER = TIMESPAN_BEFORE  # seconds captured after the event (symmetric window)
CURSOR_DIR = pathlib.Path('/var/lib/knot-resolver')
# journal position reached by the previous run; used to skip already-processed logs
CURSOR_PATH = CURSOR_DIR / 'coredump_watcher.cursor'
class Timestamp:
    """A journald ``__REALTIME_TIMESTAMP`` value (microseconds since the
    UNIX epoch) with comparison, hashing, and filename-friendly formatting."""

    def __init__(self, usec):
        # journald exports the timestamp as a decimal string; normalize to int
        self.usec = int(usec)

    @property
    def unix(self):
        """Whole seconds since the UNIX epoch (sub-second part truncated)."""
        return self.usec // 10**6

    def __str__(self):
        # UTC, filesystem-safe format (used to build output file names);
        # timezone-aware fromtimestamp() replaces the deprecated
        # utcfromtimestamp() and produces the identical string
        return datetime.datetime.fromtimestamp(
            self.unix, tz=datetime.timezone.utc).strftime('%Y-%m-%d_%H:%M:%S')

    def __repr__(self):
        return 'Timestamp({})'.format(self.usec)

    def __lt__(self, other):
        # NotImplemented instead of AttributeError for foreign types
        if not isinstance(other, Timestamp):
            return NotImplemented
        return self.usec < other.usec

    def __eq__(self, other):
        # NotImplemented lets `ts == None` evaluate to False rather than raise
        if not isinstance(other, Timestamp):
            return NotImplemented
        return self.usec == other.usec

    def __hash__(self):
        # defining __eq__ suppresses the inherited __hash__; restore it so
        # Timestamps remain usable as dict keys / set members
        return hash(self.usec)
class Entry(dict):
@property
def timestamp(self):
usec = self.get('__REALTIME_TIMESTAMP')
if usec is None:
return None
return Timestamp(usec)
@property
def core_path(self):
filename = self.get('COREDUMP_FILENAME')
if filename is None:
return None
return pathlib.Path(filename)
def get_first(self, *keys):
for key in keys:
try:
return self[key]
except KeyError:
continue
return None
@property
def program(self):
return self.get_first('COREDUMP_UNIT', 'UNIT', '_SYSTEMD_UNIT', 'SYSLOG_IDENTIFIER')
@property
def pid(self):
return self.get_first('COREDUMP_PID', '_PID')
def save_cursor(cursor):
    """Persist the journal cursor so the next run can skip processed logs.

    A None cursor (journal was unavailable/empty) is silently ignored.
    """
    if cursor is None:
        return
    CURSOR_DIR.mkdir(parents=True, exist_ok=True)
    CURSOR_PATH.write_text(cursor)
    logging.info('log cursor saved into %s, next run will skip old logs',
                 CURSOR_PATH)
def load_cursor():
    """Read the journal cursor saved by a previous run, or None if absent."""
    try:
        cursor = CURSOR_PATH.read_text().strip()
    except FileNotFoundError:
        logging.info('log cursor file %s does not exist, parsing all logs',
                     CURSOR_PATH)
        return None
    logging.info('log cursor read from %s, skipping old logs',
                 CURSOR_PATH)
    return cursor
def get_cursor():
    """Return the cursor of the newest journal entry, or None if unavailable.

    The cursor marks the current end of the journal; it is captured at
    startup so the *next* invocation can skip everything this one processed.

    Raises subprocess.CalledProcessError when journalctl itself fails.
    """
    journal_args = ['journalctl', '-o', 'json', '-n', '1']
    # subprocess.run replaces the Popen/communicate dance; check=True makes
    # a journalctl failure explicit instead of surfacing later as a JSON error
    proc = subprocess.run(
        journal_args,
        stdout=subprocess.PIPE,
        universal_newlines=True,
        check=True)
    if not proc.stdout.strip():
        # an empty journal produced no entry; the original code would have
        # crashed in json.loads('') here — treat it as "nothing to skip"
        return None
    entry = Entry(**json.loads(proc.stdout))
    return entry.get('__CURSOR')
def read_journal(*args):
    """Yield Entry objects for kresd and systemd-coredump journal records.

    Additional journalctl arguments (cursor or time filters) are appended
    to the base command via *args.
    """
    command = [
        'journalctl',
        '-o', 'json',
        '-u', 'kres*',
        '-u', 'systemd-coredump*',
    ]
    command.extend(args)
    with subprocess.Popen(
            command,
            bufsize=1,  # line buffered: one JSON object per line
            universal_newlines=True,
            stdout=subprocess.PIPE) as jproc:
        for line in jproc.stdout:
            yield Entry(**json.loads(line))
def extract_logs(around_time, log_name):
    """Dump journal entries surrounding `around_time` to a pair of files.

    The window spans TIMESPAN_BEFORE s before to TIMESPAN_AFTER s after the
    event.  Writes `log_name`.json (raw records) and `log_name`.log
    (human-readable text); any existing suffix on log_name is replaced.
    """
    window_start = Timestamp(around_time.usec - TIMESPAN_BEFORE * 10**6)
    window_end = Timestamp(around_time.usec + TIMESPAN_AFTER * 10**6)
    entries = list(read_journal(
        '--since', '@{}'.format(window_start.unix),
        '--until', '@{}'.format(window_end.unix)))

    # machine-readable copy of the full window
    with log_name.with_suffix('.json').open('w') as jsonf:
        json.dump(entries, jsonf, indent=4)

    # human-readable rendering, with the triggering moment highlighted
    with log_name.with_suffix('.log').open('w') as logf:
        logf.write('##### logs since {}\n'.format(window_start))
        for entry in entries:
            if entry.timestamp == around_time:
                logf.write('##### HERE #####\n')
            logf.write('{t} {h} {prg}[{pid}]: {m}\n'.format(
                t=entry.timestamp,
                h=entry.get('_HOSTNAME'),
                prg=entry.program,
                pid=entry.pid,
                m=entry.get('MESSAGE')))
        logf.write('##### logs until {}\n'.format(window_end))
def main():
    """Entry point: scan the journal for daemon exits and coredumps, then
    write log snippets (and copies of the coredumps) into the directory
    given as the single command-line argument."""
    logging.basicConfig(level=logging.INFO)
    if len(sys.argv) != 2:
        sys.exit('Usage: {} <output log directory>'.format(sys.argv[0]))
    outdir = pathlib.Path(sys.argv[1])
    outdir.mkdir(parents=True, exist_ok=True)

    cursor_previous = load_cursor()
    # remember the journal end *now*, so entries appearing while we run are
    # picked up by the next invocation instead of being skipped
    cursor_at_start = get_cursor()

    filter_args = (
        [] if cursor_previous is None
        else ['--after-cursor', cursor_previous])
    exit_times = []
    coredumps = {}
    for entry in read_journal(*filter_args):
        if 'EXIT_CODE' in entry:
            logging.debug('exit@%s: %s', entry.timestamp, entry)
            exit_times.append(entry.timestamp)
        if 'COREDUMP_FILENAME' in entry:
            logging.debug('coredump @ %s: %s', entry.timestamp, entry.core_path)
            coredumps[entry.core_path] = entry.timestamp

    exit_times.sort()
    logging.debug('detected exits: %s', exit_times)
    for exit_time in exit_times:
        extract_logs(exit_time, outdir / str(exit_time))

    logging.debug('detected coredumps: %s', coredumps)
    coredumps_missing = 0
    for core_path, core_time in coredumps.items():
        out_path_prefix = outdir / str(core_time)
        extract_logs(core_time, out_path_prefix.with_suffix('.logs'))
        try:
            # keep the coredump next to its log snippet; it may already have
            # been vacuumed away by systemd-coredump, hence the except below
            shutil.copy(
                str(core_path),
                str(out_path_prefix.with_suffix('.{}'.format(core_path.name))))
        except FileNotFoundError as ex:
            logging.error('coredump file %s cannot be copied: %s', core_path, ex)
            coredumps_missing += 1

    logging.info('wrote %d coredumps and %d logs snippets (%s coredumps missing)',
                 len(coredumps) - coredumps_missing, len(exit_times),
                 coredumps_missing)
    save_cursor(cursor_at_start)


if __name__ == '__main__':
    main()