#!/usr/bin/python3
# -*- coding: utf-8 -*-
# vim: et ts=4 ai sw=4 sts=0

import sys
import json
from argparse import ArgumentParser
import os
import re
import glob
import gzip
from datetime import datetime, timedelta
import dateutil.parser as dateparser
try:
    # Python3
    import configparser
except ImportError:
    # Python2
    import ConfigParser as configparser
from ansible.config.manager import find_ini_config_file
from ansible.utils.color import stringc
from ansible import constants as C
from collections import Counter
if not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty():
    HAS_COLOR = False
else:
    HAS_COLOR = True

DEFAULT_LOGPATH = '/var/log/ansible'
default_search_terms = ['CHANGED', 'FAILED']
date_terms = {
    "today": lambda: datetime.today().replace(
        hour=0, minute=0, second=0, microsecond=0),
    "yesterday": lambda: datetime.today().replace(
        hour=0, minute=0, second=0, microsecond=0) - timedelta(1),
}
def colorByCat(category, txt=None):
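    """Colorize txt according to an Ansible result category.

    If txt is omitted, the category string itself is colorized. Unknown
    categories get a neutral escape sequence so the text width matches other
    colorized strings, and plain text is returned when stdout is not a tty.
    """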
    if not txt:
        txt = category
    if 'OK' in category:
        color_out = stringc(txt, C.COLOR_OK)
    elif "FAILED" in category:
        color_out = stringc(txt, C.COLOR_ERROR)
    elif "CHANGED" in category:
        color_out = stringc(txt, C.COLOR_CHANGED)
    elif "SKIPPED" in category:
        color_out = stringc(txt, C.COLOR_SKIP)
    elif "UNREACHABLE" in category:
        color_out = stringc(txt, C.COLOR_UNREACHABLE)
    else:
        # This hack makes sure the text width is the same as any other colored text
        color_out = '\x1b[0;00m%s\x1b[0m' % (txt,)
    if not HAS_COLOR:
        color_out = txt
    return color_out
def colorByStats(txt, stats):
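    """Colorize txt based on aggregated play stats.

    Failures take precedence, then unreachable hosts, then changes;
    otherwise the OK color is used.
    """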
    if stats['failures'] != 0:
        return stringc(txt, C.COLOR_ERROR)
    elif stats['unreachable'] != 0:
        return stringc(txt, C.COLOR_UNREACHABLE)
    elif stats['changed'] != 0:
        return stringc(txt, C.COLOR_CHANGED)
    else:
        return stringc(txt, C.COLOR_OK)
def colorByCount(txt, count, color):
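    """Return "<txt><count>", colorized when the count is non-zero and color output is enabled."""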
    s = "%s%s" % (txt, count)
    if count > 0 and HAS_COLOR:
        s = stringc(s, color)
    return s
def parse_info(infofile):
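    """Parse a .info file written by the logdetail callback plugin.

    The file is a stream of concatenated JSON objects; objects containing a
    'play' key are collected under data['plays'], everything else is merged
    into the returned dict.
    """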
    data = {}
    with open(infofile) as f:
        content = f.read()
    obj_list = [x + '}' for x in content.split('\n}')]
    plays = []
    for obj in obj_list[:-1]:
        js = json.loads(obj)
        if 'play' in js:
            plays.append(js)
        else:
            data.update(json.loads(obj))
    data['plays'] = plays
    return data
def format_stats(stats):
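    """Render the ok/changed/unreachable/failed counters as a short colorized summary string."""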
    return "%s %s %s %s" % (
        colorByCount("ok:", stats['ok'], C.COLOR_OK),
        colorByCount("chg:", stats['changed'], C.COLOR_CHANGED),
        colorByCount("unr:", stats['unreachable'], C.COLOR_UNREACHABLE),
        colorByCount("fail:", stats['failures'], C.COLOR_ERROR))
def col_width(rows):
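    """Compute a display width for each column (widest cell wins).

    The last column gets width 0 so it is never padded.
    """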
    widths = []
    for col in zip(*rows):
        col_width = max(list(map(len, col)))
        widths.append(col_width)
    widths[-1] = 0  # don't pad last column
    return widths
def date_cheat(datestr):
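    """Translate the 'today'/'yesterday' shortcuts into datetimes; otherwise parse datestr with dateutil."""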
    dc = date_terms.get(datestr, lambda: dateparser.parse(datestr))
    return dc()
def date_from_path(path):
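    """Extract the /YYYY/MM/DD part of a log path and return it as a datetime."""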
    date_comp = re.search(r'/(\d{4})/(\d{2})/(\d{2})', path)
    return datetime(*list(map(int, date_comp.groups())))
def datetime_from_path(path):
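    """Extract the /YYYY/MM/DD/HH.MM.SS part of a log path and return it as a full datetime."""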
    date_comp = re.search(r'/(\d{4})/(\d{2})/(\d{2})/(\d{2})\.(\d{2})\.(\d{2})', path)
    return datetime(*list(map(int, date_comp.groups())))
def parse_args(args):
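    """Parse command line arguments: resolve date shortcuts, apply default search terms, and upper-case the requested states."""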
    usage = """
    logview [options] [-d datestr] [-p playbook]

    examples:
      logview -d yesterday -l                        # lists playbooks run on that date
      logview -s OK -s FAILED -d yesterday           # list events from yesterday that failed or were ok
      logview -s CHANGED -d yesterday -p mirrorlist  # list events that changed from the mirrorlist playbook
      logview -s ANY -d yesterday -p mirrorlist      # list all events from the mirrorlist playbook
    """
    parser = ArgumentParser(usage=usage)
    date_group = parser.add_mutually_exclusive_group()
    date_group.add_argument("-d", default='today', dest='datestr', help="display logs from the specified date")
    date_group.add_argument("--since", dest="since", help="display logs since the specified date")
    date_group.add_argument("--all", default=False, dest="list_all", action='store_true', help="display all logs")
    parser.add_argument("-p", default='*', dest='playbook', help="the playbook to look for")
    parser.add_argument("-H", default=[], dest='hostname', action='append', help="limit to the specified hostname")
    parser.add_argument("-m", default=False, dest='message', action='store_true', help="show task output")
    parser.add_argument("-v", default=False, dest='verbose', action='store_true', help="verbose")
    parser.add_argument("-s", default=[], dest='search_terms', action='append', help="status to search for")
    parser.add_argument("-l", default=False, dest="list_pb", action='store_true', help="list playbook runs")
    parser.add_argument("--profile", default=False, dest="profile", action='store_true', help="output timing info per task")
    opts = parser.parse_args(args)

    opts.datestr = date_cheat(opts.datestr)
    if not opts.search_terms:
        opts.search_terms = default_search_terms
    if opts.since:
        opts.since = date_cheat(opts.since)
    opts.search_terms = list(map(str.upper, opts.search_terms))
    return opts
def search_logs(opts, logfiles):
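    """Scan the given per-host log files for events matching the requested states.

    Returns a list of display rows, starting with a header row.
    """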
    rows = [("Play Date", colorByCat("Hostname"), "Task Time", "Id", colorByCat("State"), "Task Name", "")]
    msg = ''
    for fn in sorted(logfiles):
        hostname = os.path.basename(fn).replace('.log', '').replace('.gz', '')
        timestamp = datetime_from_path(fn).strftime("%a %b %d %Y %H:%M:%S")

        if opts.hostname and hostname not in opts.hostname:
            continue

        try:
            with gzip.open(fn) as f:
                f.read()
            open_f = gzip.open(fn, "rt")
        except IOError:
            open_f = open(fn)

        for line in open_f:
            things = line.split('\t')
            if len(things) < 5:
                msg += "(logview error - unhandled line): %r\n" % line
                continue

            # See callback_plugins/logdetail.py for how these lines get created.
            # MSG_FORMAT="%(now)s\t%(count)s\t%(category)s\t%(name)s\t%(data)s\n"
            task_ts, count, category, name, data = things

            if category in opts.search_terms or 'ANY' in opts.search_terms:
                dur = None
                last_col = ""
                slurp = json.loads(data)
                if opts.profile:
                    st = slurp.get('task_start', 0)
                    end = slurp.get('task_end', 0)
                    if st and end:
                        dur = '%.2fs' % (float(end) - float(st))

                state = colorByCat(category)
                c_hostname = colorByCat(category, hostname)

                if "STATS" in category:
                    if type(slurp) == dict:
                        name = format_stats(slurp)
                        c_hostname = colorByStats(hostname, slurp)
                        state = colorByStats(category, slurp)

                result = [timestamp, c_hostname, task_ts, count, state]

                if not name:
                    name = slurp.get("task_module")
                try:
                    name = name.decode('utf8')
                except AttributeError:
                    pass
                result.append(name)

                if dur:
                    last_col += "%s " % (dur,)

                if not opts.verbose:
                    if type(slurp) == dict:
                        for term in ['cmd', ]:
                            if term in slurp:
                                last_col += '\t%s:%s' % (term, slurp.get(term, None))

                    if opts.message:
                        for term in ['msg', 'stdout']:
                            if term in slurp:
                                value = slurp.get(term, None)
                                if type(value) is list:
                                    value = "\n".join(value)
                                if value:
                                    last_col += '\n%s: %s\n' % (term, colorByCat(category, value.strip()))
                else:
                    last_col += '\n'
                    last_col += json.dumps(slurp, indent=4)
                    last_col += '\n'

                result.append(last_col)
                rows.append(result)

    return rows
def main(args):
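    """Locate the logdetail log path from ansible.cfg (falling back to DEFAULT_LOGPATH),
    then either list playbook runs (-l) or print matching task events per playbook.
    """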
    cfg = find_ini_config_file()
    if cfg:
        cp = configparser.ConfigParser()
        cp.read(cfg)
        try:
            logpath = cp.get('callback_logdetail', "log_path")
        except configparser.NoSectionError:
            logpath = DEFAULT_LOGPATH
    else:
        logpath = DEFAULT_LOGPATH
    opts = parse_args(args)
    rows = []

    # List play summary
    if opts.list_pb:
        rows.append(["Date", colorByCat("", "Playbook"), "Ran By", "Hosts", "Stats"])
        for r, d, f in os.walk(logpath):
            if opts.since and f and date_from_path(r) < opts.since:
                continue
            for file in f:
                if file.endswith('.info'):
                    pb = parse_info(os.path.join(r, file))
                    pb_name = os.path.splitext(os.path.basename(pb['playbook']))[0] if "playbook" in pb else "???"
                    pb_date = datetime_from_path(r)
                    if (opts.list_all or opts.since
                            or (opts.datestr != opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0)
                                and opts.datestr == pb_date)
                            or (opts.datestr == opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0)
                                and opts.datestr.date() == pb_date.date())):
                        stats = Counter()
                        hosts = []
                        if "stats" in pb:
                            for host, stat in list(pb['stats'].items()):
                                del stat['task_userid']
                                stats += Counter(stat)
                                hosts.append(host)
                        host_count = len(set(hosts))
                        pb_name = colorByStats(pb_name, stats)
                        summary = format_stats(stats)

                        rows.append([pb_date.isoformat(), pb_name, pb['userid'], str(host_count), summary])

        m_widths = col_width(rows)
        if len(rows) <= 1:
            print("no log")
        else:
            for row in rows:
                print(" ".join(val.ljust(width) for val, width in zip(row, m_widths)).strip())

    # Play detail
    else:
        for pb in glob.glob(os.path.join(logpath, opts.playbook)):
            pb_name = os.path.basename(pb)
            if opts.list_all or opts.since:
                date_glob = glob.glob(os.path.join(pb, "*/*/*"))
            else:
                date_glob = glob.glob(os.path.join(pb, opts.datestr.strftime("%Y/%m/%d")))
            for pb_logdir in date_glob:
                run_date = date_from_path(pb_logdir)
                if opts.since and run_date < opts.since:
                    continue
                if opts.datestr != opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0):
                    logfiles = glob.glob(pb_logdir + '/' + opts.datestr.strftime("%H.%M.%S") + '/*.log*')
                else:
                    logfiles = glob.glob(pb_logdir + '/*/*.log*')
                rows = search_logs(opts, logfiles)
                if len(rows) > 1:
                    m_widths = col_width(rows)
                    print("%s\n-------" % (pb_name,))
                    for row in rows:
                        print(" ".join(val.ljust(width) for val, width in zip(row, m_widths)))
                    print("")
if __name__ == "__main__":
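    # Pass only the CLI arguments (drop the program name).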
    sys.exit(main(sys.argv[1:]))