logview: migrate from optparse to argparse, add more date filtering options, and apply some PEP 8 fixes

This commit is contained in:
Francois Andrieu 2020-10-28 16:45:29 +01:00
parent 88b07454b7
commit 5e91005e93
2 changed files with 110 additions and 71 deletions
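
For readers skimming the diff, here is a minimal standalone sketch (not the committed code itself) of the optparse-to-argparse pattern this commit applies; the flags and defaults are copied from the parse_args() changes below:

from argparse import ArgumentParser

# argparse replaces optparse's add_option/parse_args pair and lets the new
# date filters be declared mutually exclusive, which optparse could not do
# without manual checks.
parser = ArgumentParser(usage="logview [options] [-d datestr] [-p playbook]")
date_group = parser.add_mutually_exclusive_group()
date_group.add_argument("-d", default='today', dest='datestr',
                        help="display logs from specified date")
date_group.add_argument("--since", dest="since",
                        help="display logs since specified date")
date_group.add_argument("--all", default=False, dest="list_all",
                        action='store_true', help="display all logs")

# optparse's parse_args() returned (opts, args); argparse returns one namespace.
opts = parser.parse_args(["--since", "yesterday"])
print(opts.datestr, opts.since, opts.list_all)  # today yesterday False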

callback_plugins/logdetail.py
View File

@@ -15,6 +15,10 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = r'''
callback: logdetail
callback_type: notification
@@ -48,9 +52,10 @@ except ImportError:
# Ansible v1 compat
CallbackBase = object
TIME_FORMAT="%b %d %Y %H:%M:%S"
TIME_FORMAT = "%b %d %Y %H:%M:%S"
MSG_FORMAT = "%(now)s\t%(count)s\t%(category)s\t%(name)s\t%(data)s\n"
MSG_FORMAT="%(now)s\t%(count)s\t%(category)s\t%(name)s\t%(data)s\n"
def getlogin():
try:
@@ -59,6 +64,7 @@ def getlogin():
user = pwd.getpwuid(os.geteuid())[0]
return user
class LogMech(object):
def __init__(self, logpath):
self.started = time.time()
@@ -91,13 +97,13 @@ class LogMech(object):
def logpath_play(self):
# this is all to get our path to look nice-ish
tstamp = time.strftime('%Y/%m/%d/%H.%M.%S', time.localtime(self.started))
path = os.path.normpath(self.logpath + '/' + self.playbook_id + '/' + tstamp + '/')
path = os.path.normpath(self.logpath + '/' + self.playbook_id + '/' + tstamp + '/')
if not os.path.exists(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno != 17: # if it is not dir exists then raise it up
if e.errno != 17: # if it is not dir exists then raise it up
raise
return path
@@ -132,18 +138,17 @@ class LogMech(object):
host = 'HOSTMISSING'
if type(data) == dict:
name = data.get('module_name',None)
name = data.get('module_name', None)
else:
name = "unknown"
# we're in setup - move the invocation info up one level
if 'invocation' in data:
invoc = data['invocation']
if not name and 'module_name' in invoc:
name = invoc['module_name']
#don't add this since it can often contain complete passwords :(
# don't add this since it can often contain complete passwords :(
del(data['invocation'])
if task:
@@ -160,7 +165,7 @@ class LogMech(object):
if self.play_info.get('check', False) and self.play_info.get('diff', False):
category = 'CHECK_DIFF:' + category
elif self.play_info.get('check', False):
elif self.play_info.get('check', False):
category = 'CHECK:' + category
# Sometimes this is None... other times it's fine. Other times it has
@@ -175,7 +180,6 @@ class LogMech(object):
fd.close()
class CallbackModule(CallbackBase):
"""
logs playbook results, per host, in /var/log/ansible/hosts
@@ -227,7 +231,8 @@ class CallbackModule(CallbackBase):
def v2_playbook_on_task_start(self, task, is_conditional):
self.task = task
self.task._name = task.name
if self.task:
self.task._name = task.get_name().strip()
self.logmech._last_task_start = time.time()
self._task_count += 1
@@ -264,8 +269,9 @@ class CallbackModule(CallbackBase):
pb_info['extra_vars'] = play._variable_manager.extra_vars
pb_info['inventory'] = play._variable_manager._inventory._sources
pb_info['playbook_checksum'] = secure_hash(path)
pb_info['check'] = self.play_context.check_mode
pb_info['diff'] = self.play_context.diff
if hasattr(self, "play_context"):
pb_info['check'] = self.play_context.check_mode
pb_info['diff'] = self.play_context.diff
self.logmech.play_log(json.dumps(pb_info, indent=4))
self._play_count += 1
@@ -273,17 +279,17 @@ class CallbackModule(CallbackBase):
info = {}
info['play'] = play.name
info['hosts'] = play.hosts
info['transport'] = str(self.play_context.connection)
info['number'] = self._play_count
info['check'] = self.play_context.check_mode
info['diff'] = self.play_context.diff
if hasattr(self, "play_context"):
info['transport'] = str(self.play_context.connection)
info['check'] = self.play_context.check_mode
info['diff'] = self.play_context.diff
self.logmech.play_info = info
try:
self.logmech.play_log(json.dumps(info, indent=4))
except TypeError:
print(("Failed to conver to JSON:", info))
def v2_playbook_on_stats(self, stats):
results = {}
for host in list(stats.processed.keys()):
@@ -292,5 +298,3 @@ class CallbackModule(CallbackBase):
self.logmech.play_log(json.dumps({'stats': results}, indent=4))
self.logmech.play_log(json.dumps({'playbook_end': time.time()}, indent=4))
print(('logs written to: %s' % self.logmech.logpath_play))
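
Before moving to the second file: the tab-separated records the callback writes with MSG_FORMAT are exactly what the logview script below splits back apart. A small round-trip illustration, with made-up sample values:

MSG_FORMAT = "%(now)s\t%(count)s\t%(category)s\t%(name)s\t%(data)s\n"

# Hypothetical record, shaped like what LogMech.log() writes per task result.
line = MSG_FORMAT % {
    "now": "Oct 28 2020 16:45:29",
    "count": 1,
    "category": "CHANGED",
    "name": "command",
    "data": '{"task_start": "1603900000", "task_end": "1603900002"}',
}

# search_logs() below recovers the five fields the same way.
task_ts, count, category, name, data = line.split("\t")
print(category, name)  # CHANGED command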

logview
View File

@@ -1,16 +1,23 @@
#!/usr/bin/python3
#!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: et ts=4 ai sw=4 sts=0
import sys
import json
from optparse import OptionParser
from argparse import ArgumentParser
import os
import re
import glob
import gzip
from datetime import datetime, date, timedelta
from datetime import datetime, timedelta
import dateutil.parser as dateparser
import configparser
try:
# Python3
import configparser
except ImportError:
# Python2
import ConfigParser as configparser
from ansible.config.manager import find_ini_config_file
from ansible.utils.color import hostcolor, stringc
from ansible.utils.color import stringc
from ansible import constants as C
from collections import Counter
@@ -19,11 +26,13 @@ if not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty():
else:
HAS_COLOR = True
logpath = '/var/log/ansible'
DEFAULT_LOGPATH = '/var/log/ansible'
default_search_terms = ['CHANGED', 'FAILED']
date_terms = {
"today": lambda: datetime.today().replace(hour=0, minute=0, second=0, microsecond=0),
"yesterday": lambda: datetime.today().replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(1),
"today": lambda: datetime.today().replace(
hour=0, minute=0, second=0, microsecond=0),
"yesterday": lambda: datetime.today().replace(
hour=0, minute=0, second=0, microsecond=0) - timedelta(1),
}
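
As a runnable sketch of how these shortcuts behave (assuming the python-dateutil package the script imports): known terms resolve to midnight of the relevant day, and anything else falls through to dateutil's free-form parser, which is what date_cheat() further down does:

from datetime import datetime, timedelta
import dateutil.parser as dateparser

date_terms = {
    "today": lambda: datetime.today().replace(
        hour=0, minute=0, second=0, microsecond=0),
    "yesterday": lambda: datetime.today().replace(
        hour=0, minute=0, second=0, microsecond=0) - timedelta(1),
}

def date_cheat(datestr):
    # Unknown strings fall back to free-form parsing, e.g. "2020-10-28".
    return date_terms.get(datestr, lambda: dateparser.parse(datestr))()

print(date_cheat("yesterday"))   # midnight yesterday
print(date_cheat("2020-10-28"))  # 2020-10-28 00:00:00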
@@ -44,7 +53,7 @@ def colorByCat(category, txt=None):
# This hack makes sure the text width is the same as any other colored text
color_out = u'\x1b[0;00m%s\x1b[0m' % (txt,)
if not HAS_COLOR:
color_out = txt
color_out = txt
return color_out
@@ -58,12 +67,14 @@ def colorByStats(txt, stats):
else:
return stringc(txt, C.COLOR_OK)
def colorByCount(txt, count, color):
s = "%s%s" % (txt, count)
if count > 0 and HAS_COLOR:
s = stringc(s, color)
s = stringc(s, color)
return s
def parse_info(infofile):
data = {}
with open(infofile) as f:
@@ -87,19 +98,26 @@ def format_stats(stats):
colorByCount("unr:", stats['unreachable'], C.COLOR_UNREACHABLE),
colorByCount("fail:", stats['failures'], C.COLOR_ERROR))
def col_width(rows):
widths = []
for col in zip(*(rows)):
col_width = max(map(len,col))
col_width = max(map(len, col))
widths.append(col_width)
widths[-1] = 0 # don't pad last column
widths[-1] = 0 # don't pad last column
return widths
def date_cheat(datestr):
dc = date_terms.get(datestr, lambda: dateparser.parse(datestr))
return dc()
def date_from_path(path):
date_comp = re.search(r'/(\d{4})/(\d{2})/(\d{2})', path)
return datetime(*map(int, date_comp.groups()))
def parse_args(args):
usage = """
logview [options] [-d datestr] [-p playbook]
@@ -114,53 +132,56 @@ def parse_args(args):
logview -s ANY -d yesterday -p mirrorlist # list all events from the mirrorlist playbook
"""
parser = OptionParser(usage=usage)
parser.add_option("-d", default='today', dest='datestr', help="time string of when you want logs")
parser.add_option("-p", default='*', dest='playbook', help="the playbook you want to look for")
parser.add_option("-H", default=[], dest='hostname', action='append', help="Limit to the specified hostname")
parser.add_option("-m", default=False, dest='message', action='store_true', help='Show tasks output')
parser.add_option("-v", default=False, dest='verbose', action='store_true', help='Verbose')
parser.add_option("-s", default=[], dest='search_terms', action='append', help="status to search for")
parser.add_option("-l", default=False, dest="list_pb", action='store_true', help="list playbooks for a specific date")
parser.add_option("-L", default=False, dest="list_all_pb", action='store_true', help="list all playbooks ever ran")
parser.add_option("--profile", default=False, dest="profile", action='store_true', help="output timing input per task")
(opts, args) = parser.parse_args(args)
parser = ArgumentParser(usage=usage)
date_group = parser.add_mutually_exclusive_group()
date_group.add_argument("-d", default='today', dest='datestr', help="display logs from specified date")
date_group.add_argument("--since", dest="since", help="display logs since specified date")
date_group.add_argument("--all", default=False, dest="list_all", action='store_true', help="display all logs")
parser.add_argument("-p", default='*', dest='playbook', help="the playbook you want to look for")
parser.add_argument("-H", default=[], dest='hostname', action='append', help="Limit to the specified hostname")
parser.add_argument("-m", default=False, dest='message', action='store_true', help='Show tasks output')
parser.add_argument("-v", default=False, dest='verbose', action='store_true', help='Verbose')
parser.add_argument("-s", default=[], dest='search_terms', action='append', help="status to search for")
parser.add_argument("-l", default=False, dest="list_pb", action='store_true', help="list playbook runs")
parser.add_argument("--profile", default=False, dest="profile", action='store_true', help="output timing input per task")
opts = parser.parse_args(args)
opts.datestr = date_cheat(opts.datestr)
if not opts.search_terms:
opts.search_terms = default_search_terms
if opts.since:
opts.since = date_cheat(opts.since)
opts.search_terms = list(map(str.upper, opts.search_terms))
return opts, args
return opts
def search_logs(opts, logfiles):
rows = []
headers = []
msg = ''
for fn in sorted(logfiles):
hostname = os.path.basename(fn).replace('.log', '').replace('.gz', '')
timestamp = os.path.basename(os.path.dirname(fn))
if opts.hostname and hostname not in opts.hostname:
continue
try:
with gzip.open(fn) as f:
f.read()
open_f = gzip.open(fn, "rt")
except:
except IOError:
open_f = open(fn)
for line in open_f:
things = line.split('\t')
if len(things) < 5:
print("(logview error - unhandled line): %r\n" % line)
msg += "(logview error - unhandled line): %r\n" % line
continue
# See callback_plugins/logdetail.py for how these lines get created.
# MSG_FORMAT="%(now)s\t%(count)s\t%(category)s\t%(name)s\t%(data)s\n"
task_ts, count, category, name, data = things
if category in opts.search_terms or 'ANY' in opts.search_terms:
dur = None
last_col = ""
@@ -170,7 +191,7 @@ def search_logs(opts, logfiles):
end = slurp.get('task_end', 0)
if st and end:
dur = '%.2fs' % (float(end) - float(st))
state = colorByCat(category)
c_hostname = colorByCat(category, hostname)
@@ -180,11 +201,14 @@
c_hostname = colorByStats(hostname, slurp)
state = colorByStats(category, slurp)
result = [timestamp, c_hostname, task_ts, count, state]
if not name:
name = slurp.get("task_module")
try:
name = name.decode('utf8')
except AttributeError:
pass
result.append(name)
if dur:
@@ -192,18 +216,18 @@
if not opts.verbose:
if type(slurp) == dict:
for term in ['cmd',]:
for term in ['cmd', ]:
if term in slurp:
last_col += '\t%s:%s' % (term, slurp.get(term, None))
if opts.message:
for term in ['msg', 'stdout']:
if term in slurp:
value = slurp.get(term, None)
if type(value) is list:
value = "\n".join(value)
value = "\n".join(value)
if value:
last_col += '\n%s: %s\n' % (term, colorByCat(category, value.strip()))
last_col += '\n%s: %s\n' % (term, colorByCat(category, value.strip()))
else:
last_col += '\n'
last_col += json.dumps(slurp, indent=4)
@@ -211,7 +235,7 @@
result.append(last_col)
rows.append(result)
return rows
@@ -220,21 +244,26 @@
if cfg:
cp = configparser.ConfigParser()
cp.read(cfg)
logpath = cp.get('callback_logdetail', "log_path", fallback="/var/log/ansible")
opts, args = parse_args(args)
try:
logpath = cp.get('callback_logdetail', "log_path")
except configparser.NoSectionError:
logpath = DEFAULT_LOGPATH
opts = parse_args(args)
rows = []
# List play summary
if opts.list_pb or opts.list_all_pb:
rows.append([ "Date", colorByCat("", "Playbook"), "Ran By", "Hosts", "Stats"])
for r,d,f in os.walk(logpath):
if opts.list_pb:
rows.append(["Date", colorByCat("", "Playbook"), "Ran By", "Hosts", "Stats"])
for r, d, f in os.walk(logpath):
if opts.since and f and date_from_path(r) < opts.since:
continue
for file in f:
if file.endswith('.info'):
pb = parse_info(os.path.join(r,file))
pb = parse_info(os.path.join(r, file))
pb_name = os.path.splitext(os.path.basename(pb['playbook']))[0]
pb_date = datetime.fromtimestamp(pb['playbook_start'])
if (
opts.list_all_pb
opts.list_all or opts.since
or (
opts.datestr != opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0)
and opts.datestr == pb_date)
@@ -253,8 +282,7 @@
summary = format_stats(stats)
# summary = "ok:%s chd:%s unr:%s faild:%s" % (stats['ok'], stats['changed'], stats['unreachable'], stats['failures'])
rows.append([ str(pb_date), pb_name, pb['userid'], str(host_count), summary ])
rows.append([str(pb_date), pb_name, pb['userid'], str(host_count), summary])
m_widths = col_width(rows)
if len(rows) <= 1:
@@ -263,12 +291,19 @@
for row in rows:
print(" ".join((val.ljust(width) for val, width in zip(row, m_widths))).strip())
# Play detail
else:
for pb in glob.glob(os.path.join(logpath, opts.playbook)):
pb_name = os.path.basename(pb)
for pb_logdir in glob.glob(os.path.join(pb, opts.datestr.strftime("%Y/%m/%d"))):
if opts.list_all or opts.since:
date_glob = glob.glob(os.path.join(pb, "*/*/*"))
else:
date_glob = glob.glob(os.path.join(pb, opts.datestr.strftime("%Y/%m/%d")))
for pb_logdir in date_glob:
if opts.since:
run_date = date_from_path(pb_logdir)
if run_date < opts.since:
continue
if opts.datestr != opts.datestr.replace(hour=0, minute=0, second=0, microsecond=0):
logfiles = glob.glob(pb_logdir + '/' + opts.datestr.strftime("%H.%M.%S") + '/*.log*')
else:
@@ -276,11 +311,11 @@
rows = search_logs(opts, logfiles)
if rows:
m_widths = col_width(rows)
print(pb_name)
print("%s\n-------" % (pb_name,))
for row in rows:
print(" ".join((val.ljust(width) for val, width in zip(row, m_widths))))
#print(pb_name)
#print(msg)
print("")
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
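
To close the loop on the new --since filter: it works because each run's log directory embeds its date, so date_from_path() (reproduced here with a made-up path) can recover a datetime to compare against the parsed --since value:

import re
from datetime import datetime

def date_from_path(path):
    # Pull the YYYY/MM/DD components out of the log directory layout.
    date_comp = re.search(r'/(\d{4})/(\d{2})/(\d{2})', path)
    return datetime(*map(int, date_comp.groups()))

# Hypothetical run directory under the default /var/log/ansible tree.
print(date_from_path('/var/log/ansible/mirrorlist/2020/10/28/16.45.29'))
# -> 2020-10-28 00:00:00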