author     navid <navid@ef72aa8b-4018-0410-8976-d6e080ef94d8>  2007-11-22 10:40:27 +0000
committer  navid <navid@ef72aa8b-4018-0410-8976-d6e080ef94d8>  2007-11-22 10:40:27 +0000
commit     e2ea89dbbe1b1731ba4eda7fbe40ea315594a1c9 (patch)
tree       affd667e6f91c207a1c497ad895f336f0f386f59 /src/extras/sos-html-logs/lib
parent     e60df25b9b22ec1bf78d8d95ca7673d559f13333 (diff)
download   sos-e2ea89dbbe1b1731ba4eda7fbe40ea315594a1c9.tar.gz
merged navid-dev back into trunk, see spec file for details
git-svn-id: svn+ssh://svn.fedorahosted.org/svn/sos/trunk@457 ef72aa8b-4018-0410-8976-d6e080ef94d8
Diffstat (limited to 'src/extras/sos-html-logs/lib')
-rw-r--r--  src/extras/sos-html-logs/lib/__init__.py                     0
-rw-r--r--  src/extras/sos-html-logs/lib/helpers.py                     41
-rw-r--r--  src/extras/sos-html-logs/lib/logs_abstraction.py           415
-rw-r--r--  src/extras/sos-html-logs/lib/parsers/__init__.py             0
-rw-r--r--  src/extras/sos-html-logs/lib/parsers/crazyclock_parser.py   24
-rw-r--r--  src/extras/sos-html-logs/lib/parsers/dmapper_parser.py     106
-rw-r--r--  src/extras/sos-html-logs/lib/parsers/simple_parser.py       29
-rw-r--r--  src/extras/sos-html-logs/lib/parsers/simple_parser.rules    38
8 files changed, 653 insertions, 0 deletions
diff --git a/src/extras/sos-html-logs/lib/__init__.py b/src/extras/sos-html-logs/lib/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/src/extras/sos-html-logs/lib/__init__.py
diff --git a/src/extras/sos-html-logs/lib/helpers.py b/src/extras/sos-html-logs/lib/helpers.py
new file mode 100644
index 00000000..995cf16b
--- /dev/null
+++ b/src/extras/sos-html-logs/lib/helpers.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+def ksort(d, func = None):
+    keys = d.keys()
+    keys.sort(func)
+    return keys
+
+class Memoize:
+    """Memoize(fn) - an instance which acts like fn but memoizes its arguments
+    Will only work on functions with non-mutable arguments
+    """
+    def __init__(self, fn):
+        self.fn = fn
+        self.memo = {}
+    def __call__(self, *args):
+        if not self.memo.has_key(args):
+            self.memo[args] = self.fn(*args)
+        return self.memo[args]
+
+class SQL:
+    def __init__(self):
+        self.dbcon = sqlite.connect(":memory:", check_same_thread=False)
+        self.dbcur = self.dbcon.cursor()
+
+        self.dbcon.execute("create table events(date, host, position, message, css_style)")
+
+    def execute(self, query):
+        return self.dbcon.execute(query)
+
+def color_gradient(src, dst, percent):
+    csrc = [ col for col in src ]
+    cdst = [ col for col in dst ]
+    toret = []
+
+    for inc in range(0,3):
+        toret.append(csrc[inc] + ((cdst[inc] - csrc[inc]) * percent / 100))
+
+    return toret
+
+def rgb_to_hex(rgb):
+    return "%X%X%X" % (rgb[0], rgb[1], rgb[2])
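For orientation, a quick usage sketch of the helpers above. It sticks to the pure-Python functions; the SQL class is left out because this file refers to a sqlite module it never imports, presumably relying on the surrounding package to provide that name:

    # hedged sketch, Python 2 like the rest of the codebase
    from helpers import ksort, Memoize, color_gradient, rgb_to_hex

    def slow_square(x):
        print "computing", x
        return x * x

    square = Memoize(slow_square)
    print square(12)    # computed, result stored in self.memo
    print square(12)    # answered from the cache, no second "computing"

    print ksort({"beta": 1, "alpha": 2})    # ['alpha', 'beta']

    # halfway from red to blue; note %X is unpadded, so components
    # below 0x10 yield a short string ("7F07F" here, not "7F007F")
    print rgb_to_hex(color_gradient((255, 0, 0), (0, 0, 255), 50))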
diff --git a/src/extras/sos-html-logs/lib/logs_abstraction.py b/src/extras/sos-html-logs/lib/logs_abstraction.py
new file mode 100644
index 00000000..5708c960
--- /dev/null
+++ b/src/extras/sos-html-logs/lib/logs_abstraction.py
@@ -0,0 +1,415 @@
+#!/usr/bin/env python
+
+import os, sys, time, re, pdb
+from threading import Thread, Lock
+from helpers import *
+from operator import itemgetter
+import traceback
+
+class cluster_class:
+
+    def __init__(self):
+        self.hosts = {}
+        self.index = {}
+        self.daemon_log_counter = []
+        self.parsers = []
+
+    def host_names(self):
+        return ksort(self.hosts)
+
+    def register_parser(self, parser_class):
+        self.parsers.append(parser_class)
+
+    def get_parser(self, parser_name):
+        for parser in self.parsers:
+            if parser.__class__.__name__ == parser_name:
+                return parser
+
+    def get_host(self, host):
+        return self.hosts[host]
+
+    def tell(self):
+        toret = {}
+        for host in self.hosts:
+            toret[host] = self.hosts[host].tell()
+        return toret
+
+    def tell_sum(self):
+        toret = 0
+        for host in self.hosts:
+            toret += self.hosts[host].tell()
+        return toret
+
+    def size(self):
+        toret = 0
+        for host in self.hosts:
+            toret += self.hosts[host].size()
+        return toret
+
+    def seek(self, positions):
+        # make sure positions in argument are valid
+        for host in self.hosts:
+            if host not in positions.keys():
+                print "cannot find", positions
+                raise "Invalid_Positions"
+
+        # seek each host to saved position
+        for host in positions:
+            self.hosts[host].seek(positions[host])
+
+        return True
+
+    def seek_beginning(self):
+        for host in self.hosts:
+            self.hosts[host].seek(0)
+
+        return True
+
+    def add_log(self, logname):
+        log = logfile_class(logname)
+        hostname = log.hostname()
+        sys.stderr.write("""adding log "%s" for host %s\n""" % (logname, hostname))
+        if not self.hosts.has_key(hostname):
+            self.hosts[hostname] = host_class()
+        self.hosts[hostname].add_log(log)
+
+    def get_position_by_date(self, goto_date):
+        try:
+            return self.index[goto_date]["position"]
+        except KeyError:
+            # can't find position in cache, calculate on the fly
+            #
+            for cmp_date in ksort(self.index):
+                if goto_date <= cmp_date:
+                    return self.index[cmp_date]["position"]
+            return None
+
+    def parse(self, threaded = False):
+
+        if threaded and (not hasattr(self,"parse_t") or self.parse_t == None):
+            self.parse_t = Thread(target=self.parse, name='parse-thread', args = [True] )
+            self.parse_t.start()
+            return self.parse_t
+
+        print "parsing begins"
+
+        daemon_log_counter = {}
+
+        self.seek_beginning()
+
+        for date in self:
+
+            self.index[date.date] = { "position":date.position, "log_counter":{} }
+
+            for host in self.hosts:
+                self.index[date.date]["log_counter"][host]=0
+
+                try:
+                    for log in date[host]:
+                        self.index[date.date]["log_counter"][host]+=1
+
+                        for parser_class in self.parsers:
+                            parser_class.parse_line(date, log)
+
+                        # count how many logs per daemon
+                        try:
+                            daemon_log_counter[log.daemon()]+=1
+                        except KeyError:
+                            daemon_log_counter[log.daemon()]=1
+
+                except "Eof":
+                    # no more logs for this host
+                    pass
+
+        self.daemon_log_counter = sorted(daemon_log_counter.items(), key=itemgetter(1), reverse=True)
+
+        print "parsing ends."
+
+    def eof(self):
+        for host in self.hosts:
+            if not self.hosts[host].eof():
+#                print "All logs are not EOF yet", host
+                return False
+        print "All logs are EOF!"
+        return True
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        if self.eof():
+            raise StopIteration
+
+        return log_date_class(cluster = self)
+
+    def instance(self):
+        toret = cluster_class()
+
+        for host in self.hosts:
+            toret.hosts[host] = host_class()
+
+            for log in self.hosts[host].logs:
+                toret.hosts[host].logs.append(logfile_class(log.fname))
+
+        toret.index = self.index
+        toret.daemon_log_counter = self.daemon_log_counter
+        toret.parsers = self.parsers
+
+        return toret
+
+class log_date_class:
+    def __init__(self, cluster):
+        self.cluster = cluster
+        self.date = None
+        self.hosts = cluster.hosts.keys()
+
+        self.position = cluster.tell()
+
+        newtime = None
+
+        # 1st run, must find out what is the oldest date for each host
+        for host in self.hosts:
+            while True:
+                try:
+                    newtime = time.strptime("2007 " + cluster.hosts[host].readline()[0:15], "%Y %b %d %H:%M:%S")
+                except "Eof":
+                    break
+                except ValueError:
+                    print "parsing error in line", cluster.hosts[host].tell()
+                else:
+                    break
+
+            if newtime:
+                if not self.date or newtime < self.date:
+                    self.date = newtime
+
+            if not cluster.hosts[host].eof():
+                cluster.hosts[host].backline()
+
+        # this should almost never happen, but just in case.
+        if not self.date:
+            raise "Strange_Eof"
+
+    def __str__(self):
+        return time.strftime("%b %d %H:%M:%S", self.date)
+
+    def __getitem__(self, host):
+        return log_date_host(self.cluster, self.cluster.hosts[host], self.date)
+
+    def __iter__(self):
+        return self
+
+class log_date_host:
+    def __init__(self, cluster, host, date):
+        self.cluster = cluster
+        self.host = host
+        self.date = date
+
+        self.parent_date = date
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        position = self.host.tell()
+
+        self.host.readline()
+
+        try:
+            if time.strptime("2007 " + self.host.cur_line[0:15], "%Y %b %d %H:%M:%S") <= self.date:
+                return log_line_class(self.parent_date, self.host, position, self.host.cur_line)
+        except:
+            return log_line_class(self.parent_date, self.host, position, self.host.cur_line)
+
+        self.host.backline()
+
+        raise StopIteration
+
+class log_line_class:
+    def __init__(self, date, host, position, line):
+        self.host = host
+        self.position = position
+        self.line = line
+        self.parse = Memoize(self.parse_uncached)
+
+        self.parent_date = date
+        self.parent_host = host
+
+    def parse_uncached(self):
+        try:
+            return re.findall(r"""^(... .. ..:..:..) %s ([-_0-9a-zA-Z \.\/\(\)]+)(\[[0-9]+\])?(:)? (.*)$""" % self.hostname(), self.line)[0]
+        except:
+            return [ None, None, None, None, None ]
+
+    def __str__(self):
+        return self.line
+
+    def date(self):
+        try:
+            return time.strptime("2007 " + self.line[0:15], "%Y %b %d %H:%M:%S")
+        except:
+            return False
+
+    def hostname(self):
+        return self.line[16:].split(" ", 1)[0]
+
+    def daemon(self):
+        return self.parse()[1]
+
+    def message(self):
+        return self.parse()[4]
+
+class host_class:
+
+    def __init__(self):
+        self.logs = []
+
+        self.log_idx = 0 # first log
+        self.log_ptr = 0 # first char
+
+        self.cur_line = None
+
+    def __str__(self):
+        return self.hostname()
+
+    def add_log(self, logfile):
+
+        for inc in range(0,len(self.logs)):
+            if logfile.time_end() < self.logs[inc].time_begin():
+                self.logs.insert(inc, logfile)
+                break
+        else:
+            self.logs.append(logfile)
+
+    def hostname(self):
+        return self.logs[0].hostname()
+#        try: return self.logs[0].hostname()
+#        except: return None
+
+    def tell(self):
+        sumsize = 0
+        if self.log_idx > 0:
+            for inc in range(0, self.log_idx):
+                sumsize += self.logs[inc].size()
+        try:
+            sumsize += self.fp().tell()
+        except TypeError:
+            pass
+        return sumsize
+
+    def size(self):
+        sumsize = 0
+        for inc in range(0, len(self.logs)):
+            sumsize += self.logs[inc].size()
+        return sumsize
+
+    def eof(self):
+        if self.tell() >= self.size():
+            return True
+        return False
+
+    def seek(self, offset, whence = 0):
+        if whence == 1: offset = self.tell() + offset
+        elif whence == 2: offset = self.size() + offset
+
+        sumsize = 0
+        for inc in range(0, len(self.logs)):
+            if offset <= sumsize + self.logs[inc].size():
+                offset -= sumsize
+                self.log_idx = inc
+                self.log_ptr = offset
+                self.logs[inc].seek(offset)
+                return True
+            sumsize += self.logs[inc].size()
+        raise "Off_Boundaries"
+
+    def seek_and_read(self, offset, whence = 0):
+        self.seek(offset, whence)
+        return self.readline()
+
+    def time(self):
+        return time.strptime("2007 " + self.cur_line[0:15], "%Y %b %d %H:%M:%S")
+
+    def fp(self):
+        return self.logs[self.log_idx]
+
+    def backline(self):
+        self.seek(-len(self.cur_line), 1)
+
+    def readline(self):
+        if self.eof():
+            raise "Eof"
+
+        while True:
+            position = self.fp().tell()
+            fromfile = self.fp().fname
+            toret = self.fp().readline()
+            if len(toret) == 0:
+                if self.log_idx < len(self.logs):
+                    self.log_idx += 1
+                    self.fp().seek(0)
+                    continue
+                else:
+                    return ""
+
+            if len(toret) > 0 or toret == "":
+                self.cur_line = toret
+                self.cur_file = fromfile
+                self.cur_pos = position
+                return toret
+            else:
+                print "invalid line", toret
+
+class logfile_class:
+
+    def __init__(self,fname):
+        self.fname = fname
+        self.fp = open(fname)
+
+    def hostname(self):
+        pos = self.fp.tell()
+        self.seek(0)
+        toret = self.fp.readline()[16:].split(" ")[0]
+        self.fp.seek(pos)
+        return toret
+
+    def time_begin(self):
+        pos = self.fp.tell()
+        self.fp.seek(0)
+        toret = time.strptime(self.fp.readline()[0:15], "%b %d %H:%M:%S")
+        self.fp.seek(pos)
+        return toret
+
+    def time_end(self):
+        pos = self.fp.tell()
+        bs = 1024
+        if self.size() < bs: bs = self.size()
+        self.fp.seek(-bs, 2)
+        buf = self.fp.read(bs)
+        bufsplit = buf.split("\n")
+        bufsplit.reverse()
+        for line in bufsplit:
+            if len(line) == 0: continue
+            try: toret = time.strptime(line[0:15], "%b %d %H:%M:%S")
+            except ValueError: print "Error in conversion"; continue
+            else: break
+        self.fp.seek(pos)
+        return toret
+
+    def size(self):
+        return os.path.getsize(self.fname)
+
+    def eof(self):
+        return self.fp.tell() > self.size()
+
+    def readline(self):
+        return self.fp.readline()
+
+    def seek(self,pos):
+#        if cmdline["verbose"]:
+#            print "seeking to position %d for file %s" % (pos, self.fname)
+#            traceback.print_stack()
+        self.fp.seek(pos)
+
+    def tell(self):
+        return self.fp.tell()
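The classes above compose into one chronological stream: logfile_class wraps a single file, host_class chains a host's rotated logs into one seekable sequence, and cluster_class merges hosts and yields log_date_class objects whose per-host items iterate the matching log_line_class entries. A hedged sketch of driving it directly (the paths are hypothetical; hostnames are read from the log lines themselves, and the string exception mirrors what parse() catches):

    from logs_abstraction import cluster_class

    cluster = cluster_class()
    cluster.add_log("/tmp/sosreport-node1/var/log/messages")
    cluster.add_log("/tmp/sosreport-node2/var/log/messages")

    cluster.seek_beginning()

    for date in cluster:                 # log_date_class, oldest timestamp first
        for host in cluster.host_names():
            try:
                for line in date[host]:  # log_line_class entries at this timestamp
                    print str(date), host, line.daemon(), line.message()
            except "Eof":                # string exception raised by host_class.readline()
                pass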
diff --git a/src/extras/sos-html-logs/lib/parsers/__init__.py b/src/extras/sos-html-logs/lib/parsers/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/src/extras/sos-html-logs/lib/parsers/__init__.py
diff --git a/src/extras/sos-html-logs/lib/parsers/crazyclock_parser.py b/src/extras/sos-html-logs/lib/parsers/crazyclock_parser.py
new file mode 100644
index 00000000..3e794a6f
--- /dev/null
+++ b/src/extras/sos-html-logs/lib/parsers/crazyclock_parser.py
@@ -0,0 +1,24 @@
+import time
+import soshtmllogs.parsers_base as parsers_base
+
+class crazyclock_Parser(parsers_base.base_Parser_class):
+
+    def initialize(self):
+        # in this dict, we store the last date for each host
+        self.last_date = {}
+
+    def parse_line(self, date, log):
+
+        if date.date != log.date():
+            self.add_event(log, "Time skew (%d seconds in the past)" % int(time.mktime(date.date) - time.mktime(log.date())), "color:purple; background-color:yellow")
+
+        self.last_date[log.parent_host] = log.date()
+
+        return
+
+    def analyse_line(self, log):
+
+        yield """The following line matched the rule:<BR>"""
+        yield """<DIV STYLE="margin-top: 10px; padding: 10px 10px 10px 10px; margin-bottom: 10px; background-color: white; border: 1px dotted black;">%s</B></DIV>""" % log.line
+
+        yield "The logged time for this message is before the one for the previous message appearing in the log."
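crazyclock_Parser above illustrates the plugin contract: cluster_class.register_parser() collects parser instances and parse() hands every line to each one's parse_line(date, log). A minimal skeleton for a new parser follows; base_Parser_class and add_event() come from soshtmllogs.parsers_base, which is not part of this diff, so their exact semantics are assumed here, and the OOM rule is purely illustrative:

    import soshtmllogs.parsers_base as parsers_base

    class oom_Parser(parsers_base.base_Parser_class):   # hypothetical example

        def initialize(self):
            # one-time setup, e.g. per-host state as crazyclock_Parser does
            pass

        def parse_line(self, date, log):
            # log is a log_line_class; message() returns None when the line
            # does not match the syslog regex, so guard before substring tests
            if log.message() and "Out of memory" in log.message():
                self.add_event(log, "OOM killer invoked",
                               "color:white; background-color:red")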
diff --git a/src/extras/sos-html-logs/lib/parsers/dmapper_parser.py b/src/extras/sos-html-logs/lib/parsers/dmapper_parser.py
new file mode 100644
index 00000000..279df50e
--- /dev/null
+++ b/src/extras/sos-html-logs/lib/parsers/dmapper_parser.py
@@ -0,0 +1,106 @@
+import re
+import soshtmllogs.parsers_base as parsers_base
+
+class dmapper_Parser(parsers_base.base_Parser_class):
+    default_css = "color:black; background-color:orange; font-size:larger"
+
+    def initialize(self):
+        self.db.execute("create table dmapper(disk PRIMARY KEY, host INTEGER, channel INTEGER, id INTEGER, lun INTEGER)")
+
+    def parse_line(self, date, logline):
+
+        # kernel: sd 1:0:0:49152: Attached scsi disk sdt
+        found = re.findall(r"""^sd (.+):(.+):(.+):(.+): Attached scsi disk (.+)$""", logline.message())
+        if found:
+            # we can learn a little about the layout
+            found = found[0]
+            self.db.execute("""replace into dmapper(disk,host,channel,id,lun) values ("%s",%d,%d,%d,%d)"""
+                % (found[4], int(found[0]), int(found[1]), int(found[2]), int(found[3]))
+            )
+
+        found = re.findall(r"""^Attached scsi disk (.+) at scsi(.+), channel (.+), id (.+), lun (.+)$""", logline.message())
+        if found:
+            # we can learn a little about the layout
+            found = found[0]
+            self.db.execute("""replace into dmapper(disk,host,channel,id,lun) values ("%s",%d,%d,%d,%d)"""
+                % (found[0], int(found[1]), int(found[2]), int(found[3]), int(found[4]))
+            )
+
+        #Nov 7 12:55:38 itrac415 kernel: SCSI error : <2 0 3 0> return code = 0x20000
+        found = re.findall(r"""^SCSI error : <(.+)> return code = (.+)$""", logline.message())
+        if found:
+            found = found[0]
+            scsi_address_split = [ int(sid) for sid in found[0].split(" ") ]
+            results = self.db.execute_and_fetch("select disk from dmapper where host = %d AND channel = %d AND id = %d AND lun = %d" %
+                (scsi_address_split[0], scsi_address_split[1], scsi_address_split[2], scsi_address_split[3])
+            )
+            for row in results: found[0] = row["disk"]
+            self.add_event(logline, "SCSI error on %s - %s" % (found[0], self._get_did_error(found[1])), self.default_css)
+            return
+
+        found = re.findall(r"""^end_request: I/O error, dev (.*), sector .*$""", logline.message())
+        if found:
+            self.add_event(logline, "I/O error on %s" % (found[0]), self.default_css)
+            return
+
+        if logline.daemon() != "multipathd":
+            return
+
+        found = re.findall(r"""(.*): mark as failed$""", logline.message())
+        if found:
+            disk = self._get_disk_from_majmin(found[0])
+            self.add_event(logline, "Multipath path %s (%s) failed" % (found[0], disk), self.default_css)
+            return
+
+        found = re.findall(r"""(.*): reinstated$""", logline.message())
+        if found:
+            disk = self._get_disk_from_majmin(found[0])
+            self.add_event(logline, "Multipath path %s (%s) reinstated" % (found[0], disk), self.default_css)
+            return
+
+        return
+
+    def _get_disk_from_majmin(self, majmin):
+        major, minor = majmin.split(":")
+        major, minor = int(major), int(minor)
+
+        block_majors = [8, 65, 66, 67, 68, 69, 70, 71, 128, 129, 130, 131, 132, 133, 134, 135]
+
+        disk = (block_majors.index(major) * 16) + int(minor / 16)
+        partition = minor % 16
+
+        # 97 = ord('a')
+        # 25 = ord('z') - ord('a')
+
+        rchar = chr(97 + (disk % 25))
+
+        if disk > 25:
+            lchar = chr(97 - 1 + int(disk / 25))
+            rchar = chr(ord(rchar)-1)
+        else:
+            lchar = ""
+
+        return "sd" + lchar + rchar
+
+    def _get_did_error(self, hexerr):
+        # hexherr = 0x20000
+
+        if not hexerr.startswith("0x"):
+            return "Unknown error code (%s)" % hexerr
+
+        err = hexerr[2]
+
+        if err == "0": return "DID_OK (NO error)"
+        if err == "1": return "DID_NO_CONNECT (Couldn\'t connect before timeout period)"
+        if err == "2": return "DID_BUS_BUSY (BUS stayed busy through time out period)"
+        if err == "3": return "DID_TIME_OUT (TIMED OUT for other reason)"
+        if err == "4": return "DID_BAD_TARGET (BAD target)"
+        if err == "5": return "DID_ABORT (Told to abort for some other reason)"
+        if err == "6": return "DID_PARITY (Parity error)"
+        if err == "7": return "DID_ERROR (Internal error)"
+        if err == "8": return "DID_RESET (Reset by somebody)"
+        if err == "9": return "DID_BAD_INTR (Got an interrupt we weren't expecting)"
+        if err == "a": return "DID_PASSTHROUGH (Force command past mid-layer)"
+        if err == "b": return "DID_SOFT_ERROR (The low level driver just wish a retry)"
+        if err == "c": return "DID_IMM_RETRY (Retry without decrementing retry count)"
+        if err == "d": return "DID_REQUEUE (Requeue command (no immediate retry) also without decrementing the retry count)"
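The _get_disk_from_majmin() arithmetic is easiest to follow with a worked example. Linux gives every SCSI disk 16 consecutive minor numbers, and block_majors lists the sd majors in the order their disk ranges are allocated, so a multipathd path reported as "8:48" resolves like this:

    major, minor = 8, 48
    block_majors = [8, 65, 66, 67, 68, 69, 70, 71,
                    128, 129, 130, 131, 132, 133, 134, 135]

    disk = (block_majors.index(major) * 16) + int(minor / 16)   # 0*16 + 3 = 3
    partition = minor % 16                                      # 0 = whole disk
    print "sd" + chr(97 + (disk % 25))                          # -> "sdd"

One edge case worth noting: at disk index exactly 25, disk % 25 wraps to 0 while disk > 25 stays false, so sdz would come back as "sda"; the two-letter carry (sdaa and up) only engages from index 26.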
"DID_NO_CONNECT (Couldn\\'t connect before timeout period)" + if err == "2": return "DID_BUS_BUSY (BUS stayed busy through time out period)" + if err == "3": return "DID_TIME_OUT (TIMED OUT for other reason)" + if err == "4": return "DID_BAD_TARGET (BAD target)" + if err == "5": return "DID_ABORT (Told to abort for some other reason)" + if err == "6": return "DID_PARITY (Parity error)" + if err == "7": return "DID_ERROR (Internal error)" + if err == "8": return "DID_RESET (Reset by somebody)" + if err == "9": return "DID_BAD_INTR (Got an interrupt we weren't expecting)" + if err == "a": return "DID_PASSTHROUGH (Force command past mid-layer)" + if err == "b": return "DID_SOFT_ERROR (The low level driver just wish a retry)" + if err == "c": return "DID_IMM_RETRY (Retry without decrementing retry count)" + if err == "d": return "DID_REQUEUE (Requeue command (no immediate retry) also without decrementing the retry count)" diff --git a/src/extras/sos-html-logs/lib/parsers/simple_parser.py b/src/extras/sos-html-logs/lib/parsers/simple_parser.py new file mode 100644 index 00000000..9beb10f8 --- /dev/null +++ b/src/extras/sos-html-logs/lib/parsers/simple_parser.py @@ -0,0 +1,29 @@ +import ConfigParser, re +import soshtmllogs.parsers_base as parsers_base + +class simple_Parser(parsers_base.base_Parser_class): + + def initialize(self): + self.config = ConfigParser.ConfigParser() + self.config.readfp(open('/usr/lib/python2.4/site-packages/soshtmllogs/parsers/simple_parser.rules')) + + def parse_line(self, date, log): + + for section in self.config.sections(): + match = False + + if self.config.has_option(section, "find"): + if log.line.find(self.config.get(section, "find")) >= 0: + match = True + elif self.config.has_option(section, "regex"): + if re.match(self.config.get(section, "regex"), log.line): + match = True + + if not match: + continue + + self.add_event(log, section, "color:green; background-color:yellow; font-size:larger") + + return + + return None diff --git a/src/extras/sos-html-logs/lib/parsers/simple_parser.rules b/src/extras/sos-html-logs/lib/parsers/simple_parser.rules new file mode 100644 index 00000000..724fbfa8 --- /dev/null +++ b/src/extras/sos-html-logs/lib/parsers/simple_parser.rules @@ -0,0 +1,38 @@ +[Syslog restart (possible system restart)] +regex = .*syslogd .*: restart.$ + +[System reboot] +regex = ^kernel: Linux version + +[Fencing node] +regex = ^.*fencing node + +[Fencing node succeeded] +regex = .*fence ".*" success + +[Fencing node failed] +regex = .*fence ".* + +[Quorum lost] +find = quorum lost, blocking activity + +[Quorum regained] +find = quorum regained, resuming activity + +[Segmentation fault] +find = segfault at + +[Inittab reloaded] +find = Re-reading inittab + +[Init line respawning too fast] +find = respawning too fast: disabled for + +[Ext3 file-system error] +find = EXT3-fs error + +[File-system remounted read-only] +find = Remounting filesystem read-only + +[Shutting down] +find = shutdown: shutting down |