# Directory    : /usr/lib/fm-agent/plugins/
# Current file : /usr/lib/fm-agent/plugins/linux_logs.py
# -*- coding: utf-8 -*-
import agent_util
import logging
from glob import glob

from library.log_matcher import LogMatcher


class LinuxLogsPlugin(agent_util.Plugin):
    """Agent plugin that counts log-file lines matching a filter expression.

    Each scheduled check scans one file (or a glob of files), matches new
    lines against a glob-style expression via LogMatcher, and reports the
    total number of matches since the last run.
    """

    textkey = "linux_log"
    label = "Event log"

    @classmethod
    def get_metadata(cls, config):
        """Return the metric descriptors this plugin exposes.

        `config` is accepted for interface compatibility; it is not used here.
        """
        data = {
            "count": {
                "label": "Event entry count",
                "options": None,
                "status": agent_util.SUPPORTED,
                "error_message": None,
                "unit": "count",
                "option_string": True,
            }
        }
        return data

    def check(self, textkey, data, config=None):
        """Count new log lines matching the check's filter expression.

        Keys read from `data` (set by the check schedule):
          log_source -- file path, optionally containing glob wildcards
          timescale  -- passed through to LogMatcher.calculate_metric
          filter     -- glob-style expression; falsy means "match everything"

        Returns the total match count summed over every readable target
        file.  Unreadable files are logged and skipped.
        """
        # NOTE: `config` had a mutable default ({}) in the original
        # signature; None avoids the shared-mutable-default pitfall and
        # is backward-compatible.  It is unused in this method.
        log_source = data.get("log_source")
        timescale = data.get("timescale")
        expression = data.get("filter")

        log_source = log_source.strip(" ")
        if "*" in log_source or "?" in log_source:
            # Wildcards present: expand to every matching file.
            files = glob(log_source)
        else:
            files = [log_source]

        file_inodes = {}
        total_metrics = 0

        # Translate the user's glob-style expression into a regex fragment.
        expression = expression or "*"
        expression = expression.replace("*", ".*")
        expression = expression.replace('""', ".*")

        for target in files:
            # Record the file's current inode so log rotation can be detected.
            try:
                file_inodes[target] = LogMatcher.get_file_inode(target)
            except OSError as error:
                logging.error("Error opening %s file." % (target))
                logging.error(error)
                continue

            # Pull the previous run's state for this file from the agent cache.
            log_data = self.get_cache_results(
                textkey, "%s/%s" % (self.schedule.id, target)
            )
            if log_data:
                log_data = log_data[0][-1]
            else:
                log_data = dict()

            last_line_number = log_data.get("last_known_line")
            stored_inode = log_data.get("inode")
            # NOTE(review): this cached value is overwritten by
            # log_matcher.match() below before being used -- presumably
            # intentional, but worth confirming against LogMatcher.
            results = log_data.get("results", [])

            # Read the lines added since the last recorded position.
            try:
                total_lines, current_lines = LogMatcher.get_file_lines(
                    last_line_number, target, file_inodes[target], stored_inode
                )
            except IOError as e:
                # BUG FIX: the original `return None` here discarded the
                # counts already accumulated for earlier files; skip this
                # file instead, consistent with the OSError handling above.
                logging.error("Could not open file: %s" % str(e))
                continue

            logging.info(
                "Stored line %s Current line %s Looking at %s lines"
                % (str(last_line_number), str(total_lines), str(len(current_lines)))
            )

            # Match the expression against the new lines and derive the metric.
            log_matcher = LogMatcher(stored_inode)
            results = log_matcher.match(current_lines, expression)
            metric, results = log_matcher.calculate_metric(results, timescale)
            total_metrics += metric or 0

            # Persist state so the next run resumes where this one stopped.
            cache_data = dict(
                inode=file_inodes[target],
                last_known_line=total_lines,
                results=results,
            )
            self.cache_result(
                textkey, "%s/%s" % (self.schedule.id, target), cache_data
            )
            logging.info(
                'Found %s instances of "%s" in %s'
                % (str(metric or 0), expression, target)
            )

        return total_metrics