# /usr/lib/fm-agent/plugins/nginx.py
import re
import agent_util
import logging
import glob
from library.log_matcher import LogMatcher

try:
    # Python3
    from urllib.request import urlopen
except ImportError:
    # Python2
    from urllib2 import urlopen

logger = logging.getLogger(__name__)


def execute_query(query):
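    """Run a shell command via agent_util and return its output as a string."""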
    ret, output = agent_util.execute_command(query)
    return str(output)


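# Regular expressions matched against the HTTP status column of the access
# log, keyed by the metric textkey they feed.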
LOG_COUNT_EXPRESSIONS = {"4xx": r"4\d{2}", "5xx": r"5\d{2}", "2xx": r"2\d{2}"}
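# Access log scanned by default; additional paths can be supplied via the
# plugin config.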
DEFAULT_NGINX_LOG = "/var/log/nginx/access.log"


class NginxPlugin(agent_util.Plugin):
    textkey = "nginx"
    label = "Nginx"

    DEFAULTS = {"console_url": "http://localhost"}

    @classmethod
    def get_metadata(self, config):
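        """Describe the metrics this plugin exposes and verify that the
        stub_status endpoint at <console_url>/nginx_status responds."""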
        status = agent_util.SUPPORTED
        msg = None

        # check if nginx is even installed or running
        installed = agent_util.which("nginx")
        if not installed and not config.get("from_docker"):
            self.log.info("nginx binary not found")
            status = agent_util.UNSUPPORTED
            return {}

        if "console_url" not in config:
            config.update(self.DEFAULTS)

        if status == agent_util.SUPPORTED and not config.get("from_docker"):
            query = "%s/nginx_status" % config["console_url"]
            try:
                nginxStatus = urlopen(query).read().decode()
            except Exception:
                nginxStatus = None
            if config.get("debug", False):
                self.log.debug("Nginx status URL '%s' response:" % query)
                self.log.debug(str(nginxStatus))

            if not nginxStatus:
                status = agent_util.MISCONFIGURED
                msg = "The nginx_status path is not configured or not reachable."

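        # Every metric shares the same support status and error message; only
        # the label and unit differ.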
        data = {
            "active_connections": {
                "label": "Number of open connections",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "connections",
            },
            "accepted_connections": {
                "label": "Number of accepted connections per second",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "connections/s",
            },
            "dropped_connections": {
                "label": "Number of dropped connections per second",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "connections/s",
            },
            "handled_connections": {
                "label": "Number of handled connections per second",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "connections/s",
            },
            "requests_per_second": {
                "label": "Average requests per second",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "requests/s",
            },
            "requests_per_connection": {
                "label": "Number of requests per connection",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "requests",
            },
            "nginx_reading": {
                "label": "Read request header",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "requests/s",
            },
            "nginx_writing": {
                "label": "Read request body",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "requests/s",
            },
            "nginx_waiting": {
                "label": "Keep alive connections",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "connections/s",
            },
            "4xx": {
                "label": "Rate of 4xx's events",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "entries/s",
            },
            "2xx": {
                "label": "Rate of 2xx's events",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "entries/s",
            },
            "5xx": {
                "label": "Rate of 5xx's events",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "entries/s",
            },
        }

        return data

    @classmethod
    def get_metadata_docker(self, container, config):
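        """Same as get_metadata(), but point console_url at the container's IP."""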
        if "console_url" not in config:
            try:
                ip = agent_util.get_container_ip(container)
                config["console_url"] = "http://%s" % ip
            except Exception:
                import sys

                _, e, _ = sys.exc_info()
                self.log.exception(e)

        config["from_docker"] = True

        return self.get_metadata(config)

    def _calculate_delta(self, textkey, value, is_rate=True):
        """
        Extract the previous cached value, calculate the delta,
        and store the current one.
        """
        cached = self.get_cache_results("nginx:%s" % textkey, None)
        if not cached:
            self.log.info("Empty nginx cache! Building for first time")
            self.cache_result("nginx:%s" % textkey, None, value, replace=True)
            return None
        delta, previous_value = cached[0]
        self.cache_result("nginx:%s" % textkey, None, value, replace=True)
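        # A smaller value than the previous sample usually means nginx was
        # restarted and its counters reset, so skip this interval instead of
        # reporting a negative rate.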
        if previous_value > value:
            return None
        if is_rate:
            return (value - previous_value) / float(delta)
        else:
            return value - previous_value

    def check(self, textkey, data, config):
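        """Collect a single metric, either by parsing the stub_status page or
        by scanning access logs for 2xx/4xx/5xx response counts."""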
        if not config.get("console_url"):
            config.update(self.DEFAULTS)
        result = urlopen("%s/nginx_status" % config["console_url"]).read().decode()
        statLines = result.split("\n")
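        # stub_status output looks like:
        #   Active connections: 291                 <- statLines[0]
        #   server accepts handled requests         <- statLines[1]
        #    16630948 16630948 31070465             <- statLines[2]
        #   Reading: 6 Writing: 179 Waiting: 106    <- statLines[3]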
        p = re.compile(r"(\d+)")
        connections = p.findall(statLines[2])
        connectionsByStatus = p.findall(statLines[3])
        result = 0
        status_map = {
            "nginx_reading": int(connectionsByStatus[0]),
            "nginx_writing": int(connectionsByStatus[1]),
            "nginx_waiting": int(connectionsByStatus[2]),
        }
        if textkey == "active_connections":
            active_connections = p.findall(statLines[0])
            result = int(active_connections[0])
        elif textkey == "requests_per_connection":
            active_connections = p.findall(statLines[0])
            active_connections = int(active_connections[0])
            requests = int(connections[2])
            requests_textkey = "%s:%s" % (textkey, "requests")
            requests_diff = self._calculate_delta(
                requests_textkey, requests, is_rate=False
            )
            if active_connections and requests_diff:
                return requests_diff / active_connections
            else:
                return None
        # All these values use the delta calculation method
        elif textkey in (
            "nginx_reading",
            "nginx_writing",
            "nginx_waiting",
            "requests_per_second",
            "accepted_connections",
            "handled_connections",
            "dropped_connections",
        ):
            # The only difference is in how they get the current value
            if textkey in ("nginx_reading", "nginx_writing", "nginx_waiting"):
                current_res = status_map[textkey]
            elif textkey == "accepted_connections":
                current_res = int(connections[0])
            elif textkey == "handled_connections":
                current_res = int(connections[1])
            elif textkey in ("requests_per_second"):
                current_res = int(connections[2])
            elif textkey in ("dropped_connections"):
                current_res = int(connections[0]) - int(connections[1])
            return self._calculate_delta(textkey, current_res)

        # Handle the log count metrics
        elif textkey in ("4xx", "5xx", "2xx"):
            log_files = [DEFAULT_NGINX_LOG]
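            # Any config entry other than "debug" and "console_url" is treated
            # as an extra access-log path and may contain a shell-style glob.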
            for key, value in config.items():
                if key not in ["debug", "console_url"]:
                    value = value.strip('"').strip("'")
                    if "*" in value:
                        log_files += glob.glob(value)
                    else:
                        log_files += [value]
            file_inodes = {}
            total_metrics = 0
            timescale = 1
            column = 8
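            # Column 8 corresponds to the HTTP status code, the 9th
            # whitespace-separated field in nginx's default combined log format.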

            expression = LOG_COUNT_EXPRESSIONS.get(textkey)

            for target in log_files:
                # Extract the file current inode
                try:
                    file_inodes[target] = LogMatcher.get_file_inode(target)
                except OSError:
                    import sys

                    _, error, _ = sys.exc_info()
                    logger.error("Error opening log file %s." % target)
                    logger.error(error)
                    continue

                # Extract data from the agent cache about the check
                log_data = self.get_cache_results(
                    textkey, "%s/%s" % (self.schedule.id, target)
                )
                if log_data:
                    log_data = log_data[0][-1]
                else:
                    log_data = dict()

                last_line_number = log_data.get("last_known_line")
                stored_inode = log_data.get("inode")
                results = log_data.get("results", [])

                # Extract the lines of the file.
                try:
                    total_lines, current_lines = LogMatcher.get_file_lines(
                        last_line_number, target, file_inodes[target], stored_inode
                    )
                except IOError:
                    import sys

                    _, e, _ = sys.exc_info()
                    logger.error(
                        "Unable to read log file: %s. Make sure the fm-agent user belongs to the adm group"
                        % str(e)
                    )
                    continue

                logger.info(
                    "Stored line %s, current line %s; looking at %s lines"
                    % (str(last_line_number), str(total_lines), str(len(current_lines)))
                )
                # Perform the matching of the expression in the lines
                log_matcher = LogMatcher(stored_inode)
                results = log_matcher.match_in_column(current_lines, expression, column)
                metric, results = log_matcher.calculate_metric(results, timescale)
                total_metrics += metric or 0
                logger.info(
                    'Found %s instances of "%s" in %s'
                    % (str(metric or 0), expression, target)
                )

                previous_result = self.get_cache_results(
                    textkey, "%s/%s" % (self.schedule.id, target)
                )
                cache_data = dict(
                    inode=file_inodes[target],
                    last_known_line=total_lines,
                    results=results,
                )
                self.cache_result(
                    textkey,
                    "%s/%s" % (self.schedule.id, target),
                    cache_data,
                    replace=True,
                )
                if not previous_result:
                    result = None
                else:
                    delta, prev_data = previous_result[0]
                    try:
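                        # Indexing into "results" raises IndexError (caught
                        # below) when a scan recorded no matches, in which case
                        # no value is reported for this interval.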
                        prev_count = prev_data.get("results")[0][-1]
                        curr_count = cache_data.get("results")[0][-1]
                        result = curr_count / float(delta)
                    except IndexError:
                        result = None
        return result

    def check_docker(self, container, textkey, data, config):
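        """Run check() against the container's IP rather than localhost."""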
        if "console_url" not in config:
            try:
                ip = agent_util.get_container_ip(container)
                config["console_url"] = "http://%s" % ip
            except Exception:
                import sys

                _, e, _ = sys.exc_info()
                self.log.exception(e)

        config["from_docker"] = True

        return self.check(textkey, data, config)
