# Directory    : /usr/lib/fm-agent/library/
# Current file : /usr/lib/fm-agent/library/plugin_manager.py
from agent_util import Plugin
import logging
import os
import shutil
import sys
import tarfile
import tempfile
import traceback
import zipfile

try:
    from hashlib import sha1
except ImportError:
    # Extremely old Pythons only.
    from sha import sha as sha1
try:
    # Python 2.x
    import urllib2
except ImportError:
    import urllib.request as urllib2  # In case of python 3
try:
    import ConfigParser as configparser  # Python 2
except ImportError:
    import configparser


def execfile3(filepath, globals=None, locals=None):
    """Python 3 replacement for the removed ``execfile()`` builtin.

    Executes *filepath* with ``__name__`` set to ``"__main__"`` so a plugin
    file runs its registration code exactly as it did under Python 2.

    :param filepath: path of the Python file to execute
    :param globals: globals dict for the exec (created if ``None``); it is
        mutated in place, which is how callers retrieve the defined names
    :param locals: optional locals dict for the exec
    """
    if globals is None:
        globals = {}
    globals.update(
        {
            "__file__": filepath,
            "__name__": "__main__",
        }
    )
    # Context manager so the handle is closed even when the executed
    # plugin raises (the original leaked the file object on error).
    with open(filepath, "rb") as f:
        exec(compile(f.read(), filepath, "exec"), globals, locals)


class PluginManager(object):
    """Discovers, configures and runs agent plugins from a set of directories."""

    def __init__(self, db, config_file, *directories):
        """
        :param db: dict-like agent database (read for docker containers and
            the last pushed metadata hash)
        :param config_file: path to the agent INI config file
        :param directories: plugin directories; the LAST one is where
            remotely installed plugins are written
        """
        self.log = logging.getLogger(self.__class__.__name__)
        self.db = db
        self.config_file = config_file
        self.directories = directories
        self.plugins = self._load_plugins(directories)

        # Internal containers for metadata so we can avoid repeatedly
        # regenerating it.
        self._metadata = None
        self._hashed_metadata = None

        # Now we load all the plugin-specific configs and map them to
        # self.config, keyed by plugin textkey.
        self.config = {}
        if os.path.exists(self.config_file):
            parser = configparser.ConfigParser()
            parser.read(self.config_file)
            for textkey in parser.sections():
                try:
                    debug = parser.get("agent", "debug")
                except Exception:
                    # No [agent] section / no debug option configured.
                    debug = False
                self.config[textkey] = {"debug": debug}
                for k, v in parser.items(textkey):
                    self.config[textkey][k] = v

    def add_dem_wifi_results(self, wifi_results):
        """Attach WiFi scan results to the DEM plugin's config, if present."""
        dem_config = self.config.get("dem", None)
        if dem_config:
            dem_config["wifi_results"] = wifi_results

    def install_remote_plugins(self, url):
        """
        Grab the remote url and extract the contents, putting them in the
        appropriate plugin folder (the last configured plugin directory).

        Supports .tar/.tgz/.zip archives (only their ``*.py`` members are
        extracted) and bare ``.py`` files. Errors are logged, not raised.
        """
        base_custom_plugin_dir = self.directories[-1]
        f = tempfile.NamedTemporaryFile(delete=False)
        num_installed = 0
        ext = url.split(".")[-1]
        fname = url.split("/")[-1]
        try:
            try:
                r = urllib2.urlopen(url)
                if "content-disposition" in r.info():
                    # Message.get() exists on both Python 2 (rfc822.Message)
                    # and Python 3 (email.message.Message); the original
                    # getheader() call is py2-only and raised AttributeError
                    # on py3, masquerading as a download failure.
                    ext = r.info().get("content-disposition").split(".")[-1]
                f.write(r.read())
                f.close()
            except Exception:
                self.log.error("Unable to download URL: %s" % traceback.format_exc())
                return

            if ext in ("tar", "tgz"):
                try:
                    t = tarfile.open(f.name)
                    for member in t.getnames():
                        # Only install Python sources from the archive.
                        if member.endswith(".py"):
                            self.log.info(" Installing plugin %s" % member)
                            t.extract(member, base_custom_plugin_dir)
                            num_installed += 1
                except Exception:
                    self.log.error(
                        "Unable to extract tar contents: %s" % traceback.format_exc()
                    )
            elif ext == "zip":
                try:
                    z = zipfile.ZipFile(f.name)
                    for member in z.namelist():
                        if member.endswith(".py"):
                            self.log.info(" Installing plugin %s" % member)
                            z.extract(member, base_custom_plugin_dir)
                            num_installed += 1
                except Exception:
                    self.log.error(
                        "Unable to extract zip contents: %s" % traceback.format_exc()
                    )
            elif ext == "py":
                self.log.info(" Installing plugin %s" % fname)
                # shutil.copy instead of os.system("cp ...") — no shell, so
                # no quoting/injection issues with the URL-derived filename.
                shutil.copy(f.name, os.path.join(base_custom_plugin_dir, fname))
                num_installed += 1
            else:
                self.log.error(
                    "Unable to install Agent Plugin, unknown extension: %s" % ext
                )
            if num_installed:
                self.log.info("\nInstalled %s Agent plugins" % num_installed)
        finally:
            # Always remove the temp download, even when the download or
            # extraction failed (the original leaked it on early return).
            try:
                os.remove(f.name)
            except OSError:
                pass

    # run a specific plugin
    def check(self, schedule):
        """Run the plugin check described by *schedule*.

        Resolves docker-container schedules against ``self.db``, applies the
        plugin's configured ``scale`` factor, and returns the (scaled) value,
        or ``None`` when the check fails or the container cannot be found.
        """
        try:
            self_key = self.config.get("agent", {}).get("server_key")
            container = None
            # A schedule whose server_key differs from ours targets a docker
            # container; the container id is the suffix of the key.
            if (
                getattr(schedule, "server_key", None)
                and schedule.server_key != self_key
            ):
                container_id = schedule.server_key.split(":")[-1]
                try:
                    container = self.db["docker_containers"][container_id]
                except Exception:
                    if (
                        schedule.plugin_textkey == "docker"
                        and schedule.resource_textkey == "status.running"
                    ):
                        # Special handling of "container is running" metrics.
                        # Pass in a phony value to treat it as gone.
                        container = {"State": "deleted"}
                    else:
                        self.log.error(
                            "Could not find container with id %s", container_id
                        )
                        return None

            plugin = self.plugins[schedule.plugin_textkey](schedule)
            config = self.config.get(schedule.plugin_textkey, {})
            scale = config.get("scale", 1.0)
            if container:
                value = plugin.check_docker(
                    container, schedule.resource_textkey, schedule.option, config
                )
            else:
                value = plugin.check(schedule.resource_textkey, schedule.option, config)
            if value is not None:
                value *= scale
        except Exception:
            self.log.exception(
                "%s/%s check failed: %s"
                % (
                    schedule.plugin_textkey,
                    schedule.resource_textkey,
                    traceback.format_exc(),
                )
            )
            return None
        return value

    def _load_plugins(self, directories):
        """Execute every ``*.py`` file in *directories* and collect the
        ``Plugin`` subclasses they define, keyed by their ``textkey``.

        Plugins whose textkey appears in the agent's ``plugin_blacklist``
        config option (comma-separated) are skipped, as is the template
        plugin. Files that fail to execute are logged and skipped.
        """
        plugins = {}
        plugin_blacklist = ""
        if os.path.exists(self.config_file):
            parser = configparser.ConfigParser()
            parser.read(self.config_file)
            try:
                plugin_blacklist = parser.get("agent", "plugin_blacklist")
            except Exception:
                plugin_blacklist = ""
            if plugin_blacklist:
                plugin_blacklist = list(map(str.strip, plugin_blacklist.split(",")))

        def plugin_file_checker():
            # Generator yielding every top-level object defined by every
            # plugin file; closes over `plugins` so the final count it logs
            # reflects the fully-populated dict once it is exhausted.
            for plugin_dir in directories:
                self.log.info("Looking in directory %r", plugin_dir)
                for name in os.listdir(plugin_dir):
                    try:
                        if name[-3:] != ".py" or name == "__init__.py":
                            continue
                        plugin_file = os.path.join(plugin_dir, name)
                        # We've registed sys.modules['agent'] to equal THIS
                        # module, so this exec has the effect of 'importing'
                        # the plugin, and resolving 'import agent' to import
                        # this module. This is so we never have to change the
                        # plugins in order to whitelabel.
                        f_globals = {}
                        f_locals = {}
                        if sys.version_info[0] == 3:
                            execfile3(plugin_file, f_globals, f_globals)
                        else:
                            execfile(plugin_file, f_globals, f_globals)
                        for obj in list(f_globals.values()):
                            yield obj
                        self.log.debug("Loaded %r", plugin_file)
                    except Exception:
                        t, e = sys.exc_info()[:2]
                        self.log.info("Error importing plugin %s, skipping", name)
                        self.log.debug(e)
                        continue
            self.log.info("Loaded %d plugins", len(plugins))

        for obj in plugin_file_checker():
            if isinstance(obj, type):
                if (
                    issubclass(obj, Plugin)
                    and hasattr(obj, "textkey")
                    and obj.textkey not in plugin_blacklist
                ):
                    if obj.textkey != "TEMPLATE_PLUGIN":
                        plugins[obj.textkey] = obj
        return plugins

    # returns all the values needed to describe the plugins and their
    # resources in a json-serializable way. Uses local cache of values to
    # avoid recomputing.
    @property
    def metadata(self):
        if self._metadata is None:
            # We haven't previously built the metadata - go do that and
            # cache for later.
            metadata = {}
            for plugin in list(self.plugins.values()):
                # get the config and push it up with the metadata
                config = self.config.get(plugin.textkey, {})
                try:
                    meta = format_metadata(plugin.get_metadata(config))
                except Exception:
                    self.log.exception(
                        "error getting metadata from %r", plugin.textkey
                    )
                    continue
                self.log.info(
                    "got %d textkeys for %r", len(list(meta.keys())), plugin.textkey
                )
                label = getattr(plugin, "label", plugin.textkey)
                metadata[plugin.textkey] = (label, meta)
            self._metadata = metadata
        return self._metadata

    # returns the metadata hash, used for comparing whether or not our
    # metadata has changed since we last pushed. Uses local cache of values
    # to avoid recomputing.
    def hashed_metadata(self):
        if self._hashed_metadata is None:
            # Build an order-independent flattened representation so the
            # hash is stable across dict-iteration orders.
            flat_data = []
            for textkey, (label, plugin_metadata) in list(self.metadata.items()):
                plugin_options = []
                for resource_name, resource_metadata in list(plugin_metadata.items()):
                    plugin_options.append(repr((resource_name, resource_metadata)))
                plugin_options.sort()
                flat_data.append(repr((textkey, plugin_options, label)))
            flat_data.sort()
            self._hashed_metadata = sha1(repr(flat_data).encode()).hexdigest()
        return self._hashed_metadata

    def is_metadata_stale(self):
        """True when our current metadata hash differs from the last pushed one."""
        self.log.info(
            "comparing metadata hashes %r with %r",
            self.hashed_metadata(),
            self.db["last_metadata"],
        )
        return self.db["last_metadata"] != self.hashed_metadata()


def format_metadata(metadata):
    """
    Go over the metadata entry and make sure the option resources are
    matching to the new style of dictionaries if they are a string.
    """
    for resource, entry in metadata.items():
        options = entry["options"]
        new_options = []
        if not options:
            continue
        for option in options:
            if sys.version_info[0] == 3:
                if isinstance(
                    option, str
                ):  # TODO Verify that unicode is present on old python installs.
                    new_options.append({"resource": option})
            else:
                if isinstance(
                    option,
                    (str, unicode),
                ):  # TODO Verify that unicode is present on old python installs.
                    new_options.append({"resource": option})
        if new_options:
            entry["options"] = new_options
            if not entry.get("options_schema"):
                entry["options_schema"] = {"resource": "string"}
    return metadata