author     Jake Hunsaker <jhunsake@redhat.com>    2020-04-27 14:57:12 -0400
committer  Jake Hunsaker <jhunsake@redhat.com>    2020-05-29 13:06:36 -0400
commit     ad037308df9c08879918b5e4cf7481d7a71bbe0f (patch)
tree       98e9eba441510d1f70daef2855526530101ad825
parent     6150afbbc7db76743553b7284d382f4ce8b99d94 (diff)
[component|collect|report] Add metadata collection
Adds facilities to capture metadata about a particular sos run and save it to any resulting archive as `manifest.json`. This manifest includes component-specific information that should assist in analysis of sos runs.

For `report`, this is an extension of the existing reporting functionality that records information on a per-plugin basis about command output and file collection. However, more information is recorded here than in the JSON or HTML reporting structures that are built by reviewing a finished archive. Manifest data is added at the time of collection; for commands this includes a breakout of the command, its parameters, the full execution syntax, the filepath relative to the archive root, and the actual return code of the command, even if non-zero. For files, the copied files are grouped by the specification set in `add_copy_spec()` calls, i.e. for any file pattern that collects at least one file, all files matched by that pattern are listed.

Additionally, this adds a `tags` parameter to `add_copy_spec()`, `add_cmd_output()`, and `collect_cmd_output()` that, if used, adds tagging information to the manifest entries for those collections. This is intended to assist future sos work (e.g. `sos examine`) and third party utilities in identifying relevant or connected data present in the archive. Note that all collections specified by a single call to any of these collection methods inherit the same tags.

For `collect`, we now record the cluster type, the node list, and a separate entry for each node in the node list that includes the sos version, the node's sos policy, and the archive within the collect tarball that was collected for that host.

Closes: #1990
Resolves: #2050

Signed-off-by: Jake Hunsaker <jhunsake@redhat.com>
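As a quick illustration of the `tags` parameter described above, a plugin might tag its collections along these lines. This is a minimal, hypothetical sketch: the plugin name, file paths, and commands are invented purely to show the new call signatures.

    from sos.report.plugins import Plugin, RedHatPlugin

    class ExampleTags(Plugin, RedHatPlugin):
        """Hypothetical plugin used only to illustrate manifest tagging"""

        plugin_name = 'example_tags'

        def setup(self):
            # every file matched by this spec shares the same tag in the
            # manifest 'files' entry for this plugin; a string tag is
            # normalized to a single-item list
            self.add_copy_spec('/etc/sysconfig/network-scripts/ifcfg-*',
                               tags='network_config')
            # both commands from this single call inherit the same tag list
            self.add_cmd_output(['ip addr', 'ip route'],
                                tags=['networking'])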
-rw-r--r--  sos/archive.py                       23
-rw-r--r--  sos/collector/__init__.py            31
-rw-r--r--  sos/collector/clusters/__init__.py    4
-rw-r--r--  sos/collector/clusters/ovirt.py       5
-rw-r--r--  sos/collector/sosnode.py             13
-rw-r--r--  sos/component.py                     61
-rw-r--r--  sos/report/__init__.py               42
-rw-r--r--  sos/report/plugins/__init__.py       84
8 files changed, 238 insertions, 25 deletions
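Before the diff itself, here is a minimal sketch of how the new SoSMetadata class (added in sos/component.py below) can be exercised. The field values are invented, but the add_field(), add_section(), add_list(), and get_json() calls match the class as introduced:

    from sos.component import SoSMetadata

    meta = SoSMetadata()
    meta.add_field('version', '3.9')    # illustrative value only
    meta.add_section('components')
    meta.components.add_section('report')
    meta.components.report.add_list('commands', [])
    # entries appended to a manifest list show up in the JSON output;
    # this dict is an invented example, not real command output
    meta.components.report.commands.append({'command': 'ip',
                                            'return_code': 0})
    # nested SoSMetadata sections serialize as nested JSON objects
    print(meta.get_json(indent=4))

Components hang their own sections off the top-level 'components' section, which is why report and collect each keep a shorthand reference (report_md and collect_md) in the hunks that follow.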
diff --git a/sos/archive.py b/sos/archive.py
index b2256f2e..128e044f 100644
--- a/sos/archive.py
+++ b/sos/archive.py
@@ -16,6 +16,7 @@ import logging
import codecs
import errno
import stat
+from datetime import datetime
from threading import Lock
from sos.utilities import sos_get_command_output, is_executable
@@ -130,13 +131,15 @@ class FileCacheArchive(Archive):
_archive_root = ""
_archive_name = ""
- def __init__(self, name, tmpdir, policy, threads, enc_opts, sysroot):
+ def __init__(self, name, tmpdir, policy, threads, enc_opts, sysroot,
+ manifest=None):
self._name = name
self._tmp_dir = tmpdir
self._policy = policy
self._threads = threads
self.enc_opts = enc_opts
self.sysroot = sysroot or '/'
+ self.manifest = manifest
self._archive_root = os.path.join(tmpdir, name)
with self._path_lock:
os.makedirs(self._archive_root, 0o700)
@@ -532,6 +535,19 @@ class FileCacheArchive(Archive):
if os.path.isdir(self._archive_root):
shutil.rmtree(self._archive_root)
+ def add_final_manifest_data(self, method):
+ """Adds component-agnostic data to the manifest so that individual
+ SoSComponents do not need to redundantly add these manually
+ """
+ end = datetime.now()
+ start = self.manifest.start_time
+ run_time = end - start
+ self.manifest.add_field('end_time', end)
+ self.manifest.add_field('run_time', run_time)
+ self.manifest.add_field('compression', method)
+ self.add_string(self.manifest.get_json(indent=4),
+ os.path.join('sos_reports', 'manifest.json'))
+
def finalize(self, method):
self.log_info("finalizing archive '%s' using method '%s'"
% (self._archive_root, method))
@@ -607,9 +623,10 @@ class TarFileArchive(FileCacheArchive):
method = None
_with_selinux_context = False
- def __init__(self, name, tmpdir, policy, threads, enc_opts, sysroot):
+ def __init__(self, name, tmpdir, policy, threads, enc_opts, sysroot,
+ manifest=None):
super(TarFileArchive, self).__init__(name, tmpdir, policy, threads,
- enc_opts, sysroot)
+ enc_opts, sysroot, manifest)
self._suffix = "tar"
self._archive_name = os.path.join(tmpdir, self.name())
diff --git a/sos/collector/__init__.py b/sos/collector/__init__.py
index 300f518e..1bb491e9 100644
--- a/sos/collector/__init__.py
+++ b/sos/collector/__init__.py
@@ -97,6 +97,14 @@ class SoSCollector(SoSComponent):
self.retrieved = 0
self.cluster = None
self.cluster_type = None
+
+ # add manifest section for collect
+ self.manifest.components.add_section('collect')
+ # shorthand reference
+ self.collect_md = self.manifest.components.collect
+ # placeholders in manifest organization
+ self.collect_md.add_field('cluster_type', 'none')
+ self.collect_md.add_list('node_list')
# add a place to set/get the sudo password, but do not expose it via
# the CLI, because security is a thing
setattr(self.opts, 'sudo_pw', '')
@@ -689,6 +697,12 @@ class SoSCollector(SoSComponent):
'--no-local option if localhost should not be '
'included.\nAborting...\n', 1)
+ self.collect_md.add_field('master', self.master.address)
+ self.collect_md.add_section('nodes')
+ self.collect_md.nodes.add_section(self.master.address)
+ self.master.set_node_manifest(getattr(self.collect_md.nodes,
+ self.master.address))
+
if self.opts.cluster_type:
if self.opts.cluster_type == 'none':
self.cluster = self.clusters['jbon']
@@ -708,6 +722,7 @@ class SoSCollector(SoSComponent):
"cluster type and the node list")
self.cluster = self.clusters['jbon']
self.cluster_type = 'none'
+ self.collect_md.add_field('cluster_type', self.cluster_type)
if self.cluster:
self.master.cluster = self.cluster
self.cluster.setup()
@@ -784,6 +799,7 @@ class SoSCollector(SoSComponent):
self.sos_cmd = self.sos_cmd + ' '.join(sos_opts)
self.log_debug("Initial sos cmd set to %s" % self.sos_cmd)
self.commons['sos_cmd'] = self.sos_cmd
+ self.collect_md.add_field('initial_sos_cmd', self.sos_cmd)
def connect_to_master(self):
"""If run with --master, we will run cluster checks again that
@@ -859,6 +875,7 @@ class SoSCollector(SoSComponent):
self.node_list.remove(n)
self.node_list = list(set(n for n in self.node_list if n))
self.log_debug('Node list reduced to %s' % self.node_list)
+ self.collect_md.add_list('node_list', self.node_list)
def compare_node_to_regex(self, node):
"""Compares a discovered node name to a provided list of nodes from
@@ -932,6 +949,9 @@ class SoSCollector(SoSComponent):
client.set_cluster(self.cluster)
if client.connected:
self.client_list.append(client)
+ self.collect_md.nodes.add_section(node[0])
+ client.set_node_manifest(getattr(self.collect_md.nodes,
+ node[0]))
else:
client.close_ssh_session()
except Exception:
@@ -986,7 +1006,7 @@ this utility or remote systems that it connects to.
self.archive_name = self._get_archive_name()
self.setup_archive(name=self.archive_name)
self.archive_path = self.archive.get_archive_path()
- self.archive.makedirs('logs', 0o755)
+ self.archive.makedirs('sos_logs', 0o755)
self.collect()
self.cleanup()
@@ -1095,9 +1115,14 @@ this utility or remote systems that it connects to.
name = os.path.join(self.tmpdir, fname)
self.archive.add_file(name, dest=dest)
self.archive.add_file(self.sos_log_file,
- dest=os.path.join('logs', 'sos.log'))
+ dest=os.path.join('sos_logs', 'sos.log'))
self.archive.add_file(self.sos_ui_log_file,
- dest=os.path.join('logs', 'ui.log'))
+ dest=os.path.join('sos_logs', 'ui.log'))
+
+ if self.manifest is not None:
+ self.archive.add_final_manifest_data(
+ self.opts.compression_type
+ )
arc_name = self.archive.finalize(self.opts.compression_type)
final_name = os.path.join(self.sys_tmp, os.path.basename(arc_name))
diff --git a/sos/collector/clusters/__init__.py b/sos/collector/clusters/__init__.py
index a112ff49..43a4b49e 100644
--- a/sos/collector/clusters/__init__.py
+++ b/sos/collector/clusters/__init__.py
@@ -153,7 +153,9 @@ class Cluster():
"""Used by SosNode() to retrieve the appropriate label from the cluster
as set by set_node_label() in the cluster profile.
"""
- return self.set_node_label(node)
+ label = self.set_node_label(node)
+ node.manifest.add_field('label', label)
+ return label
def set_node_label(self, node):
"""This may be overridden by clusters.
diff --git a/sos/collector/clusters/ovirt.py b/sos/collector/clusters/ovirt.py
index bdce8cf7..36dae409 100644
--- a/sos/collector/clusters/ovirt.py
+++ b/sos/collector/clusters/ovirt.py
@@ -143,7 +143,10 @@ class ovirt(Cluster):
db_sos = self.exec_master_cmd(cmd, need_root=True)
for line in db_sos['stdout'].splitlines():
if fnmatch.fnmatch(line, '*sosreport-*tar*'):
- return line.strip()
+ _pg_dump = line.strip()
+ self.master.manifest.add_field('postgresql_dump',
+ _pg_dump.split('/')[-1])
+ return _pg_dump
self.log_error('Failed to gather database dump')
return False
diff --git a/sos/collector/sosnode.py b/sos/collector/sosnode.py
index 485826b4..1f5718bf 100644
--- a/sos/collector/sosnode.py
+++ b/sos/collector/sosnode.py
@@ -107,6 +107,14 @@ class SosNode():
def _fmt_msg(self, msg):
return '{:<{}} : {}'.format(self._hostname, self.hostlen + 1, msg)
+ def set_node_manifest(self, manifest):
+ """Set the manifest section that this node will write to
+ """
+ self.manifest = manifest
+ self.manifest.add_field('hostname', self._hostname)
+ self.manifest.add_field('policy', self.host.distro)
+ self.manifest.add_field('sos_version', self.sos_info['version'])
+
def check_in_container(self):
"""
Tries to identify if we are currently running in a container or not.
@@ -692,7 +700,9 @@ class SosNode():
self.log_debug('Requested to enable preset %s but preset does '
'not exist on node' % self.opts.preset)
- return "%s %s" % (sos_cmd, ' '.join(sos_opts))
+ _sos_cmd = "%s %s" % (sos_cmd, ' '.join(sos_opts))
+ self.manifest.add_field('final_sos_command', _sos_cmd)
+ return _sos_cmd
def determine_sos_label(self):
"""Determine what, if any, label should be added to the sosreport"""
@@ -724,6 +734,7 @@ class SosNode():
self.log_info('Final sos path: %s' % path)
self.sos_path = path
self.archive = path.split('/')[-1]
+ self.manifest.add_field('collected_archive', self.archive)
def determine_sos_error(self, rc, stdout):
if rc == -1:
diff --git a/sos/component.py b/sos/component.py
index cb7fd152..f966340a 100644
--- a/sos/component.py
+++ b/sos/component.py
@@ -9,6 +9,7 @@
#
# See the LICENSE file in the source distribution for further information.
+import json
import logging
import os
import tempfile
@@ -16,7 +17,9 @@ import sys
import sos.policies
from argparse import SUPPRESS
+from datetime import datetime
from shutil import rmtree
+from sos import __version__
from sos.archive import TarFileArchive
from sos.options import SoSOptions
from sos.utilities import TempFileUtil
@@ -65,6 +68,7 @@ class SoSComponent():
self.archive = None
self.tmpdir = None
self.tempfile_util = None
+ self.manifest = None
try:
import signal
@@ -78,6 +82,8 @@ class SoSComponent():
if self.configure_logging:
tmpdir = self.get_tmpdir_default()
+ # only setup metadata if we are logging
+ self.manifest = SoSMetadata()
if not os.path.isdir(tmpdir) \
or not os.access(tmpdir, os.W_OK):
@@ -100,6 +106,17 @@ class SoSComponent():
self._exit(0)
self._is_root = self.policy.is_root()
+ if self.manifest is not None:
+ self.manifest.add_field('version', __version__)
+ self.manifest.add_field('cmdline', ' '.join(self.cmdline))
+ self.manifest.add_field('start_time', datetime.now())
+ # these three will be set later, add here for organization
+ self.manifest.add_field('end_time', '')
+ self.manifest.add_field('run_time', '')
+ self.manifest.add_field('compression', '')
+ self.manifest.add_field('policy', self.policy.distro)
+ self.manifest.add_section('components')
+
def get_exit_handler(self):
def exit_handler(signum, frame):
self.exit_process = True
@@ -180,12 +197,14 @@ class SoSComponent():
auto_archive = self.policy.get_preferred_archive()
self.archive = auto_archive(archive_name, self.tmpdir,
self.policy, self.opts.threads,
- enc_opts, self.opts.sysroot)
+ enc_opts, self.opts.sysroot,
+ self.manifest)
else:
self.archive = TarFileArchive(archive_name, self.tmpdir,
self.policy, self.opts.threads,
- enc_opts, self.opts.sysroot)
+ enc_opts, self.opts.sysroot,
+ self.manifest)
self.archive.set_debug(True if self.opts.debug else False)
@@ -248,4 +267,42 @@ class SoSComponent():
def get_temp_file(self):
return self.tempfile_util.new()
+
+class SoSMetadata():
+ """This class is used to record metadata from a sos execution that will
+ then be stored as a JSON-formatted manifest within the final tarball.
+
+ It can be extended by adding further instances of SoSMetadata to represent
+ dict-like structures throughout the various sos bits that record to
+ metadata
+ """
+
+ def add_field(self, field_name, content):
+ """Add a key, value entry to the current metadata instance
+ """
+ setattr(self, field_name, content)
+
+ def add_section(self, section_name):
+ """Adds a new instance of SoSMetadata to the current instance
+ """
+ setattr(self, section_name, SoSMetadata())
+
+ def add_list(self, list_name, content=[]):
+ """Add a named list element to the current instance. If content is not
+ supplied, then add an empty list that can alter be appended to
+ """
+ if not isinstance(content, list):
+ raise TypeError('content added must be list')
+ setattr(self, list_name, content)
+
+ def get_json(self, indent=None):
+ """Convert contents of this SoSMetdata instance, and all other nested
+ instances (sections), into a json-formatted output.
+
+ Used to write manifest.json to the final archives.
+ """
+ return json.dumps(self,
+ default=lambda o: getattr(o, '__dict__', str(o)),
+ indent=indent)
+
# vim: set et ts=4 sw=4 :
diff --git a/sos/report/__init__.py b/sos/report/__init__.py
index f6ad28db..677d8693 100644
--- a/sos/report/__init__.py
+++ b/sos/report/__init__.py
@@ -129,6 +129,11 @@ class SoSReport(SoSComponent):
self._is_root = self.policy.is_root()
+ # add a manifest section for report
+ self.manifest.components.add_section('report')
+ # shorthand reference for ease of maintenance
+ self.report_md = self.manifest.components.report
+
# user specified command line preset
if self.opts.preset != self.arg_defaults["preset"]:
self.preset = self.policy.find_preset(self.opts.preset)
@@ -839,12 +844,20 @@ class SoSReport(SoSComponent):
self.ui_log.info(_(" Setting up plugins ..."))
for plugname, plug in self.loaded_plugins:
try:
+ self.report_md.plugins.add_section(plugname)
+ plug.set_plugin_manifest(getattr(self.report_md.plugins,
+ plugname))
+ start = datetime.now()
+ plug.manifest.add_field('setup_start', start)
plug.archive = self.archive
plug.add_default_collections()
plug.setup()
self.env_vars.update(plug._env_vars)
if self.opts.verify:
plug.setup_verify()
+ end = datetime.now()
+ plug.manifest.add_field('setup_end', end)
+ plug.manifest.add_field('setup_time', end - start)
except KeyboardInterrupt:
raise
except (OSError, IOError) as e:
@@ -902,15 +915,21 @@ class SoSReport(SoSComponent):
against the plugin as a whole"""
with ThreadPoolExecutor(1) as pool:
try:
+ _plug = self.loaded_plugins[plugin[0]-1][1]
t = pool.submit(self.collect_plugin, plugin)
# Re-type int 0 to NoneType, as otherwise result() will treat
# it as a literal 0-second timeout
- timeout = self.loaded_plugins[plugin[0]-1][1].timeout or None
+ timeout = _plug.timeout or None
+ start = datetime.now()
+ _plug.manifest.add_field('start_time', start)
t.result(timeout=timeout)
+ end = datetime.now()
+ _plug.manifest.add_field('end_time', end)
+ _plug.manifest.add_field('run_time', end - start)
except TimeoutError:
self.ui_log.error("\n Plugin %s timed out\n" % plugin[1])
self.running_plugs.remove(plugin[1])
- self.loaded_plugins[plugin[0]-1][1]._timeout_hit = True
+ self.loaded_plugins[plugin[0]-1][1].set_timeout_hit()
pool._threads.clear()
return True
@@ -1092,6 +1111,9 @@ class SoSReport(SoSComponent):
# All subsequent terminal output must use print().
self._add_sos_logs()
+ if self.manifest is not None:
+ self.archive.add_final_manifest_data(self.opts.compression_type)
+
archive = None # archive path
directory = None # report directory path (--build)
@@ -1214,6 +1236,21 @@ class SoSReport(SoSComponent):
return False
return True
+ def add_manifest_data(self):
+ """Add 'global' data to the manifest, that is any information that is
+ not plugin-specific
+ """
+ self.report_md.add_field('sysroot', self.sysroot)
+ self.report_md.add_field('preset', self.preset.name if self.preset else
+ 'unset')
+ self.report_md.add_list('profiles', self.opts.profiles)
+ self.report_md.add_section('devices')
+ for key, value in self.devices.items():
+ self.report_md.devices.add_list(key, value)
+ self.report_md.add_list('enabled_plugins', self.opts.enableplugins)
+ self.report_md.add_list('disabled_plugins', self.opts.noplugins)
+ self.report_md.add_section('plugins')
+
def execute(self):
try:
self.policy.set_commons(self.get_commons())
@@ -1240,6 +1277,7 @@ class SoSReport(SoSComponent):
if not self.verify_plugins():
return False
+ self.add_manifest_data()
self.batch()
self.prework()
self.setup()
diff --git a/sos/report/plugins/__init__.py b/sos/report/plugins/__init__.py
index cef08247..412f4489 100644
--- a/sos/report/plugins/__init__.py
+++ b/sos/report/plugins/__init__.py
@@ -439,6 +439,7 @@ class Plugin(object):
self.sysroot = commons['sysroot']
self.policy = commons['policy']
self.devices = commons['devices']
+ self.manifest = None
self.soslog = self.commons['soslog'] if 'soslog' in self.commons \
else logging.getLogger('sos')
@@ -455,6 +456,21 @@ class Plugin(object):
# Initialise the default --dry-run predicate
self.set_predicate(SoSPredicate(self))
+ def set_plugin_manifest(self, manifest):
+ """Pass in a manifest object to the plugin to write to
+ """
+ self.manifest = manifest
+ # add these here for organization when they actually get set later
+ self.manifest.add_field('start_time', '')
+ self.manifest.add_field('end_time', '')
+ self.manifest.add_field('run_time', '')
+ self.manifest.add_field('setup_start', '')
+ self.manifest.add_field('setup_end', '')
+ self.manifest.add_field('setup_time', '')
+ self.manifest.add_field('timeout_hit', False)
+ self.manifest.add_list('commands', [])
+ self.manifest.add_list('files', [])
+
@property
def timeout(self):
"""Returns either the default plugin timeout value, the value as
@@ -479,6 +495,11 @@ class Plugin(object):
return _timeout
return self.plugin_timeout
+ def set_timeout_hit(self):
+ self._timeout_hit = True
+ self.manifest.add_field('end_time', datetime.now())
+ self.manifest.add_field('timeout_hit', True)
+
def check_timeout(self):
"""
Checks to see if the plugin has hit its timeout.
@@ -1000,7 +1021,7 @@ class Plugin(object):
self.copy_paths.update(copy_paths)
def add_copy_spec(self, copyspecs, sizelimit=None, maxage=None,
- tailit=True, pred=None):
+ tailit=True, pred=None, tags=None):
"""Add a file or glob but limit it to sizelimit megabytes. Collect
files with mtime not older than maxage hours.
If fname is a single file the file will be tailed to meet sizelimit.
@@ -1034,6 +1055,9 @@ class Plugin(object):
if isinstance(copyspecs, str):
copyspecs = [copyspecs]
+ if isinstance(tags, str):
+ tags = [tags]
+
for copyspec in copyspecs:
if not (copyspec and len(copyspec)):
return False
@@ -1075,6 +1099,8 @@ class Plugin(object):
current_size = 0
limit_reached = False
+ _manifest_files = []
+
for _file in files:
if _file in self.copy_paths:
self._log_debug("skipping redundant file '%s'" % _file)
@@ -1111,16 +1137,24 @@ class Plugin(object):
link_path = os.path.join(rel_path, 'sos_strings',
self.name(), strfile)
self.archive.add_link(link_path, _file)
+ _manifest_files.append(_file.lstrip('/'))
else:
self._log_info("skipping '%s' over size limit" % _file)
else:
# size limit not hit, copy the file
+ _manifest_files.append(_file.lstrip('/'))
self._add_copy_paths([_file])
+ if self.manifest:
+ self.manifest.files.append({
+ 'specification': copyspec,
+ 'files_copied': _manifest_files,
+ 'tags': tags
+ })
def add_blockdev_cmd(self, cmds, devices='block', timeout=300,
sizelimit=None, chroot=True, runat=None, env=None,
binary=False, prepend_path=None, whitelist=[],
- blacklist=[]):
+ blacklist=[], tags=None):
"""Run a command or list of commands against storage-related devices.
Any commands specified by cmd will be iterated over the list of the
@@ -1146,11 +1180,13 @@ class Plugin(object):
self._add_device_cmd(cmds, devices, timeout=timeout,
sizelimit=sizelimit, chroot=chroot, runat=runat,
env=env, binary=binary, prepend_path=prepend_path,
- whitelist=whitelist, blacklist=blacklist)
+ whitelist=whitelist, blacklist=blacklist,
+ tags=tags)
def _add_device_cmd(self, cmds, devices, timeout=300, sizelimit=None,
chroot=True, runat=None, env=None, binary=False,
- prepend_path=None, whitelist=[], blacklist=[]):
+ prepend_path=None, whitelist=[], blacklist=[],
+ tags=None):
"""Run a command against all specified devices on the system.
"""
if isinstance(cmds, str):
@@ -1178,7 +1214,8 @@ class Plugin(object):
_cmd = cmd % {'dev': device}
self._add_cmd_output(cmd=_cmd, timeout=timeout,
sizelimit=sizelimit, chroot=chroot,
- runat=runat, env=env, binary=binary)
+ runat=runat, env=env, binary=binary,
+ tags=tags)
def _add_cmd_output(self, **kwargs):
"""Internal helper to add a single command to the collection list."""
@@ -1197,7 +1234,7 @@ class Plugin(object):
root_symlink=None, timeout=cmd_timeout, stderr=True,
chroot=True, runat=None, env=None, binary=False,
sizelimit=None, pred=None, subdir=None,
- changes=False, foreground=False):
+ changes=False, foreground=False, tags=None):
"""Run a program or a list of programs and collect the output"""
if isinstance(cmds, str):
cmds = [cmds]
@@ -1212,7 +1249,7 @@ class Plugin(object):
root_symlink=root_symlink, timeout=timeout,
stderr=stderr, chroot=chroot, runat=runat,
env=env, binary=binary, sizelimit=sizelimit,
- pred=pred, subdir=subdir,
+ pred=pred, subdir=subdir, tags=tags,
changes=changes, foreground=foreground)
def get_cmd_output_path(self, name=None, make=True):
@@ -1303,7 +1340,7 @@ class Plugin(object):
root_symlink=False, timeout=cmd_timeout,
stderr=True, chroot=True, runat=None, env=None,
binary=False, sizelimit=None, subdir=None,
- changes=False, foreground=False):
+ changes=False, foreground=False, tags=None):
"""Execute a command and save the output to a file for inclusion in the
report.
@@ -1325,6 +1362,7 @@ class Plugin(object):
:param subdir: Subdir in plugin directory to save to
:param changes: Does this cmd potentially make a change
on the system?
+ :param tags: Add tags in the archive manifest
:returns: dict containing status, output, and filename in the
archive for the executed cmd
@@ -1338,6 +1376,9 @@ class Plugin(object):
else:
root = None
+ if isinstance(tags, str):
+ tags = [tags]
+
start = time()
result = sos_get_command_output(
@@ -1351,6 +1392,16 @@ class Plugin(object):
"command '%s' timed out after %ds" % (cmd, timeout)
)
+ manifest_cmd = {
+ 'command': cmd.split(' ')[0],
+ 'parameters': cmd.split(' ')[1:],
+ 'exec': cmd,
+ 'filepath': None,
+ 'return_code': result['status'],
+ 'run_time': time() - start,
+ 'tags': tags
+ }
+
# command not found or not runnable
if result['status'] == 126 or result['status'] == 127:
# automatically retry chroot'ed commands in the host namespace
@@ -1368,10 +1419,14 @@ class Plugin(object):
# Exit here if the command was not found in the chroot check above
# as otherwise we will create a blank file in the archive
if result['status'] in [126, 127]:
- return result
+ if self.manifest:
+ self.manifest.commands.append(manifest_cmd)
+ return result
+
+ run_time = time() - start
self._log_debug("collected output of '%s' in %s (changes=%s)"
- % (cmd.split()[0], time() - start, changes))
+ % (cmd.split()[0], run_time, changes))
if suggest_filename:
outfn = self._make_command_filename(suggest_filename, subdir)
@@ -1395,13 +1450,17 @@ class Plugin(object):
os.path.join(self.archive.get_archive_path(), outfn) if outfn else
''
)
+ if self.manifest:
+ manifest_cmd['filepath'] = outfn
+ manifest_cmd['run_time'] = run_time
+ self.manifest.commands.append(manifest_cmd)
return result
def collect_cmd_output(self, cmd, suggest_filename=None,
root_symlink=False, timeout=cmd_timeout,
stderr=True, chroot=True, runat=None, env=None,
binary=False, sizelimit=None, pred=None,
- subdir=None):
+ subdir=None, tags=None):
"""Execute a command and save the output to a file for inclusion in the
report.
"""
@@ -1417,7 +1476,8 @@ class Plugin(object):
return self._collect_cmd_output(
cmd, suggest_filename=suggest_filename, root_symlink=root_symlink,
timeout=timeout, stderr=stderr, chroot=chroot, runat=runat,
- env=env, binary=binary, sizelimit=sizelimit, subdir=subdir
+ env=env, binary=binary, sizelimit=sizelimit, subdir=subdir,
+ tags=tags
)
def exec_cmd(self, cmd, timeout=cmd_timeout, stderr=True, chroot=True,