author    | Jake Hunsaker <jacob.r.hunsaker@gmail.com> | 2023-09-22 11:14:28 -0400
committer | Jake Hunsaker <jacob.r.hunsaker@gmail.com> | 2023-09-23 19:18:58 -0400
commit    | 62b4cec0e93855e1ad953389606d2d147672263c (patch)
tree      | f0cd50155f25bc427ebd8e6151ebff4082c9c7d4
parent    | 9e12fbad2d849fef42fbc939a3de47bd9745a5da (diff)
download  | sos-62b4cec0e93855e1ad953389606d2d147672263c.tar.gz
[plugins,distros] Resolve code alerts for file not always closed
Fix a number of outstanding "File is not always closed" errors highlighted by
GitHub's code scanning.
This involves (almost exclusively) changing constructs such as:

    for line in open('/some/file', 'r').read().splitlines():

to using a context manager to ensure the file is always closed:

    with open('/some/file', 'r') as sfile:
        for line in sfile.read().splitlines():
...or something similar. The logic of each affected block of code (mostly in
plugins) is otherwise unchanged.
Signed-off-by: Jake Hunsaker <jacob.r.hunsaker@gmail.com>
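As a minimal, self-contained sketch of the pattern above (the path and
function names are illustrative only, not taken from sos): the old form
leaves closing the file to the interpreter, while the with form closes it
deterministically even if the surrounding code raises.

    # Sketch of the refactor pattern; names and path are illustrative only.
    def read_lines_old(path='/some/file'):
        # The file object is only closed when the interpreter reclaims it,
        # which is what triggers the "File is not always closed" alert.
        return open(path, 'r').read().splitlines()

    def read_lines_new(path='/some/file'):
        # The context manager closes the file as soon as the block exits,
        # whether normally or via an exception.
        with open(path, 'r') as sfile:
            return sfile.read().splitlines()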
-rw-r--r-- | sos/archive.py                        | 20
-rw-r--r-- | sos/cleaner/__init__.py               | 17
-rw-r--r-- | sos/policies/distros/redhat.py        | 10
-rw-r--r-- | sos/report/plugins/__init__.py        | 15
-rw-r--r-- | sos/report/plugins/candlepin.py       |  4
-rw-r--r-- | sos/report/plugins/firewall_tables.py |  6
-rw-r--r-- | sos/report/plugins/foreman.py         |  4
-rw-r--r-- | sos/report/plugins/haproxy.py         | 11
-rw-r--r-- | sos/report/plugins/libvirt.py         |  7
-rw-r--r-- | sos/report/plugins/mssql.py           | 27
-rw-r--r-- | sos/report/plugins/pcp.py             |  5
-rw-r--r-- | sos/report/plugins/pulp.py            |  4
-rw-r--r-- | sos/report/plugins/pulpcore.py        | 57
-rw-r--r-- | sos/report/plugins/sos_extras.py      | 45
14 files changed, 122 insertions, 110 deletions
diff --git a/sos/archive.py b/sos/archive.py
index d76b7aa0..2a9706a2 100644
--- a/sos/archive.py
+++ b/sos/archive.py
@@ -390,14 +390,14 @@ class FileCacheArchive(Archive):
             # on file content.
             dest = self.check_path(dest, P_FILE, force=True)

-            f = codecs.open(dest, mode, encoding='utf-8')
-            if isinstance(content, bytes):
-                content = content.decode('utf8', 'ignore')
-            f.write(content)
-            if os.path.exists(src):
-                self._copy_attributes(src, dest)
-            self.log_debug("added string at '%s' to FileCacheArchive '%s'"
-                           % (src, self._archive_root))
+            with codecs.open(dest, mode, encoding='utf-8') as f:
+                if isinstance(content, bytes):
+                    content = content.decode('utf8', 'ignore')
+                f.write(content)
+                if os.path.exists(src):
+                    self._copy_attributes(src, dest)
+                self.log_debug("added string at '%s' to FileCacheArchive '%s'"
+                               % (src, self._archive_root))

     def add_binary(self, content, dest):
         with self._path_lock:
@@ -405,8 +405,8 @@ class FileCacheArchive(Archive):
             if not dest:
                 return

-            f = codecs.open(dest, 'wb', encoding=None)
-            f.write(content)
+            with codecs.open(dest, 'wb', encoding=None) as f:
+                f.write(content)

         self.log_debug("added binary content at '%s' to archive '%s'"
                        % (dest, self._archive_root))
diff --git a/sos/cleaner/__init__.py b/sos/cleaner/__init__.py
index fbcaa9c3..9aabd9c4 100644
--- a/sos/cleaner/__init__.py
+++ b/sos/cleaner/__init__.py
@@ -525,15 +525,14 @@ third party.
         """
         try:
            hash_size = 1024**2  # Hash 1MiB of content at a time.
-            archive_fp = open(archive_path, 'rb')
-            digest = hashlib.new(self.hash_name)
-            while True:
-                hashdata = archive_fp.read(hash_size)
-                if not hashdata:
-                    break
-                digest.update(hashdata)
-            archive_fp.close()
-            return digest.hexdigest() + '\n'
+            with open(archive_path, 'rb') as archive_fp:
+                digest = hashlib.new(self.hash_name)
+                while True:
+                    hashdata = archive_fp.read(hash_size)
+                    if not hashdata:
+                        break
+                    digest.update(hashdata)
+                return digest.hexdigest() + '\n'
         except Exception as err:
             self.log_debug("Could not generate new checksum: %s" % err)
             return None
diff --git a/sos/policies/distros/redhat.py b/sos/policies/distros/redhat.py
index 8085aae8..15241e27 100644
--- a/sos/policies/distros/redhat.py
+++ b/sos/policies/distros/redhat.py
@@ -466,8 +466,9 @@ support representative.
         if not os.path.exists(host_release):
             return False
         try:
-            for line in open(host_release, "r").read().splitlines():
-                atomic |= ATOMIC_RELEASE_STR in line
+            with open(host_release, 'r') as afile:
+                for line in afile.read().splitlines():
+                    atomic |= ATOMIC_RELEASE_STR in line
         except IOError:
             pass
         return atomic
@@ -551,8 +552,9 @@ support representative.
             return coreos
         host_release = os.environ[ENV_HOST_SYSROOT] + OS_RELEASE
         try:
-            for line in open(host_release, 'r').read().splitlines():
-                coreos |= 'Red Hat Enterprise Linux CoreOS' in line
+            with open(host_release, 'r') as hfile:
+                for line in hfile.read().splitlines():
+                    coreos |= 'Red Hat Enterprise Linux CoreOS' in line
         except IOError:
             pass
         return coreos
diff --git a/sos/report/plugins/__init__.py b/sos/report/plugins/__init__.py
index 60910c39..b43bc4ea 100644
--- a/sos/report/plugins/__init__.py
+++ b/sos/report/plugins/__init__.py
@@ -3175,10 +3175,9 @@ class Plugin():
             _pfname = self._make_command_filename(fname, subdir=subdir)
             self.archive.check_path(_pfname, P_FILE)
             _name = self.archive.dest_path(_pfname)
-            _file = open(_name, 'w')
-            self._log_debug(f"manual collection file opened: {_name}")
-            yield _file
-            _file.close()
+            with open(_name, 'w') as _file:
+                self._log_debug(f"manual collection file opened: {_name}")
+                yield _file
             end = time()
             run = end - start
             self._log_info(f"manual collection '{fname}' finished in {run}")
@@ -3447,10 +3446,10 @@ class Plugin():
         try:
             cmd_line_paths = glob.glob(cmd_line_glob)
             for path in cmd_line_paths:
-                f = open(self.path_join(path), 'r')
-                cmd_line = f.read().strip()
-                if process in cmd_line:
-                    status = True
+                with open(self.path_join(path), 'r') as pfile:
+                    cmd_line = pfile.read().strip()
+                    if process in cmd_line:
+                        status = True
         except IOError:
             return False
         return status
diff --git a/sos/report/plugins/candlepin.py b/sos/report/plugins/candlepin.py
index 67dc8d2e..9c14ea2d 100644
--- a/sos/report/plugins/candlepin.py
+++ b/sos/report/plugins/candlepin.py
@@ -30,7 +30,9 @@ class Candlepin(Plugin, RedHatPlugin):
         self.dbpasswd = ""
         cfg_file = "/etc/candlepin/candlepin.conf"
         try:
-            for line in open(cfg_file).read().splitlines():
+            with open(cfg_file, 'r') as cfile:
+                candle_lines = cfile.read().splitlines()
+            for line in candle_lines:
                 # skip empty lines and lines with comments
                 if not line or line[0] == '#':
                     continue
diff --git a/sos/report/plugins/firewall_tables.py b/sos/report/plugins/firewall_tables.py
index 590e9849..372e6e47 100644
--- a/sos/report/plugins/firewall_tables.py
+++ b/sos/report/plugins/firewall_tables.py
@@ -77,7 +77,8 @@ class firewall_tables(Plugin, IndependentPlugin):
         # do collect them only when relevant nft list ruleset exists
         default_ip_tables = "mangle\nfilter\nnat\n"
         try:
-            ip_tables_names = open("/proc/net/ip_tables_names").read()
+            with open('/proc/net/ip_tables_names', 'r') as ifile:
+                ip_tables_names = ifile.read()
         except IOError:
             ip_tables_names = default_ip_tables
         for table in ip_tables_names.splitlines():
@@ -85,7 +86,8 @@ class firewall_tables(Plugin, IndependentPlugin):
                 self.collect_iptable(table)
         # collect the same for ip6tables
         try:
-            ip_tables_names = open("/proc/net/ip6_tables_names").read()
+            with open('/proc/net/ip6_tables_names', 'r') as ipfile:
+                ip_tables_names = ipfile.read()
         except IOError:
             ip_tables_names = default_ip_tables
         for table in ip_tables_names.splitlines():
diff --git a/sos/report/plugins/foreman.py b/sos/report/plugins/foreman.py
index cea68ae2..a3ee81b7 100644
--- a/sos/report/plugins/foreman.py
+++ b/sos/report/plugins/foreman.py
@@ -43,7 +43,9 @@ class Foreman(Plugin):
         self.dbhost = "localhost"
         self.dbpasswd = ""
         try:
-            for line in open("/etc/foreman/database.yml").read().splitlines():
+            with open('/etc/foreman/database.yml', 'r') as dfile:
+                foreman_lines = dfile.read().splitlines()
+            for line in foreman_lines:
                 # skip empty lines and lines with comments
                 if not line or line[0] == '#':
                     continue
diff --git a/sos/report/plugins/haproxy.py b/sos/report/plugins/haproxy.py
index 1b3a26d2..5d71e223 100644
--- a/sos/report/plugins/haproxy.py
+++ b/sos/report/plugins/haproxy.py
@@ -48,11 +48,12 @@ class HAProxy(Plugin, RedHatPlugin, DebianPlugin):
         matched = None
         provision_ip = None
         try:
-            for line in open("/etc/haproxy/haproxy.cfg").read().splitlines():
-                if matched:
-                    provision_ip = line.split()[1]
-                    break
-                matched = match(r".*haproxy\.stats.*", line)
+            with open("/etc/haproxy/haproxy.cfg", 'r') as hfile:
+                for line in hfile.read().splitlines():
+                    if matched:
+                        provision_ip = line.split()[1]
+                        break
+                    matched = match(r".*haproxy\.stats.*", line)
         except IOError:
             # fallback when the cfg file is not accessible
             pass
diff --git a/sos/report/plugins/libvirt.py b/sos/report/plugins/libvirt.py
index c36bd022..60c439ef 100644
--- a/sos/report/plugins/libvirt.py
+++ b/sos/report/plugins/libvirt.py
@@ -69,9 +69,10 @@ class Libvirt(Plugin, IndependentPlugin):

         # get details of processes of KVM hosts
         for pidfile in glob.glob("/run/libvirt/*/*.pid"):
-            pid = open(pidfile).read().splitlines()[0]
-            for pf in ["environ", "cgroup", "maps", "numa_maps", "limits"]:
-                self.add_copy_spec("/proc/%s/%s" % (pid, pf))
+            with open(pidfile, 'r') as pfile:
+                pid = pfile.read().splitlines()[0]
+            for pf in ["environ", "cgroup", "maps", "numa_maps", "limits"]:
+                self.add_copy_spec("/proc/%s/%s" % (pid, pf))

         self.add_file_tags({
             "/run/libvirt/qemu/*.xml": "var_qemu_xml",
diff --git a/sos/report/plugins/mssql.py b/sos/report/plugins/mssql.py
index d9dce0f9..9a4d643c 100644
--- a/sos/report/plugins/mssql.py
+++ b/sos/report/plugins/mssql.py
@@ -43,19 +43,20 @@ class MsSQL(Plugin, RedHatPlugin):
         sqlagent_errorlogfile = '/var/opt/mssql/log/sqlagentstartup.log'
         kerberoskeytabfile = None
         try:
-            for line in open(mssql_conf).read().splitlines():
-                if line.startswith('['):
-                    section = line
-                    continue
-                words = line.split('=')
-                if words[0].strip() == 'errorlogfile':
-                    if section == '[filelocation]':
-                        errorlogfile = words[1].strip()
-                    elif section == '[sqlagent]':
-                        sqlagent_errorlogfile = words[1].strip()
-                elif words[0].strip() == 'kerberoskeytabfile':
-                    if section == '[network]':
-                        kerberoskeytabfile = words[1].strip()
+            with open(mssql_conf, 'r') as mfile:
+                for line in mfile.read().splitlines():
+                    if line.startswith('['):
+                        section = line
+                        continue
+                    words = line.split('=')
+                    if words[0].strip() == 'errorlogfile':
+                        if section == '[filelocation]':
+                            errorlogfile = words[1].strip()
+                        elif section == '[sqlagent]':
+                            sqlagent_errorlogfile = words[1].strip()
+                    elif words[0].strip() == 'kerberoskeytabfile':
+                        if section == '[network]':
+                            kerberoskeytabfile = words[1].strip()
         except IOError as ex:
             self._log_error('Could not open conf file %s: %s'
                             % (mssql_conf, ex))
diff --git a/sos/report/plugins/pcp.py b/sos/report/plugins/pcp.py
index a979deab..2c6772e2 100644
--- a/sos/report/plugins/pcp.py
+++ b/sos/report/plugins/pcp.py
@@ -47,9 +47,8 @@ class Pcp(Plugin, RedHatPlugin, DebianPlugin):

     def pcp_parse_conffile(self):
         try:
-            pcpconf = open(self.pcp_conffile, "r")
-            lines = pcpconf.readlines()
-            pcpconf.close()
+            with open(self.pcp_conffile, "r") as pcpconf:
+                lines = pcpconf.readlines()
         except IOError:
             return False
         env_vars = {}
diff --git a/sos/report/plugins/pulp.py b/sos/report/plugins/pulp.py
index 19484b07..df007168 100644
--- a/sos/report/plugins/pulp.py
+++ b/sos/report/plugins/pulp.py
@@ -45,7 +45,9 @@ class Pulp(Plugin, RedHatPlugin):
         self.messaging_cert_file = ""
         in_messaging_section = False
         try:
-            for line in open("/etc/pulp/server.conf").read().splitlines():
+            with open("/etc/pulp/server.conf", 'r') as pfile:
+                pulp_lines = pfile.read().splitlines()
+            for line in pulp_lines:
                 if match(r"^\s*seeds:\s+\S+:\S+", line):
                     uri = line.split()[1].split(',')[0].split(':')
                     self.dbhost = uri[0]
diff --git a/sos/report/plugins/pulpcore.py b/sos/report/plugins/pulpcore.py
index ad459111..04efae9f 100644
--- a/sos/report/plugins/pulpcore.py
+++ b/sos/report/plugins/pulpcore.py
@@ -45,34 +45,35 @@ class PulpCore(Plugin, IndependentPlugin):
             return val

         try:
-            # split the lines to "one option per line" format
-            for line in open("/etc/pulp/settings.py").read() \
-                    .replace(',', ',\n').replace('{', '{\n') \
-                    .replace('}', '\n}').splitlines():
-                # skip empty lines and lines with comments
-                if not line or line[0] == '#':
-                    continue
-                if line.startswith("DATABASES"):
-                    databases_scope = True
-                    continue
-                # example HOST line to parse:
-                #         'HOST': 'localhost',
-                pattern = r"\s*['|\"]%s['|\"]\s*:\s*\S+"
-                if databases_scope and match(pattern % 'HOST', line):
-                    self.dbhost = separate_value(line)
-                if databases_scope and match(pattern % 'PORT', line):
-                    self.dbport = separate_value(line)
-                if databases_scope and match(pattern % 'NAME', line):
-                    self.dbname = separate_value(line)
-                if databases_scope and match(pattern % 'PASSWORD', line):
-                    self.dbpasswd = separate_value(line)
-                # if line contains closing '}' database_scope end
-                if databases_scope and '}' in line:
-                    databases_scope = False
-                if line.startswith("STATIC_ROOT = "):
-                    self.staticroot = separate_value(line, sep='=')
-                if line.startswith("CHUNKED_UPLOAD_DIR = "):
-                    self.uploaddir = separate_value(line, sep='=')
+            with open("/etc/pulp/settings.py", 'r') as pfile:
+                # split the lines to "one option per line" format
+                for line in pfile.read() \
+                        .replace(',', ',\n').replace('{', '{\n') \
+                        .replace('}', '\n}').splitlines():
+                    # skip empty lines and lines with comments
+                    if not line or line[0] == '#':
+                        continue
+                    if line.startswith("DATABASES"):
+                        databases_scope = True
+                        continue
+                    # example HOST line to parse:
+                    #         'HOST': 'localhost',
+                    pattern = r"\s*['|\"]%s['|\"]\s*:\s*\S+"
+                    if databases_scope and match(pattern % 'HOST', line):
+                        self.dbhost = separate_value(line)
+                    if databases_scope and match(pattern % 'PORT', line):
+                        self.dbport = separate_value(line)
+                    if databases_scope and match(pattern % 'NAME', line):
+                        self.dbname = separate_value(line)
+                    if databases_scope and match(pattern % 'PASSWORD', line):
+                        self.dbpasswd = separate_value(line)
+                    # if line contains closing '}' database_scope end
+                    if databases_scope and '}' in line:
+                        databases_scope = False
+                    if line.startswith("STATIC_ROOT = "):
+                        self.staticroot = separate_value(line, sep='=')
+                    if line.startswith("CHUNKED_UPLOAD_DIR = "):
+                        self.uploaddir = separate_value(line, sep='=')
         except IOError:
             # fallback when the cfg file is not accessible
             pass
diff --git a/sos/report/plugins/sos_extras.py b/sos/report/plugins/sos_extras.py
index 55bc4dc0..71cc8787 100644
--- a/sos/report/plugins/sos_extras.py
+++ b/sos/report/plugins/sos_extras.py
@@ -61,28 +61,29 @@ class SosExtras(Plugin, IndependentPlugin):
                 _file = self.path_join(path, f)
                 self._log_warn("Collecting data from extras file %s" % _file)
                 try:
-                    for line in open(_file).read().splitlines():
-                        # ignore empty lines or comments
-                        if len(line.split()) == 0 or line.startswith('#'):
-                            continue
-                        # lines starting by ':' specify file pattern to collect
-                        # optionally followed by sizelimit
-                        if line.startswith(':'):
-                            words = line.split()
-                            limit = None
-                            if len(words) > 1:
-                                try:
-                                    limit = int(words[1])
-                                except ValueError:
-                                    self._log_warn("Can't decode integer"
                                                   " sizelimit on line '%s'"
-                                                   " in file %s, using"
-                                                   " default."
-                                                   % (line, _file))
-                            self.add_copy_spec(words[0][1:], sizelimit=limit)
-                        else:
-                            # command to execute
-                            self.add_cmd_output(line, subdir=f)
+                    with open(_file, 'r') as sfile:
+                        for line in sfile.read().splitlines():
+                            # ignore empty lines or comments
+                            if len(line.split()) == 0 or line.startswith('#'):
+                                continue
+                            # lines starting by ':' specify file pattern to
+                            # collect optionally followed by sizelimit
+                            if line.startswith(':'):
+                                words = line.split()
+                                limit = None
+                                if len(words) > 1:
+                                    try:
+                                        limit = int(words[1])
+                                    except ValueError:
+                                        self._log_warn(
+                                            f"Can't decode size limit on line"
+                                            f"{line} in {_file}, using default"
+                                        )
+                                self.add_copy_spec(words[0][1:],
                                                   sizelimit=limit)
+                            else:
+                                # command to execute
+                                self.add_cmd_output(line, subdir=f)
                 except IOError:
                     self._log_warn("unable to read extras file %s" % _file)
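The sos/cleaner/__init__.py hunk above also illustrates a broader idiom that
the refactor preserves: hashing a file in fixed-size chunks inside a with
block, so large archives are never read into memory at once and the file is
always closed. A standalone sketch of that idiom (the function name, chunk
size, and default algorithm are illustrative, not part of sos):

    import hashlib

    def file_checksum(path, hash_name='sha256', chunk_size=1024**2):
        # Hash the file 1MiB at a time; the with block guarantees the
        # descriptor is released even if a read fails part-way through.
        digest = hashlib.new(hash_name)
        with open(path, 'rb') as fp:
            while True:
                chunk = fp.read(chunk_size)
                if not chunk:
                    break
                digest.update(chunk)
        return digest.hexdigest()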