Close all open filehandle (#50544)

Signed-off-by: Abhijeet Kasurde <akasurde@redhat.com>

parent 94a1d86d70
commit db8702cdb8

21 changed files with 81 additions and 47 deletions
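The pattern applied throughout this commit: a bare open(...).read() leaves the file handle open until the interpreter happens to garbage-collect the anonymous file object, whereas a with block closes it deterministically, even when the read or later processing raises. A minimal generic sketch of the before/after, not taken from any of the files below (the path is a placeholder):

    # Before: the handle stays open until the anonymous file object is collected.
    data = open('/tmp/example.txt', 'rb').read()

    # After: __exit__ closes the handle as soon as the block is left,
    # whether the read succeeds or raises.
    with open('/tmp/example.txt', 'rb') as f:
        data = f.read()

    # Roughly equivalent to the explicit try/finally form:
    f = open('/tmp/example.txt', 'rb')
    try:
        data = f.read()
    finally:
        f.close()
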
@@ -748,7 +748,8 @@ def _find_module_utils(module_name, b_module_data, module_path, module_args, tas
     # the write lock. Go ahead and read the data from disk
     # instead of re-creating it.
     try:
-        zipdata = open(cached_module_filename, 'rb').read()
+        with open(cached_module_filename, 'rb') as f:
+            zipdata = f.read()
     except IOError:
         raise AnsibleError('A different worker process failed to create module file. '
                            'Look at traceback for that process for debugging information.')

@@ -80,15 +80,16 @@ def load_privatekey(path, passphrase=None):
     """Load the specified OpenSSL private key."""
 
     try:
-        if passphrase:
-            privatekey = crypto.load_privatekey(crypto.FILETYPE_PEM,
-                                                open(path, 'rb').read(),
-                                                to_bytes(passphrase))
-        else:
-            privatekey = crypto.load_privatekey(crypto.FILETYPE_PEM,
-                                                open(path, 'rb').read())
-
-        return privatekey
+        with open(path, 'rb') as b_priv_key_fh:
+            priv_key_detail = b_priv_key_fh.read()
+
+        if passphrase:
+            return crypto.load_privatekey(crypto.FILETYPE_PEM,
+                                          priv_key_detail,
+                                          to_bytes(passphrase))
+        else:
+            return crypto.load_privatekey(crypto.FILETYPE_PEM,
+                                          priv_key_detail)
     except (IOError, OSError) as exc:
         raise OpenSSLObjectError(exc)
 

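For orientation, a usage sketch of the refactored load_privatekey() helper; the key path and passphrase are placeholders, and crypto (pyOpenSSL) plus OpenSSLObjectError are assumed to be the imports this module_utils file already uses:

    try:
        key = load_privatekey('/etc/ssl/private/example.key', passphrase='secret')
        # Dump the key back out as PEM to show the loaded object is usable.
        print(crypto.dump_privatekey(crypto.FILETYPE_PEM, key))
    except OpenSSLObjectError as exc:
        print('failed to load key: %s' % exc)
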
@@ -97,9 +98,9 @@ def load_certificate(path):
     """Load the specified certificate."""
 
     try:
-        cert_content = open(path, 'rb').read()
-        cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_content)
-        return cert
+        with open(path, 'rb') as cert_fh:
+            cert_content = cert_fh.read()
+        return crypto.load_certificate(crypto.FILETYPE_PEM, cert_content)
     except (IOError, OSError) as exc:
         raise OpenSSLObjectError(exc)
 

@@ -108,9 +109,9 @@ def load_certificate_request(path):
     """Load the specified certificate signing request."""
 
     try:
-        csr_content = open(path, 'rb').read()
-        csr = crypto.load_certificate_request(crypto.FILETYPE_PEM, csr_content)
-        return csr
+        with open(path, 'rb') as csr_fh:
+            csr_content = csr_fh.read()
+        return crypto.load_certificate_request(crypto.FILETYPE_PEM, csr_content)
     except (IOError, OSError) as exc:
         raise OpenSSLObjectError(exc)
 

@@ -195,7 +195,9 @@ class LinuxVirtual(Virtual):
         # Check whether this is a RHEV hypervisor (is vdsm running ?)
         for f in glob.glob('/proc/[0-9]*/comm'):
             try:
-                if open(f).read().rstrip() == 'vdsm':
+                with open(f) as virt_fh:
+                    comm_content = virt_fh.read().rstrip()
+                if comm_content == 'vdsm':
                     virtual_facts['virtualization_type'] = 'RHEV'
                     break
             except Exception:

@@ -231,7 +231,9 @@ class ACIModule(object):
             self.params['certificate_name'] = os.path.basename(os.path.splitext(self.params['private_key'])[0])
 
         try:
-            sig_key = load_privatekey(FILETYPE_PEM, open(self.params['private_key'], 'r').read())
+            with open(self.params['private_key'], 'r') as priv_key_fh:
+                private_key_content = priv_key_fh.read()
+            sig_key = load_privatekey(FILETYPE_PEM, private_key_content)
         except Exception:
             self.module.fail_json(msg='Cannot load private key %s' % self.params['private_key'])
 

@@ -203,7 +203,8 @@ class NetworkConfig(object):
         self._items = self.parse(s)
 
     def loadfp(self, fp):
-        return self.load(open(fp).read())
+        with open(fp) as f:
+            return self.load(f.read())
 
     def parse(self, lines, comment_tokens=None):
         toplevel = re.compile(r'\S')

@@ -88,12 +88,11 @@ class RabbitClient():
     ''' Consider some file size limits here '''
     def _read_file(self, path):
         try:
-            fh = open(path, "rb").read()
+            with open(path, "rb") as file_handle:
+                return file_handle.read()
         except IOError as e:
             self.module.fail_json(msg="Unable to open file %s: %s" % (path, to_native(e)))
 
-        return fh
-
     @staticmethod
     def _check_file_mime_type(path):
         mime = MimeTypes()

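A detail worth noting for the _read_file() rewrite above: returning from inside a with block still runs __exit__, so the handle is closed before the caller sees the data. A small self-contained check (the temporary file exists only to make the sketch runnable):

    import tempfile

    # Create a throwaway file so the sketch can actually read something.
    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        tmp.write(b'payload')
        path = tmp.name

    handles = []

    def read_file(path):
        with open(path, 'rb') as file_handle:
            handles.append(file_handle)
            return file_handle.read()  # __exit__ still runs on the way out

    print(read_file(path))      # b'payload'
    print(handles[0].closed)    # True
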
@@ -623,7 +623,8 @@ def main():
     stack_params['StackName'] = module.params['stack_name']
 
     if module.params['template'] is not None:
-        stack_params['TemplateBody'] = open(module.params['template'], 'r').read()
+        with open(module.params['template'], 'r') as template_fh:
+            stack_params['TemplateBody'] = template_fh.read()
     elif module.params['template_body'] is not None:
         stack_params['TemplateBody'] = module.params['template_body']
     elif module.params['template_url'] is not None:

@@ -636,7 +637,8 @@ def main():
 
     # can't check the policy when verifying.
     if module.params['stack_policy'] is not None and not module.check_mode and not module.params['create_changeset']:
-        stack_params['StackPolicyBody'] = open(module.params['stack_policy'], 'r').read()
+        with open(module.params['stack_policy'], 'r') as stack_policy_fh:
+            stack_params['StackPolicyBody'] = stack_policy_fh.read()
 
     template_parameters = module.params['template_parameters']
 

@@ -218,11 +218,14 @@ def cert_action(module, iam, name, cpath, new_name, new_path, state,
 def load_data(cert, key, cert_chain):
     # if paths are provided rather than lookups read the files and return the contents
     if cert and os.path.isfile(cert):
-        cert = open(cert, 'r').read().rstrip()
+        with open(cert, 'r') as cert_fh:
+            cert = cert_fh.read().rstrip()
     if key and os.path.isfile(key):
-        key = open(key, 'r').read().rstrip()
+        with open(key, 'r') as key_fh:
+            key = key_fh.read().rstrip()
     if cert_chain and os.path.isfile(cert_chain):
-        cert_chain = open(cert_chain, 'r').read()
+        with open(cert_chain, 'r') as cert_chain_fh:
+            cert_chain = cert_chain_fh.read()
     return cert, key, cert_chain
 
 

@@ -115,8 +115,8 @@ def main():
     public_key = module.params['public_key']
 
     if module.params['public_key_file']:
-        public_key = open(module.params['public_key_file']).read()
-        public_key = public_key.rstrip()
+        with open(module.params['public_key_file']) as public_key_fh:
+            public_key = public_key_fh.read().rstrip()
 
     sdk, cloud = openstack_cloud_from_module(module)
     try:

@@ -255,8 +255,9 @@ class Pkcs(crypto_utils.OpenSSLObject):
 
         try:
             self.remove()
-            p12 = crypto.load_pkcs12(open(self.src, 'rb').read(),
+            with open(self.src, 'rb') as pkcs12_fh:
+                pkcs12_content = pkcs12_fh.read()
+            p12 = crypto.load_pkcs12(pkcs12_content,
                                      self.passphrase)
             pkey = crypto.dump_privatekey(crypto.FILETYPE_PEM,
                                           p12.get_privatekey())

@@ -170,7 +170,8 @@ class PublicKey(crypto_utils.OpenSSLObject):
         if not self.check(module, perms_required=False) or self.force:
             try:
                 if self.format == 'OpenSSH':
-                    privatekey_content = open(self.privatekey_path, 'rb').read()
+                    with open(self.privatekey_path, 'rb') as private_key_fh:
+                        privatekey_content = private_key_fh.read()
                     key = crypto_serialization.load_pem_private_key(privatekey_content,
                                                                     password=self.privatekey_passphrase,
                                                                     backend=default_backend())

@@ -212,7 +213,8 @@ class PublicKey(crypto_utils.OpenSSLObject):
             return False
 
         try:
-            publickey_content = open(self.path, 'rb').read()
+            with open(self.path, 'rb') as public_key_fh:
+                publickey_content = public_key_fh.read()
             if self.format == 'OpenSSH':
                 current_publickey = crypto_serialization.load_ssh_public_key(publickey_content, backend=default_backend())
                 publickey_content = current_publickey.public_bytes(crypto_serialization.Encoding.PEM,

@@ -128,7 +128,8 @@ def assemble_from_fragments(src_path, delimiter=None, compiled_regexp=None, igno
         fragment = os.path.join(src_path, f)
         if not os.path.isfile(fragment) or (ignore_hidden and os.path.basename(fragment).startswith('.')):
             continue
-        fragment_content = open(fragment, 'rb').read()
+        with open(fragment, 'rb') as fragment_fh:
+            fragment_content = fragment_fh.read()
 
         # always put a newline between fragments if the previous fragment didn't end with a newline.
         if add_newline:

@@ -170,7 +170,9 @@ ZIP_FILE_MODE_RE = re.compile(r'([r-][w-][SsTtx-]){3}')
 
 def crc32(path):
     ''' Return a CRC32 checksum of a file '''
-    return binascii.crc32(open(path, 'rb').read()) & 0xffffffff
+    with open(path, 'rb') as f:
+        file_content = f.read()
+    return binascii.crc32(file_content) & 0xffffffff
 
 
 def shell_escape(string):

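The crc32() rewrite above still reads the whole file into memory before checksumming. If that ever became a concern, a chunked variant is possible because binascii.crc32() accepts a running checksum as its second argument; a sketch only, not part of this commit:

    import binascii

    def crc32_chunked(path, chunk_size=65536):
        ''' Return a CRC32 checksum of a file without loading it all at once. '''
        crc = 0
        with open(path, 'rb') as f:
            # Feed the file through in fixed-size chunks, carrying the running CRC.
            for chunk in iter(lambda: f.read(chunk_size), b''):
                crc = binascii.crc32(chunk, crc)
        return crc & 0xffffffff
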
@@ -79,7 +79,9 @@ def main():
     if not os.access(source, os.R_OK):
         module.fail_json(msg="file is not readable: %s" % source)
 
-    data = base64.b64encode(open(source, 'rb').read())
+    with open(source, 'rb') as source_fh:
+        source_content = source_fh.read()
+    data = base64.b64encode(source_content)
 
     module.exit_json(content=data, source=source, encoding='base64')
 

@@ -108,7 +108,8 @@ def get_matching_jobs(module, at_cmd, script_file):
         return matching_jobs
 
     # Read script_file into a string.
-    script_file_string = open(script_file).read().strip()
+    with open(script_file) as script_fh:
+        script_file_string = script_fh.read().strip()
 
     # Loop through the jobs.
     # If the script text is contained in a job add job number to list.

@@ -686,12 +686,15 @@ class LinuxService(Service):
             override_file_name = "%s/%s.override" % (initpath, self.name)
 
             # Check to see if files contain the manual line in .conf and fail if True
-            if manreg.search(open(conf_file_name).read()):
+            with open(conf_file_name) as conf_file_fh:
+                conf_file_content = conf_file_fh.read()
+            if manreg.search(conf_file_content):
                 self.module.fail_json(msg="manual stanza not supported in a .conf file")
 
             self.changed = False
             if os.path.exists(override_file_name):
-                override_file_contents = open(override_file_name).read()
+                with open(override_file_name) as override_fh:
+                    override_file_contents = override_fh.read()
                 # Remove manual stanza if present and service enabled
                 if self.enable and manreg.search(override_file_contents):
                     self.changed = True

@@ -77,8 +77,8 @@ def main():
 
     data = None
     try:
-        data = open(log_path).read()
-        data = json.loads(data)
+        with open(log_path) as f:
+            data = json.loads(f.read())
     except Exception:
         if not data:
             # file not written yet? That means it is running

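Side note on the hunk above: since the whole file is parsed as JSON anyway, json.load() on the open handle would be an equivalent shorthand; shown only as an alternative, not what the commit does:

    import json

    with open(log_path) as f:   # log_path as defined earlier in the module
        data = json.load(f)
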
@@ -430,8 +430,9 @@ class JenkinsPlugin(object):
         md5sum_old = None
         if os.path.isfile(plugin_file):
             # Make the checksum of the currently installed plugin
-            md5sum_old = hashlib.md5(
-                open(plugin_file, 'rb').read()).hexdigest()
+            with open(plugin_file, 'rb') as md5_plugin_fh:
+                md5_plugin_content = md5_plugin_fh.read()
+            md5sum_old = hashlib.md5(md5_plugin_content).hexdigest()
 
         if self.params['version'] in [None, 'latest']:
             # Take latest version

@@ -482,7 +483,9 @@ class JenkinsPlugin(object):
         plugin_data = self._download_updates()
 
         try:
-            sha1_old = hashlib.sha1(open(plugin_file, 'rb').read())
+            with open(plugin_file, 'rb') as sha1_plugin_fh:
+                sha1_plugin_content = sha1_plugin_fh.read()
+            sha1_old = hashlib.sha1(sha1_plugin_content)
         except Exception as e:
             self.module.fail_json(
                 msg="Cannot calculate SHA1 of the old plugin.",

@@ -52,7 +52,8 @@ class ActionModule(ActionBase):
             if not os.path.isfile(fragment) or (ignore_hidden and os.path.basename(fragment).startswith('.')):
                 continue
 
-            fragment_content = open(self._loader.get_real_file(fragment, decrypt=decrypt), 'rb').read()
+            with open(self._loader.get_real_file(fragment, decrypt=decrypt), 'rb') as fragment_fh:
+                fragment_content = fragment_fh.read()
 
             # always put a newline between fragments if the previous fragment didn't end with a newline.
             if add_newline:

@@ -86,7 +86,8 @@ class Connection(ConnectionBase):
 
         out_path = self._normalize_path(out_path, '/')
         self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
-        content = open(in_path).read()
+        with open(in_path) as in_fh:
+            content = in_fh.read()
         self.client.cmd(self.host, 'file.write', [out_path, content])
 
     # TODO test it

@@ -93,7 +93,10 @@ def parse_cli(output, tmpl):
     except ImportError as exc:
         raise AnsibleError(to_native(exc))
 
-    spec = yaml.safe_load(open(tmpl).read())
+    with open(tmpl) as tmpl_fh:
+        tmpl_content = tmpl_fh.read()
+
+    spec = yaml.safe_load(tmpl_content)
     obj = {}
 
     for name, attrs in iteritems(spec['keys']):

@@ -330,7 +333,10 @@ def parse_xml(output, tmpl):
     except ImportError as exc:
         raise AnsibleError(to_native(exc))
 
-    spec = yaml.safe_load(open(tmpl).read())
+    with open(tmpl) as tmpl_fh:
+        tmpl_content = tmpl_fh.read()
+
+    spec = yaml.safe_load(tmpl_content)
    obj = {}
 
     for name, attrs in iteritems(spec['keys']):