Switched threading to multiprocessing

Both really work the same for the Lock, but this hopefully will
avoid confusing people into thinking we are threaded or thread-safe.
Also did pyflakes cleanup and made a note of why the checksums import exists.
This commit is contained in:
Brian Coca 2016-02-11 22:10:01 -05:00
parent d99955596e
commit 4a043d5e82
3 changed files with 8 additions and 18 deletions

View file

@@ -19,8 +19,8 @@ import collections
import os import os
import sys import sys
import time import time
import threading
from itertools import chain from itertools import chain
from multiprocessing import Lock
from ansible import constants as C from ansible import constants as C
from ansible.cache.base import BaseCacheModule from ansible.cache.base import BaseCacheModule
@@ -51,7 +51,7 @@ class ProxyClientPool(object):
self._num_connections = 0 self._num_connections = 0
self._available_connections = collections.deque(maxlen=self.max_connections) self._available_connections = collections.deque(maxlen=self.max_connections)
self._locked_connections = set() self._locked_connections = set()
self._lock = threading.Lock() self._lock = Lock()
def _check_safe(self): def _check_safe(self):
if self.pid != os.getpid(): if self.pid != os.getpid():

View file

@@ -15,13 +15,12 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>. # along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import json
import os import os
import base64 import base64
import socket import socket
import struct import struct
import time import time
import threading from multiprocessing import Lock
from ansible.callbacks import vvv, vvvv from ansible.callbacks import vvv, vvvv
from ansible.errors import AnsibleError, AnsibleFileNotFound from ansible.errors import AnsibleError, AnsibleFileNotFound
from ansible.runner.connection_plugins.ssh import Connection as SSHConnection from ansible.runner.connection_plugins.ssh import Connection as SSHConnection
@@ -36,7 +35,7 @@ from ansible import constants
# multiple of the value to speed up file reads. # multiple of the value to speed up file reads.
CHUNK_SIZE=1044*20 CHUNK_SIZE=1044*20
_LOCK = threading.Lock() _LOCK = Lock()
class Connection(object): class Connection(object):
''' raw socket accelerated connection ''' ''' raw socket accelerated connection '''
@@ -243,7 +242,7 @@ class Connection(object):
''' run a command on the remote host ''' ''' run a command on the remote host '''
if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported: if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported:
raise errors.AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method) raise AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method)
if in_data: if in_data:
raise AnsibleError("Internal Error: this module does not support optimized module pipelining") raise AnsibleError("Internal Error: this module does not support optimized module pipelining")

View file

@@ -19,9 +19,7 @@ import errno
import sys import sys
import re import re
import os import os
import shlex
import yaml import yaml
import copy
import optparse import optparse
import operator import operator
from ansible import errors from ansible import errors
@@ -29,8 +27,7 @@ from ansible import __version__
from ansible.utils.display_functions import * from ansible.utils.display_functions import *
from ansible.utils.plugins import * from ansible.utils.plugins import *
from ansible.utils.su_prompts import * from ansible.utils.su_prompts import *
from ansible.utils.hashing import secure_hash, secure_hash_s, checksum, checksum_s, md5, md5s from ansible.utils.hashing import secure_hash, secure_hash_s, checksum, checksum_s, md5, md5s #unused here but 'reexported'
from ansible.callbacks import display
from ansible.module_utils.splitter import split_args, unquote from ansible.module_utils.splitter import split_args, unquote
from ansible.module_utils.basic import heuristic_log_sanitize from ansible.module_utils.basic import heuristic_log_sanitize
from ansible.utils.unicode import to_bytes, to_unicode from ansible.utils.unicode import to_bytes, to_unicode
@@ -45,12 +42,11 @@ import pipes
import random import random
import difflib import difflib
import warnings import warnings
import traceback
import getpass import getpass
import subprocess import subprocess
import contextlib import contextlib
import threading
import tempfile import tempfile
from multiprocessing import Lock
from vault import VaultLib from vault import VaultLib
@@ -64,7 +60,7 @@ LOOKUP_REGEX = re.compile(r'lookup\s*\(')
PRINT_CODE_REGEX = re.compile(r'(?:{[{%]|[%}]})') PRINT_CODE_REGEX = re.compile(r'(?:{[{%]|[%}]})')
CODE_REGEX = re.compile(r'(?:{%|%})') CODE_REGEX = re.compile(r'(?:{%|%})')
_LOCK = threading.Lock() _LOCK = Lock()
try: try:
# simplejson can be much faster if it's available # simplejson can be much faster if it's available
@@ -357,9 +353,6 @@ def path_dwim_relative(original, dirname, source, playbook_base, check=True):
''' find one file in a directory one level up in a dir named dirname relative to current ''' ''' find one file in a directory one level up in a dir named dirname relative to current '''
# (used by roles code) # (used by roles code)
from ansible.utils import template
basedir = os.path.dirname(original) basedir = os.path.dirname(original)
if os.path.islink(basedir): if os.path.islink(basedir):
basedir = unfrackpath(basedir) basedir = unfrackpath(basedir)
@@ -552,8 +545,6 @@ def _clean_data_struct(orig_data, from_remote=False, from_inventory=False):
def parse_json(raw_data, from_remote=False, from_inventory=False, no_exceptions=False): def parse_json(raw_data, from_remote=False, from_inventory=False, no_exceptions=False):
''' this version for module return data only ''' ''' this version for module return data only '''
orig_data = raw_data
# ignore stuff like tcgetattr spewage or other warnings # ignore stuff like tcgetattr spewage or other warnings
data = filter_leading_non_json_lines(raw_data) data = filter_leading_non_json_lines(raw_data)