Add mitogen plugin
0 mitogen/ansible_mitogen/__init__.py (new file)
BIN mitogen/ansible_mitogen/__pycache__/__init__.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/affinity.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/connection.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/loaders.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/logging.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/mixins.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/module_finder.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/parsing.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/planner.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/process.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/runner.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/services.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/strategy.cpython-36.pyc (new binary file, not shown)
BIN mitogen/ansible_mitogen/__pycache__/target.cpython-36.pyc (new binary file, not shown)
286 mitogen/ansible_mitogen/affinity.py (new file)
@@ -0,0 +1,286 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""
|
||||
As Mitogen separates asynchronous IO out to a broker thread, communication
|
||||
necessarily involves context switching and waking that thread. When application
|
||||
threads and the broker share a CPU, this can be almost invisibly fast - around
|
||||
25 microseconds for a full A->B->A round-trip.
|
||||
|
||||
However when threads are scheduled on different CPUs, round-trip delays
|
||||
regularly vary wildly, easily stretching into milliseconds. Many contributing factors
|
||||
exist, not least scenarios like:
|
||||
|
||||
1. A is preempted immediately after waking B, but before releasing the GIL.
|
||||
2. B wakes from IO wait only to immediately enter futex wait.
|
||||
3. A may wait 10ms or more for another timeslice, as the scheduler on its CPU
|
||||
runs threads unrelated to its transaction (i.e. not B), waking only to release
|
||||
its GIL, before entering IO sleep waiting for a reply from B, which cannot
|
||||
exist yet.
|
||||
4. B wakes, acquires GIL, performs work, and sends reply to A, causing it to
|
||||
wake. B is preempted before releasing GIL.
|
||||
5. A wakes from IO wait only to immediately enter futex wait.
|
||||
6. B may wait 10ms or more for another timeslice, waking only to release its GIL,
|
||||
before sleeping again.
|
||||
7. A wakes, acquires GIL, finally receives reply.
|
||||
|
||||
Per above if we are unlucky, on an even moderately busy machine it is possible
|
||||
to lose milliseconds just in scheduling delay, and the effect is compounded
|
||||
when pairs of threads in process A are communicating with pairs of threads in
|
||||
process B using the same scheme, such as when Ansible WorkerProcess is
|
||||
communicating with ContextService in the connection multiplexer. In the worst
|
||||
case it could involve 4 threads working in lockstep spread across 4 busy CPUs.
|
||||
|
||||
Since multithreading in Python is essentially useless except for waiting on IO
|
||||
due to the presence of the GIL, at least in Ansible there is no good reason for
|
||||
threads in the same process to run on distinct CPUs - they always operate in
|
||||
lockstep due to the GIL, and are thus vulnerable to issues like above.
|
||||
|
||||
Linux lacks any natural API to describe what we want; it only permits
|
||||
individual threads to be constrained to run on specific CPUs, and for that
|
||||
constraint to be inherited by new threads and forks of the constrained thread.
|
||||
|
||||
This module therefore implements a CPU pinning policy for Ansible processes,
|
||||
providing methods that should be called early in any new process, either to
|
||||
rebalance which CPU it is pinned to, or in the case of subprocesses, to remove
|
||||
the pinning entirely. It is likely to require ongoing tweaking, since pinning
|
||||
necessarily involves preventing the scheduler from making load balancing
|
||||
decisions.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
import ctypes
|
||||
import logging
|
||||
import mmap
|
||||
import multiprocessing
|
||||
import os
|
||||
import struct
|
||||
|
||||
import mitogen.core
|
||||
import mitogen.parent
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
try:
|
||||
_libc = ctypes.CDLL(None, use_errno=True)
|
||||
_strerror = _libc.strerror
|
||||
_strerror.restype = ctypes.c_char_p
|
||||
_sem_init = _libc.sem_init
|
||||
_sem_wait = _libc.sem_wait
|
||||
_sem_post = _libc.sem_post
|
||||
_sched_setaffinity = _libc.sched_setaffinity
|
||||
except (OSError, AttributeError):
|
||||
_libc = None
|
||||
_strerror = None
|
||||
_sem_init = None
|
||||
_sem_wait = None
|
||||
_sem_post = None
|
||||
_sched_setaffinity = None
|
||||
|
||||
|
||||
class sem_t(ctypes.Structure):
|
||||
"""
|
||||
Wrap sem_t to allow storing a lock in shared memory.
|
||||
"""
|
||||
_fields_ = [
|
||||
('data', ctypes.c_uint8 * 128),
|
||||
]
|
||||
|
||||
def init(self):
|
||||
if _sem_init(self.data, 1, 1):
|
||||
raise Exception(_strerror(ctypes.get_errno()))
|
||||
|
||||
def acquire(self):
|
||||
if _sem_wait(self.data):
|
||||
raise Exception(_strerror(ctypes.get_errno()))
|
||||
|
||||
def release(self):
|
||||
if _sem_post(self.data):
|
||||
raise Exception(_strerror(ctypes.get_errno()))
|
||||
|
||||
|
||||
class State(ctypes.Structure):
|
||||
"""
|
||||
Contents of shared memory segment. This allows :meth:`Manager.assign` to be
|
||||
called from any child, since affinity assignment must happen from within
|
||||
the context of the new child process.
|
||||
"""
|
||||
_fields_ = [
|
||||
('lock', sem_t),
|
||||
('counter', ctypes.c_uint8),
|
||||
]
|
||||
|
||||
|
||||
class Policy(object):
|
||||
"""
|
||||
Process affinity policy.
|
||||
"""
|
||||
def assign_controller(self):
|
||||
"""
|
||||
Assign the Ansible top-level policy to this process.
|
||||
"""
|
||||
|
||||
def assign_muxprocess(self, index):
|
||||
"""
|
||||
Assign the MuxProcess policy to this process.
|
||||
"""
|
||||
|
||||
def assign_worker(self):
|
||||
"""
|
||||
Assign the WorkerProcess policy to this process.
|
||||
"""
|
||||
|
||||
def assign_subprocess(self):
|
||||
"""
|
||||
Assign the helper subprocess policy to this process.
|
||||
"""
|
||||
|
||||
class FixedPolicy(Policy):
|
||||
"""
|
||||
:class:`Policy` for machines where the only control method available is
|
||||
fixed CPU placement. The scheme here was tested on an otherwise idle 16
|
||||
thread machine.
|
||||
|
||||
- The connection multiplexer is pinned to CPU 0.
|
||||
- The Ansible top-level (strategy) is pinned to CPU 1.
|
||||
- WorkerProcesses are pinned sequentially to 2..N, wrapping around when no
|
||||
more CPUs exist.
|
||||
- Children such as SSH may be scheduled on any CPU except 0/1.
|
||||
|
||||
If the machine has fewer than 4 cores available, the top-level and workers
|
||||
are pinned between CPU 2..N, i.e. no CPU is reserved for the top-level
|
||||
process.
|
||||
|
||||
This could at least be improved by having workers pinned to independent
|
||||
cores, before reusing the second hyperthread of an existing core.
|
||||
|
||||
A hook is installed that causes :meth:`reset` to run in the child of any
|
||||
process created with :func:`mitogen.parent.popen`, ensuring CPU-intensive
|
||||
children like SSH are not forced to share the same core as the (otherwise
|
||||
potentially very busy) parent.
|
||||
"""
|
||||
def __init__(self, cpu_count=None):
|
||||
#: For tests.
|
||||
self.cpu_count = cpu_count or multiprocessing.cpu_count()
|
||||
self.mem = mmap.mmap(-1, 4096)
|
||||
self.state = State.from_buffer(self.mem)
|
||||
self.state.lock.init()
|
||||
|
||||
if self.cpu_count < 2:
|
||||
# uniprocessor
|
||||
self._reserve_mux = False
|
||||
self._reserve_controller = False
|
||||
self._reserve_mask = 0
|
||||
self._reserve_shift = 0
|
||||
elif self.cpu_count < 4:
|
||||
# small SMP
|
||||
self._reserve_mux = True
|
||||
self._reserve_controller = False
|
||||
self._reserve_mask = 1
|
||||
self._reserve_shift = 1
|
||||
else:
|
||||
# big SMP
|
||||
self._reserve_mux = True
|
||||
self._reserve_controller = True
|
||||
self._reserve_mask = 3
|
||||
self._reserve_shift = 2
|
||||
|
||||
def _set_affinity(self, descr, mask):
|
||||
if descr:
|
||||
LOG.debug('CPU mask for %s: %#08x', descr, mask)
|
||||
mitogen.parent._preexec_hook = self._clear
|
||||
self._set_cpu_mask(mask)
|
||||
|
||||
def _balance(self, descr):
|
||||
self.state.lock.acquire()
|
||||
try:
|
||||
n = self.state.counter
|
||||
self.state.counter += 1
|
||||
finally:
|
||||
self.state.lock.release()
|
||||
|
||||
self._set_cpu(descr, self._reserve_shift + (
|
||||
(n % (self.cpu_count - self._reserve_shift))
|
||||
))
|
||||
|
||||
def _set_cpu(self, descr, cpu):
|
||||
self._set_affinity(descr, 1 << (cpu % self.cpu_count))
|
||||
|
||||
def _clear(self):
|
||||
all_cpus = (1 << self.cpu_count) - 1
|
||||
self._set_affinity(None, all_cpus & ~self._reserve_mask)
|
||||
|
||||
def assign_controller(self):
|
||||
if self._reserve_controller:
|
||||
self._set_cpu('Ansible top-level process', 1)
|
||||
else:
|
||||
self._balance('Ansible top-level process')
|
||||
|
||||
def assign_muxprocess(self, index):
|
||||
self._set_cpu('MuxProcess %d' % (index,), index)
|
||||
|
||||
def assign_worker(self):
|
||||
self._balance('WorkerProcess')
|
||||
|
||||
def assign_subprocess(self):
|
||||
self._clear()
|
||||
|
||||
|
||||
class LinuxPolicy(FixedPolicy):
|
||||
def _mask_to_bytes(self, mask):
|
||||
"""
|
||||
Convert the (type long) mask to a cpu_set_t.
|
||||
"""
|
||||
chunks = []
|
||||
shiftmask = (2 ** 64) - 1
|
||||
for x in range(16):
|
||||
chunks.append(struct.pack('<Q', mask & shiftmask))
|
||||
mask >>= 64
|
||||
return mitogen.core.b('').join(chunks)
|
||||
|
||||
def _get_thread_ids(self):
|
||||
try:
|
||||
ents = os.listdir('/proc/self/task')
|
||||
except OSError:
|
||||
LOG.debug('cannot fetch thread IDs for current process')
|
||||
return [os.getpid()]
|
||||
|
||||
return [int(s) for s in ents if s.isdigit()]
|
||||
|
||||
def _set_cpu_mask(self, mask):
|
||||
s = self._mask_to_bytes(mask)
|
||||
for tid in self._get_thread_ids():
|
||||
_sched_setaffinity(tid, len(s), s)
|
||||
|
||||
|
||||
if _sched_setaffinity is not None:
|
||||
policy = LinuxPolicy()
|
||||
else:
|
||||
policy = Policy()
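# Hypothetical usage sketch; the real call sites live elsewhere in the
# plugin, and this only illustrates the API exposed above:
#
#   import ansible_mitogen.affinity
#   ansible_mitogen.affinity.policy.assign_muxprocess(0)  # connection multiplexer
#   ansible_mitogen.affinity.policy.assign_worker()       # each WorkerProcess
#   ansible_mitogen.affinity.policy.assign_subprocess()   # helper children, e.g. ssh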
0 mitogen/ansible_mitogen/compat/__init__.py (new file)
318 mitogen/ansible_mitogen/compat/simplejson/__init__.py (new file)
@@ -0,0 +1,318 @@
|
||||
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
|
||||
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
|
||||
interchange format.
|
||||
|
||||
:mod:`simplejson` exposes an API familiar to users of the standard library
|
||||
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
|
||||
version of the :mod:`json` library contained in Python 2.6, but maintains
|
||||
compatibility with Python 2.4 and Python 2.5 and (currently) has
|
||||
significant performance advantages, even without using the optional C
|
||||
extension for speedups.
|
||||
|
||||
Encoding basic Python object hierarchies::
|
||||
|
||||
>>> import simplejson as json
|
||||
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
|
||||
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
|
||||
>>> print json.dumps("\"foo\bar")
|
||||
"\"foo\bar"
|
||||
>>> print json.dumps(u'\u1234')
|
||||
"\u1234"
|
||||
>>> print json.dumps('\\')
|
||||
"\\"
|
||||
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
|
||||
{"a": 0, "b": 0, "c": 0}
|
||||
>>> from StringIO import StringIO
|
||||
>>> io = StringIO()
|
||||
>>> json.dump(['streaming API'], io)
|
||||
>>> io.getvalue()
|
||||
'["streaming API"]'
|
||||
|
||||
Compact encoding::
|
||||
|
||||
>>> import simplejson as json
|
||||
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
|
||||
'[1,2,3,{"4":5,"6":7}]'
|
||||
|
||||
Pretty printing::
|
||||
|
||||
>>> import simplejson as json
|
||||
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
|
||||
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
|
||||
{
|
||||
"4": 5,
|
||||
"6": 7
|
||||
}
|
||||
|
||||
Decoding JSON::
|
||||
|
||||
>>> import simplejson as json
|
||||
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
|
||||
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
|
||||
True
|
||||
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
|
||||
True
|
||||
>>> from StringIO import StringIO
|
||||
>>> io = StringIO('["streaming API"]')
|
||||
>>> json.load(io)[0] == 'streaming API'
|
||||
True
|
||||
|
||||
Specializing JSON object decoding::
|
||||
|
||||
>>> import simplejson as json
|
||||
>>> def as_complex(dct):
|
||||
... if '__complex__' in dct:
|
||||
... return complex(dct['real'], dct['imag'])
|
||||
... return dct
|
||||
...
|
||||
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
|
||||
... object_hook=as_complex)
|
||||
(1+2j)
|
||||
>>> import decimal
|
||||
>>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
|
||||
True
|
||||
|
||||
Specializing JSON object encoding::
|
||||
|
||||
>>> import simplejson as json
|
||||
>>> def encode_complex(obj):
|
||||
... if isinstance(obj, complex):
|
||||
... return [obj.real, obj.imag]
|
||||
... raise TypeError(repr(obj) + " is not JSON serializable")
|
||||
...
|
||||
>>> json.dumps(2 + 1j, default=encode_complex)
|
||||
'[2.0, 1.0]'
|
||||
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
|
||||
'[2.0, 1.0]'
|
||||
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
|
||||
'[2.0, 1.0]'
|
||||
|
||||
|
||||
Using simplejson.tool from the shell to validate and pretty-print::
|
||||
|
||||
$ echo '{"json":"obj"}' | python -m simplejson.tool
|
||||
{
|
||||
"json": "obj"
|
||||
}
|
||||
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
|
||||
Expecting property name: line 1 column 2 (char 2)
|
||||
"""
|
||||
__version__ = '2.0.9'
|
||||
__all__ = [
|
||||
'dump', 'dumps', 'load', 'loads',
|
||||
'JSONDecoder', 'JSONEncoder',
|
||||
]
|
||||
|
||||
__author__ = 'Bob Ippolito <bob@redivi.com>'
|
||||
|
||||
from decoder import JSONDecoder
|
||||
from encoder import JSONEncoder
|
||||
|
||||
_default_encoder = JSONEncoder(
|
||||
skipkeys=False,
|
||||
ensure_ascii=True,
|
||||
check_circular=True,
|
||||
allow_nan=True,
|
||||
indent=None,
|
||||
separators=None,
|
||||
encoding='utf-8',
|
||||
default=None,
|
||||
)
|
||||
|
||||
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
|
||||
allow_nan=True, cls=None, indent=None, separators=None,
|
||||
encoding='utf-8', default=None, **kw):
|
||||
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
|
||||
``.write()``-supporting file-like object).
|
||||
|
||||
If ``skipkeys`` is true then ``dict`` keys that are not basic types
|
||||
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
|
||||
will be skipped instead of raising a ``TypeError``.
|
||||
|
||||
If ``ensure_ascii`` is false, then the some chunks written to ``fp``
|
||||
may be ``unicode`` instances, subject to normal Python ``str`` to
|
||||
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
|
||||
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
|
||||
to cause an error.
|
||||
|
||||
If ``check_circular`` is false, then the circular reference check
|
||||
for container types will be skipped and a circular reference will
|
||||
result in an ``OverflowError`` (or worse).
|
||||
|
||||
If ``allow_nan`` is false, then it will be a ``ValueError`` to
|
||||
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
|
||||
in strict compliance of the JSON specification, instead of using the
|
||||
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
|
||||
|
||||
If ``indent`` is a non-negative integer, then JSON array elements and object
|
||||
members will be pretty-printed with that indent level. An indent level
|
||||
of 0 will only insert newlines. ``None`` is the most compact representation.
|
||||
|
||||
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
|
||||
then it will be used instead of the default ``(', ', ': ')`` separators.
|
||||
``(',', ':')`` is the most compact JSON representation.
|
||||
|
||||
``encoding`` is the character encoding for str instances, default is UTF-8.
|
||||
|
||||
``default(obj)`` is a function that should return a serializable version
|
||||
of obj or raise TypeError. The default simply raises TypeError.
|
||||
|
||||
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
|
||||
``.default()`` method to serialize additional types), specify it with
|
||||
the ``cls`` kwarg.
|
||||
|
||||
"""
|
||||
# cached encoder
|
||||
if (not skipkeys and ensure_ascii and
|
||||
check_circular and allow_nan and
|
||||
cls is None and indent is None and separators is None and
|
||||
encoding == 'utf-8' and default is None and not kw):
|
||||
iterable = _default_encoder.iterencode(obj)
|
||||
else:
|
||||
if cls is None:
|
||||
cls = JSONEncoder
|
||||
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
||||
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
|
||||
separators=separators, encoding=encoding,
|
||||
default=default, **kw).iterencode(obj)
|
||||
# could accelerate with writelines in some versions of Python, at
|
||||
# a debuggability cost
|
||||
for chunk in iterable:
|
||||
fp.write(chunk)
|
||||
|
||||
|
||||
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
|
||||
allow_nan=True, cls=None, indent=None, separators=None,
|
||||
encoding='utf-8', default=None, **kw):
|
||||
"""Serialize ``obj`` to a JSON formatted ``str``.
|
||||
|
||||
If ``skipkeys`` is true then ``dict`` keys that are not basic types
|
||||
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
|
||||
will be skipped instead of raising a ``TypeError``.
|
||||
|
||||
If ``ensure_ascii`` is false, then the return value will be a
|
||||
``unicode`` instance subject to normal Python ``str`` to ``unicode``
|
||||
coercion rules instead of being escaped to an ASCII ``str``.
|
||||
|
||||
If ``check_circular`` is false, then the circular reference check
|
||||
for container types will be skipped and a circular reference will
|
||||
result in an ``OverflowError`` (or worse).
|
||||
|
||||
If ``allow_nan`` is false, then it will be a ``ValueError`` to
|
||||
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
|
||||
strict compliance of the JSON specification, instead of using the
|
||||
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
|
||||
|
||||
If ``indent`` is a non-negative integer, then JSON array elements and
|
||||
object members will be pretty-printed with that indent level. An indent
|
||||
level of 0 will only insert newlines. ``None`` is the most compact
|
||||
representation.
|
||||
|
||||
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
|
||||
then it will be used instead of the default ``(', ', ': ')`` separators.
|
||||
``(',', ':')`` is the most compact JSON representation.
|
||||
|
||||
``encoding`` is the character encoding for str instances, default is UTF-8.
|
||||
|
||||
``default(obj)`` is a function that should return a serializable version
|
||||
of obj or raise TypeError. The default simply raises TypeError.
|
||||
|
||||
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
|
||||
``.default()`` method to serialize additional types), specify it with
|
||||
the ``cls`` kwarg.
|
||||
|
||||
"""
|
||||
# cached encoder
|
||||
if (not skipkeys and ensure_ascii and
|
||||
check_circular and allow_nan and
|
||||
cls is None and indent is None and separators is None and
|
||||
encoding == 'utf-8' and default is None and not kw):
|
||||
return _default_encoder.encode(obj)
|
||||
if cls is None:
|
||||
cls = JSONEncoder
|
||||
return cls(
|
||||
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
||||
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
|
||||
separators=separators, encoding=encoding, default=default,
|
||||
**kw).encode(obj)
|
||||
|
||||
|
||||
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
|
||||
|
||||
|
||||
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
|
||||
parse_int=None, parse_constant=None, **kw):
|
||||
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
|
||||
a JSON document) to a Python object.
|
||||
|
||||
If the contents of ``fp`` are encoded with an ASCII based encoding other
|
||||
than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
|
||||
be specified. Encodings that are not ASCII based (such as UCS-2) are
|
||||
not allowed, and should be wrapped with
|
||||
``codecs.getreader(encoding)(fp)``, or simply decoded to a ``unicode``
|
||||
object and passed to ``loads()``
|
||||
|
||||
``object_hook`` is an optional function that will be called with the
|
||||
result of any object literal decode (a ``dict``). The return value of
|
||||
``object_hook`` will be used instead of the ``dict``. This feature
|
||||
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
|
||||
|
||||
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
|
||||
kwarg.
|
||||
|
||||
"""
|
||||
return loads(fp.read(),
|
||||
encoding=encoding, cls=cls, object_hook=object_hook,
|
||||
parse_float=parse_float, parse_int=parse_int,
|
||||
parse_constant=parse_constant, **kw)
|
||||
|
||||
|
||||
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
|
||||
parse_int=None, parse_constant=None, **kw):
|
||||
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
|
||||
document) to a Python object.
|
||||
|
||||
If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
|
||||
other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
|
||||
must be specified. Encodings that are not ASCII based (such as UCS-2)
|
||||
are not allowed and should be decoded to ``unicode`` first.
|
||||
|
||||
``object_hook`` is an optional function that will be called with the
|
||||
result of any object literal decode (a ``dict``). The return value of
|
||||
``object_hook`` will be used instead of the ``dict``. This feature
|
||||
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
|
||||
|
||||
``parse_float``, if specified, will be called with the string
|
||||
of every JSON float to be decoded. By default this is equivalent to
|
||||
float(num_str). This can be used to use another datatype or parser
|
||||
for JSON floats (e.g. decimal.Decimal).
|
||||
|
||||
``parse_int``, if specified, will be called with the string
|
||||
of every JSON int to be decoded. By default this is equivalent to
|
||||
int(num_str). This can be used to use another datatype or parser
|
||||
for JSON integers (e.g. float).
|
||||
|
||||
``parse_constant``, if specified, will be called with one of the
|
||||
following strings: -Infinity, Infinity, NaN.
|
||||
This can be used to raise an exception if invalid JSON numbers
|
||||
are encountered.
|
||||
|
||||
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
|
||||
kwarg.
|
||||
|
||||
"""
|
||||
if (cls is None and encoding is None and object_hook is None and
|
||||
parse_int is None and parse_float is None and
|
||||
parse_constant is None and not kw):
|
||||
return _default_decoder.decode(s)
|
||||
if cls is None:
|
||||
cls = JSONDecoder
|
||||
if object_hook is not None:
|
||||
kw['object_hook'] = object_hook
|
||||
if parse_float is not None:
|
||||
kw['parse_float'] = parse_float
|
||||
if parse_int is not None:
|
||||
kw['parse_int'] = parse_int
|
||||
if parse_constant is not None:
|
||||
kw['parse_constant'] = parse_constant
|
||||
return cls(encoding=encoding, **kw).decode(s)
354 mitogen/ansible_mitogen/compat/simplejson/decoder.py (new file)
@@ -0,0 +1,354 @@
|
||||
"""Implementation of JSONDecoder
|
||||
"""
|
||||
import re
|
||||
import sys
|
||||
import struct
|
||||
|
||||
from simplejson.scanner import make_scanner
|
||||
try:
|
||||
from simplejson._speedups import scanstring as c_scanstring
|
||||
except ImportError:
|
||||
c_scanstring = None
|
||||
|
||||
__all__ = ['JSONDecoder']
|
||||
|
||||
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
|
||||
|
||||
def _floatconstants():
|
||||
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
|
||||
if sys.byteorder != 'big':
|
||||
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
|
||||
nan, inf = struct.unpack('dd', _BYTES)
|
||||
return nan, inf, -inf
|
||||
|
||||
NaN, PosInf, NegInf = _floatconstants()
|
||||
|
||||
|
||||
def linecol(doc, pos):
|
||||
lineno = doc.count('\n', 0, pos) + 1
|
||||
if lineno == 1:
|
||||
colno = pos
|
||||
else:
|
||||
colno = pos - doc.rindex('\n', 0, pos)
|
||||
return lineno, colno
|
||||
|
||||
|
||||
def errmsg(msg, doc, pos, end=None):
|
||||
# Note that this function is called from _speedups
|
||||
lineno, colno = linecol(doc, pos)
|
||||
if end is None:
|
||||
#fmt = '{0}: line {1} column {2} (char {3})'
|
||||
#return fmt.format(msg, lineno, colno, pos)
|
||||
fmt = '%s: line %d column %d (char %d)'
|
||||
return fmt % (msg, lineno, colno, pos)
|
||||
endlineno, endcolno = linecol(doc, end)
|
||||
#fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
|
||||
#return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
|
||||
fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
|
||||
return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
|
||||
|
||||
|
||||
_CONSTANTS = {
|
||||
'-Infinity': NegInf,
|
||||
'Infinity': PosInf,
|
||||
'NaN': NaN,
|
||||
}
|
||||
|
||||
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
|
||||
BACKSLASH = {
|
||||
'"': u'"', '\\': u'\\', '/': u'/',
|
||||
'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
|
||||
}
|
||||
|
||||
DEFAULT_ENCODING = "utf-8"
|
||||
|
||||
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
|
||||
"""Scan the string s for a JSON string. End is the index of the
|
||||
character in s after the quote that started the JSON string.
|
||||
Unescapes all valid JSON string escape sequences and raises ValueError
|
||||
on attempt to decode an invalid string. If strict is False then literal
|
||||
control characters are allowed in the string.
|
||||
|
||||
Returns a tuple of the decoded string and the index of the character in s
|
||||
after the end quote."""
|
||||
if encoding is None:
|
||||
encoding = DEFAULT_ENCODING
|
||||
chunks = []
|
||||
_append = chunks.append
|
||||
begin = end - 1
|
||||
while 1:
|
||||
chunk = _m(s, end)
|
||||
if chunk is None:
|
||||
raise ValueError(
|
||||
errmsg("Unterminated string starting at", s, begin))
|
||||
end = chunk.end()
|
||||
content, terminator = chunk.groups()
|
||||
# Content contains zero or more unescaped string characters
|
||||
if content:
|
||||
if not isinstance(content, unicode):
|
||||
content = unicode(content, encoding)
|
||||
_append(content)
|
||||
# Terminator is the end of string, a literal control character,
|
||||
# or a backslash denoting that an escape sequence follows
|
||||
if terminator == '"':
|
||||
break
|
||||
elif terminator != '\\':
|
||||
if strict:
|
||||
msg = "Invalid control character %r at" % (terminator,)
|
||||
#msg = "Invalid control character {0!r} at".format(terminator)
|
||||
raise ValueError(errmsg(msg, s, end))
|
||||
else:
|
||||
_append(terminator)
|
||||
continue
|
||||
try:
|
||||
esc = s[end]
|
||||
except IndexError:
|
||||
raise ValueError(
|
||||
errmsg("Unterminated string starting at", s, begin))
|
||||
# If not a unicode escape sequence, must be in the lookup table
|
||||
if esc != 'u':
|
||||
try:
|
||||
char = _b[esc]
|
||||
except KeyError:
|
||||
msg = "Invalid \\escape: " + repr(esc)
|
||||
raise ValueError(errmsg(msg, s, end))
|
||||
end += 1
|
||||
else:
|
||||
# Unicode escape sequence
|
||||
esc = s[end + 1:end + 5]
|
||||
next_end = end + 5
|
||||
if len(esc) != 4:
|
||||
msg = "Invalid \\uXXXX escape"
|
||||
raise ValueError(errmsg(msg, s, end))
|
||||
uni = int(esc, 16)
|
||||
# Check for surrogate pair on UCS-4 systems
|
||||
if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
|
||||
msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
|
||||
if not s[end + 5:end + 7] == '\\u':
|
||||
raise ValueError(errmsg(msg, s, end))
|
||||
esc2 = s[end + 7:end + 11]
|
||||
if len(esc2) != 4:
|
||||
raise ValueError(errmsg(msg, s, end))
|
||||
uni2 = int(esc2, 16)
|
||||
uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
|
||||
next_end += 6
|
||||
char = unichr(uni)
|
||||
end = next_end
|
||||
# Append the unescaped character
|
||||
_append(char)
|
||||
return u''.join(chunks), end
|
||||
|
||||
|
||||
# Use speedup if available
|
||||
scanstring = c_scanstring or py_scanstring
|
||||
|
||||
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
|
||||
WHITESPACE_STR = ' \t\n\r'
|
||||
|
||||
def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
|
||||
pairs = {}
|
||||
# Use a slice to prevent IndexError from being raised, the following
|
||||
# check will raise a more specific ValueError if the string is empty
|
||||
nextchar = s[end:end + 1]
|
||||
# Normally we expect nextchar == '"'
|
||||
if nextchar != '"':
|
||||
if nextchar in _ws:
|
||||
end = _w(s, end).end()
|
||||
nextchar = s[end:end + 1]
|
||||
# Trivial empty object
|
||||
if nextchar == '}':
|
||||
return pairs, end + 1
|
||||
elif nextchar != '"':
|
||||
raise ValueError(errmsg("Expecting property name", s, end))
|
||||
end += 1
|
||||
while True:
|
||||
key, end = scanstring(s, end, encoding, strict)
|
||||
|
||||
# To skip some function call overhead we optimize the fast paths where
|
||||
# the JSON key separator is ": " or just ":".
|
||||
if s[end:end + 1] != ':':
|
||||
end = _w(s, end).end()
|
||||
if s[end:end + 1] != ':':
|
||||
raise ValueError(errmsg("Expecting : delimiter", s, end))
|
||||
|
||||
end += 1
|
||||
|
||||
try:
|
||||
if s[end] in _ws:
|
||||
end += 1
|
||||
if s[end] in _ws:
|
||||
end = _w(s, end + 1).end()
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
try:
|
||||
value, end = scan_once(s, end)
|
||||
except StopIteration:
|
||||
raise ValueError(errmsg("Expecting object", s, end))
|
||||
pairs[key] = value
|
||||
|
||||
try:
|
||||
nextchar = s[end]
|
||||
if nextchar in _ws:
|
||||
end = _w(s, end + 1).end()
|
||||
nextchar = s[end]
|
||||
except IndexError:
|
||||
nextchar = ''
|
||||
end += 1
|
||||
|
||||
if nextchar == '}':
|
||||
break
|
||||
elif nextchar != ',':
|
||||
raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
|
||||
|
||||
try:
|
||||
nextchar = s[end]
|
||||
if nextchar in _ws:
|
||||
end += 1
|
||||
nextchar = s[end]
|
||||
if nextchar in _ws:
|
||||
end = _w(s, end + 1).end()
|
||||
nextchar = s[end]
|
||||
except IndexError:
|
||||
nextchar = ''
|
||||
|
||||
end += 1
|
||||
if nextchar != '"':
|
||||
raise ValueError(errmsg("Expecting property name", s, end - 1))
|
||||
|
||||
if object_hook is not None:
|
||||
pairs = object_hook(pairs)
|
||||
return pairs, end
|
||||
|
||||
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
|
||||
values = []
|
||||
nextchar = s[end:end + 1]
|
||||
if nextchar in _ws:
|
||||
end = _w(s, end + 1).end()
|
||||
nextchar = s[end:end + 1]
|
||||
# Look-ahead for trivial empty array
|
||||
if nextchar == ']':
|
||||
return values, end + 1
|
||||
_append = values.append
|
||||
while True:
|
||||
try:
|
||||
value, end = scan_once(s, end)
|
||||
except StopIteration:
|
||||
raise ValueError(errmsg("Expecting object", s, end))
|
||||
_append(value)
|
||||
nextchar = s[end:end + 1]
|
||||
if nextchar in _ws:
|
||||
end = _w(s, end + 1).end()
|
||||
nextchar = s[end:end + 1]
|
||||
end += 1
|
||||
if nextchar == ']':
|
||||
break
|
||||
elif nextchar != ',':
|
||||
raise ValueError(errmsg("Expecting , delimiter", s, end))
|
||||
|
||||
try:
|
||||
if s[end] in _ws:
|
||||
end += 1
|
||||
if s[end] in _ws:
|
||||
end = _w(s, end + 1).end()
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
return values, end
|
||||
|
||||
class JSONDecoder(object):
|
||||
"""Simple JSON <http://json.org> decoder
|
||||
|
||||
Performs the following translations in decoding by default:
|
||||
|
||||
+---------------+-------------------+
|
||||
| JSON | Python |
|
||||
+===============+===================+
|
||||
| object | dict |
|
||||
+---------------+-------------------+
|
||||
| array | list |
|
||||
+---------------+-------------------+
|
||||
| string | unicode |
|
||||
+---------------+-------------------+
|
||||
| number (int) | int, long |
|
||||
+---------------+-------------------+
|
||||
| number (real) | float |
|
||||
+---------------+-------------------+
|
||||
| true | True |
|
||||
+---------------+-------------------+
|
||||
| false | False |
|
||||
+---------------+-------------------+
|
||||
| null | None |
|
||||
+---------------+-------------------+
|
||||
|
||||
It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
|
||||
their corresponding ``float`` values, which is outside the JSON spec.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, encoding=None, object_hook=None, parse_float=None,
|
||||
parse_int=None, parse_constant=None, strict=True):
|
||||
"""``encoding`` determines the encoding used to interpret any ``str``
|
||||
objects decoded by this instance (utf-8 by default). It has no
|
||||
effect when decoding ``unicode`` objects.
|
||||
|
||||
Note that currently only encodings that are a superset of ASCII work,
|
||||
strings of other encodings should be passed in as ``unicode``.
|
||||
|
||||
``object_hook``, if specified, will be called with the result
|
||||
of every JSON object decoded and its return value will be used in
|
||||
place of the given ``dict``. This can be used to provide custom
|
||||
deserializations (e.g. to support JSON-RPC class hinting).
|
||||
|
||||
``parse_float``, if specified, will be called with the string
|
||||
of every JSON float to be decoded. By default this is equivalent to
|
||||
float(num_str). This can be used to use another datatype or parser
|
||||
for JSON floats (e.g. decimal.Decimal).
|
||||
|
||||
``parse_int``, if specified, will be called with the string
|
||||
of every JSON int to be decoded. By default this is equivalent to
|
||||
int(num_str). This can be used to use another datatype or parser
|
||||
for JSON integers (e.g. float).
|
||||
|
||||
``parse_constant``, if specified, will be called with one of the
|
||||
following strings: -Infinity, Infinity, NaN.
|
||||
This can be used to raise an exception if invalid JSON numbers
|
||||
are encountered.
|
||||
|
||||
"""
|
||||
self.encoding = encoding
|
||||
self.object_hook = object_hook
|
||||
self.parse_float = parse_float or float
|
||||
self.parse_int = parse_int or int
|
||||
self.parse_constant = parse_constant or _CONSTANTS.__getitem__
|
||||
self.strict = strict
|
||||
self.parse_object = JSONObject
|
||||
self.parse_array = JSONArray
|
||||
self.parse_string = scanstring
|
||||
self.scan_once = make_scanner(self)
|
||||
|
||||
def decode(self, s, _w=WHITESPACE.match):
|
||||
"""Return the Python representation of ``s`` (a ``str`` or ``unicode``
|
||||
instance containing a JSON document)
|
||||
|
||||
"""
|
||||
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
|
||||
end = _w(s, end).end()
|
||||
if end != len(s):
|
||||
raise ValueError(errmsg("Extra data", s, end, len(s)))
|
||||
return obj
|
||||
|
||||
def raw_decode(self, s, idx=0):
|
||||
"""Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
|
||||
with a JSON document) and return a 2-tuple of the Python
|
||||
representation and the index in ``s`` where the document ended.
|
||||
|
||||
This can be used to decode a JSON document from a string that may
|
||||
have extraneous data at the end.
|
||||
|
||||
"""
|
||||
try:
|
||||
obj, end = self.scan_once(s, idx)
|
||||
except StopIteration:
|
||||
raise ValueError("No JSON object could be decoded")
|
||||
return obj, end
440 mitogen/ansible_mitogen/compat/simplejson/encoder.py (new file)
@@ -0,0 +1,440 @@
|
||||
"""Implementation of JSONEncoder
|
||||
"""
|
||||
import re
|
||||
|
||||
try:
|
||||
from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
|
||||
except ImportError:
|
||||
c_encode_basestring_ascii = None
|
||||
try:
|
||||
from simplejson._speedups import make_encoder as c_make_encoder
|
||||
except ImportError:
|
||||
c_make_encoder = None
|
||||
|
||||
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
|
||||
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
|
||||
HAS_UTF8 = re.compile(r'[\x80-\xff]')
|
||||
ESCAPE_DCT = {
|
||||
'\\': '\\\\',
|
||||
'"': '\\"',
|
||||
'\b': '\\b',
|
||||
'\f': '\\f',
|
||||
'\n': '\\n',
|
||||
'\r': '\\r',
|
||||
'\t': '\\t',
|
||||
}
|
||||
for i in range(0x20):
|
||||
#ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
|
||||
ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
|
||||
|
||||
# Assume this produces an infinity on all machines (probably not guaranteed)
|
||||
INFINITY = float('1e66666')
|
||||
FLOAT_REPR = repr
|
||||
|
||||
def encode_basestring(s):
|
||||
"""Return a JSON representation of a Python string
|
||||
|
||||
"""
|
||||
def replace(match):
|
||||
return ESCAPE_DCT[match.group(0)]
|
||||
return '"' + ESCAPE.sub(replace, s) + '"'
|
||||
|
||||
|
||||
def py_encode_basestring_ascii(s):
|
||||
"""Return an ASCII-only JSON representation of a Python string
|
||||
|
||||
"""
|
||||
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
|
||||
s = s.decode('utf-8')
|
||||
def replace(match):
|
||||
s = match.group(0)
|
||||
try:
|
||||
return ESCAPE_DCT[s]
|
||||
except KeyError:
|
||||
n = ord(s)
|
||||
if n < 0x10000:
|
||||
#return '\\u{0:04x}'.format(n)
|
||||
return '\\u%04x' % (n,)
|
||||
else:
|
||||
# surrogate pair
|
||||
n -= 0x10000
|
||||
s1 = 0xd800 | ((n >> 10) & 0x3ff)
|
||||
s2 = 0xdc00 | (n & 0x3ff)
|
||||
#return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
|
||||
return '\\u%04x\\u%04x' % (s1, s2)
|
||||
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
|
||||
|
||||
|
||||
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
|
||||
|
||||
class JSONEncoder(object):
|
||||
"""Extensible JSON <http://json.org> encoder for Python data structures.
|
||||
|
||||
Supports the following objects and types by default:
|
||||
|
||||
+-------------------+---------------+
|
||||
| Python | JSON |
|
||||
+===================+===============+
|
||||
| dict | object |
|
||||
+-------------------+---------------+
|
||||
| list, tuple | array |
|
||||
+-------------------+---------------+
|
||||
| str, unicode | string |
|
||||
+-------------------+---------------+
|
||||
| int, long, float | number |
|
||||
+-------------------+---------------+
|
||||
| True | true |
|
||||
+-------------------+---------------+
|
||||
| False | false |
|
||||
+-------------------+---------------+
|
||||
| None | null |
|
||||
+-------------------+---------------+
|
||||
|
||||
To extend this to recognize other objects, subclass and implement a
|
||||
``.default()`` method that returns a serializable
|
||||
object for ``o`` if possible, otherwise it should call the superclass
|
||||
implementation (to raise ``TypeError``).
|
||||
|
||||
"""
|
||||
item_separator = ', '
|
||||
key_separator = ': '
|
||||
def __init__(self, skipkeys=False, ensure_ascii=True,
|
||||
check_circular=True, allow_nan=True, sort_keys=False,
|
||||
indent=None, separators=None, encoding='utf-8', default=None):
|
||||
"""Constructor for JSONEncoder, with sensible defaults.
|
||||
|
||||
If skipkeys is false, then it is a TypeError to attempt
|
||||
encoding of keys that are not str, int, long, float or None. If
|
||||
skipkeys is True, such items are simply skipped.
|
||||
|
||||
If ensure_ascii is true, the output is guaranteed to be str
|
||||
objects with all incoming unicode characters escaped. If
|
||||
ensure_ascii is false, the output will be a unicode object.
|
||||
|
||||
If check_circular is true, then lists, dicts, and custom encoded
|
||||
objects will be checked for circular references during encoding to
|
||||
prevent an infinite recursion (which would cause an OverflowError).
|
||||
Otherwise, no such check takes place.
|
||||
|
||||
If allow_nan is true, then NaN, Infinity, and -Infinity will be
|
||||
encoded as such. This behavior is not JSON specification compliant,
|
||||
but is consistent with most JavaScript based encoders and decoders.
|
||||
Otherwise, it will be a ValueError to encode such floats.
|
||||
|
||||
If sort_keys is true, then the output of dictionaries will be
|
||||
sorted by key; this is useful for regression tests to ensure
|
||||
that JSON serializations can be compared on a day-to-day basis.
|
||||
|
||||
If indent is a non-negative integer, then JSON array
|
||||
elements and object members will be pretty-printed with that
|
||||
indent level. An indent level of 0 will only insert newlines.
|
||||
None is the most compact representation.
|
||||
|
||||
If specified, separators should be a (item_separator, key_separator)
|
||||
tuple. The default is (', ', ': '). To get the most compact JSON
|
||||
representation you should specify (',', ':') to eliminate whitespace.
|
||||
|
||||
If specified, default is a function that gets called for objects
|
||||
that can't otherwise be serialized. It should return a JSON encodable
|
||||
version of the object or raise a ``TypeError``.
|
||||
|
||||
If encoding is not None, then all input strings will be
|
||||
transformed into unicode using that encoding prior to JSON-encoding.
|
||||
The default is UTF-8.
|
||||
|
||||
"""
|
||||
|
||||
self.skipkeys = skipkeys
|
||||
self.ensure_ascii = ensure_ascii
|
||||
self.check_circular = check_circular
|
||||
self.allow_nan = allow_nan
|
||||
self.sort_keys = sort_keys
|
||||
self.indent = indent
|
||||
if separators is not None:
|
||||
self.item_separator, self.key_separator = separators
|
||||
if default is not None:
|
||||
self.default = default
|
||||
self.encoding = encoding
|
||||
|
||||
def default(self, o):
|
||||
"""Implement this method in a subclass such that it returns
|
||||
a serializable object for ``o``, or calls the base implementation
|
||||
(to raise a ``TypeError``).
|
||||
|
||||
For example, to support arbitrary iterators, you could
|
||||
implement default like this::
|
||||
|
||||
def default(self, o):
|
||||
try:
|
||||
iterable = iter(o)
|
||||
except TypeError:
|
||||
pass
|
||||
else:
|
||||
return list(iterable)
|
||||
return JSONEncoder.default(self, o)
|
||||
|
||||
"""
|
||||
raise TypeError(repr(o) + " is not JSON serializable")
|
||||
|
||||
def encode(self, o):
|
||||
"""Return a JSON string representation of a Python data structure.
|
||||
|
||||
>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
|
||||
'{"foo": ["bar", "baz"]}'
|
||||
|
||||
"""
|
||||
# This is for extremely simple cases and benchmarks.
|
||||
if isinstance(o, basestring):
|
||||
if isinstance(o, str):
|
||||
_encoding = self.encoding
|
||||
if (_encoding is not None
|
||||
and not (_encoding == 'utf-8')):
|
||||
o = o.decode(_encoding)
|
||||
if self.ensure_ascii:
|
||||
return encode_basestring_ascii(o)
|
||||
else:
|
||||
return encode_basestring(o)
|
||||
# This doesn't pass the iterator directly to ''.join() because the
|
||||
# exceptions aren't as detailed. The list call should be roughly
|
||||
# equivalent to the PySequence_Fast that ''.join() would do.
|
||||
chunks = self.iterencode(o, _one_shot=True)
|
||||
if not isinstance(chunks, (list, tuple)):
|
||||
chunks = list(chunks)
|
||||
return ''.join(chunks)
|
||||
|
||||
def iterencode(self, o, _one_shot=False):
|
||||
"""Encode the given object and yield each string
|
||||
representation as available.
|
||||
|
||||
For example::
|
||||
|
||||
for chunk in JSONEncoder().iterencode(bigobject):
|
||||
mysocket.write(chunk)
|
||||
|
||||
"""
|
||||
if self.check_circular:
|
||||
markers = {}
|
||||
else:
|
||||
markers = None
|
||||
if self.ensure_ascii:
|
||||
_encoder = encode_basestring_ascii
|
||||
else:
|
||||
_encoder = encode_basestring
|
||||
if self.encoding != 'utf-8':
|
||||
def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
|
||||
if isinstance(o, str):
|
||||
o = o.decode(_encoding)
|
||||
return _orig_encoder(o)
|
||||
|
||||
def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
|
||||
# Check for specials. Note that this type of test is processor- and/or
|
||||
# platform-specific, so do tests which don't depend on the internals.
|
||||
|
||||
if o != o:
|
||||
text = 'NaN'
|
||||
elif o == _inf:
|
||||
text = 'Infinity'
|
||||
elif o == _neginf:
|
||||
text = '-Infinity'
|
||||
else:
|
||||
return _repr(o)
|
||||
|
||||
if not allow_nan:
|
||||
raise ValueError(
|
||||
"Out of range float values are not JSON compliant: " +
|
||||
repr(o))
|
||||
|
||||
return text
|
||||
|
||||
|
||||
if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
|
||||
_iterencode = c_make_encoder(
|
||||
markers, self.default, _encoder, self.indent,
|
||||
self.key_separator, self.item_separator, self.sort_keys,
|
||||
self.skipkeys, self.allow_nan)
|
||||
else:
|
||||
_iterencode = _make_iterencode(
|
||||
markers, self.default, _encoder, self.indent, floatstr,
|
||||
self.key_separator, self.item_separator, self.sort_keys,
|
||||
self.skipkeys, _one_shot)
|
||||
return _iterencode(o, 0)
|
||||
|
||||
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
|
||||
## HACK: hand-optimized bytecode; turn globals into locals
|
||||
False=False,
|
||||
True=True,
|
||||
ValueError=ValueError,
|
||||
basestring=basestring,
|
||||
dict=dict,
|
||||
float=float,
|
||||
id=id,
|
||||
int=int,
|
||||
isinstance=isinstance,
|
||||
list=list,
|
||||
long=long,
|
||||
str=str,
|
||||
tuple=tuple,
|
||||
):
|
||||
|
||||
def _iterencode_list(lst, _current_indent_level):
|
||||
if not lst:
|
||||
yield '[]'
|
||||
return
|
||||
if markers is not None:
|
||||
markerid = id(lst)
|
||||
if markerid in markers:
|
||||
raise ValueError("Circular reference detected")
|
||||
markers[markerid] = lst
|
||||
buf = '['
|
||||
if _indent is not None:
|
||||
_current_indent_level += 1
|
||||
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
|
||||
separator = _item_separator + newline_indent
|
||||
buf += newline_indent
|
||||
else:
|
||||
newline_indent = None
|
||||
separator = _item_separator
|
||||
first = True
|
||||
for value in lst:
|
||||
if first:
|
||||
first = False
|
||||
else:
|
||||
buf = separator
|
||||
if isinstance(value, basestring):
|
||||
yield buf + _encoder(value)
|
||||
elif value is None:
|
||||
yield buf + 'null'
|
||||
elif value is True:
|
||||
yield buf + 'true'
|
||||
elif value is False:
|
||||
yield buf + 'false'
|
||||
elif isinstance(value, (int, long)):
|
||||
yield buf + str(value)
|
||||
elif isinstance(value, float):
|
||||
yield buf + _floatstr(value)
|
||||
else:
|
||||
yield buf
|
||||
if isinstance(value, (list, tuple)):
|
||||
chunks = _iterencode_list(value, _current_indent_level)
|
||||
elif isinstance(value, dict):
|
||||
chunks = _iterencode_dict(value, _current_indent_level)
|
||||
else:
|
||||
chunks = _iterencode(value, _current_indent_level)
|
||||
for chunk in chunks:
|
||||
yield chunk
|
||||
if newline_indent is not None:
|
||||
_current_indent_level -= 1
|
||||
yield '\n' + (' ' * (_indent * _current_indent_level))
|
||||
yield ']'
|
||||
if markers is not None:
|
||||
del markers[markerid]
|
||||
|
||||
def _iterencode_dict(dct, _current_indent_level):
|
||||
if not dct:
|
||||
yield '{}'
|
||||
return
|
||||
if markers is not None:
|
||||
markerid = id(dct)
|
||||
if markerid in markers:
|
||||
raise ValueError("Circular reference detected")
|
||||
markers[markerid] = dct
|
||||
yield '{'
|
||||
if _indent is not None:
|
||||
_current_indent_level += 1
|
||||
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
|
||||
item_separator = _item_separator + newline_indent
|
||||
yield newline_indent
|
||||
else:
|
||||
newline_indent = None
|
||||
item_separator = _item_separator
|
||||
first = True
|
||||
if _sort_keys:
|
||||
items = dct.items()
|
||||
items.sort(key=lambda kv: kv[0])
|
||||
else:
|
||||
items = dct.iteritems()
|
||||
for key, value in items:
|
||||
if isinstance(key, basestring):
|
||||
pass
|
||||
# JavaScript is weakly typed for these, so it makes sense to
|
||||
# also allow them. Many encoders seem to do something like this.
|
||||
elif isinstance(key, float):
|
||||
key = _floatstr(key)
|
||||
elif key is True:
|
||||
key = 'true'
|
||||
elif key is False:
|
||||
key = 'false'
|
||||
elif key is None:
|
||||
key = 'null'
|
||||
elif isinstance(key, (int, long)):
|
||||
key = str(key)
|
||||
elif _skipkeys:
|
||||
continue
|
||||
else:
|
||||
raise TypeError("key " + repr(key) + " is not a string")
|
||||
if first:
|
||||
first = False
|
||||
else:
|
||||
yield item_separator
|
||||
yield _encoder(key)
|
||||
yield _key_separator
|
||||
if isinstance(value, basestring):
|
||||
yield _encoder(value)
|
||||
elif value is None:
|
||||
yield 'null'
|
||||
elif value is True:
|
||||
yield 'true'
|
||||
elif value is False:
|
||||
yield 'false'
|
||||
elif isinstance(value, (int, long)):
|
||||
yield str(value)
|
||||
elif isinstance(value, float):
|
||||
yield _floatstr(value)
|
||||
else:
|
||||
if isinstance(value, (list, tuple)):
|
||||
chunks = _iterencode_list(value, _current_indent_level)
|
||||
elif isinstance(value, dict):
|
||||
chunks = _iterencode_dict(value, _current_indent_level)
|
||||
else:
|
||||
chunks = _iterencode(value, _current_indent_level)
|
||||
for chunk in chunks:
|
||||
yield chunk
|
||||
if newline_indent is not None:
|
||||
_current_indent_level -= 1
|
||||
yield '\n' + (' ' * (_indent * _current_indent_level))
|
||||
yield '}'
|
||||
if markers is not None:
|
||||
del markers[markerid]
|
||||
|
||||
def _iterencode(o, _current_indent_level):
|
||||
if isinstance(o, basestring):
|
||||
yield _encoder(o)
|
||||
elif o is None:
|
||||
yield 'null'
|
||||
elif o is True:
|
||||
yield 'true'
|
||||
elif o is False:
|
||||
yield 'false'
|
||||
elif isinstance(o, (int, long)):
|
||||
yield str(o)
|
||||
elif isinstance(o, float):
|
||||
yield _floatstr(o)
|
||||
elif isinstance(o, (list, tuple)):
|
||||
for chunk in _iterencode_list(o, _current_indent_level):
|
||||
yield chunk
|
||||
elif isinstance(o, dict):
|
||||
for chunk in _iterencode_dict(o, _current_indent_level):
|
||||
yield chunk
|
||||
else:
|
||||
if markers is not None:
|
||||
markerid = id(o)
|
||||
if markerid in markers:
|
||||
raise ValueError("Circular reference detected")
|
||||
markers[markerid] = o
|
||||
o = _default(o)
|
||||
for chunk in _iterencode(o, _current_indent_level):
|
||||
yield chunk
|
||||
if markers is not None:
|
||||
del markers[markerid]
|
||||
|
||||
return _iterencode
65 mitogen/ansible_mitogen/compat/simplejson/scanner.py (new file)
@@ -0,0 +1,65 @@
"""JSON token scanner
"""
import re
try:
    from simplejson._speedups import make_scanner as c_make_scanner
except ImportError:
    c_make_scanner = None

__all__ = ['make_scanner']

NUMBER_RE = re.compile(
    r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
    (re.VERBOSE | re.MULTILINE | re.DOTALL))

def py_make_scanner(context):
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    encoding = context.encoding
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook

    def _scan_once(string, idx):
        try:
            nextchar = string[idx]
        except IndexError:
            raise StopIteration

        if nextchar == '"':
            return parse_string(string, idx + 1, encoding, strict)
        elif nextchar == '{':
            return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5

        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            raise StopIteration

    return _scan_once

make_scanner = c_make_scanner or py_make_scanner
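For reference, the number grammar used by _scan_once above can be exercised on its own. A small sketch; the import path assumes the vendored package is importable as ansible_mitogen.compat.simplejson:

# The three groups of NUMBER_RE map to (integer, fraction, exponent).
from ansible_mitogen.compat.simplejson.scanner import NUMBER_RE

m = NUMBER_RE.match('-12.5e3, "next"')
print(m.groups())   # ('-12', '.5', 'e3')
print(m.end())      # 7 -- index just past the matched number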
1056
mitogen/ansible_mitogen/connection.py
Normal file
File diff suppressed because it is too large
62
mitogen/ansible_mitogen/loaders.py
Normal file
@@ -0,0 +1,62 @@
# Copyright 2019, David Wilson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

"""
Stable names for PluginLoader instances across Ansible versions.
"""

from __future__ import absolute_import

__all__ = [
    'action_loader',
    'connection_loader',
    'module_loader',
    'module_utils_loader',
    'shell_loader',
    'strategy_loader',
]

try:
    from ansible.plugins.loader import action_loader
    from ansible.plugins.loader import connection_loader
    from ansible.plugins.loader import module_loader
    from ansible.plugins.loader import module_utils_loader
    from ansible.plugins.loader import shell_loader
    from ansible.plugins.loader import strategy_loader
except ImportError:  # Ansible <2.4
    from ansible.plugins import action_loader
    from ansible.plugins import connection_loader
    from ansible.plugins import module_loader
    from ansible.plugins import module_utils_loader
    from ansible.plugins import shell_loader
    from ansible.plugins import strategy_loader


# These are original, unwrapped implementations
action_loader__get = action_loader.get
connection_loader__get = connection_loader.get
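The point of the stable names is that one import works on both sides of the Ansible 2.4 loader reorganisation. A hedged usage sketch; PluginLoader.get(name, class_only=True) is the stock Ansible API, and 'ssh' here is just an illustrative plug-in name:

# Minimal sketch: resolve the vanilla ssh connection plug-in class regardless
# of Ansible version, via the stable loader names above.
import ansible_mitogen.loaders as loaders

ssh_cls = loaders.connection_loader.get('ssh', class_only=True)
print(ssh_cls)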
128
mitogen/ansible_mitogen/logging.py
Normal file
@@ -0,0 +1,128 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import logging
|
||||
import os
|
||||
|
||||
import mitogen.core
|
||||
import mitogen.utils
|
||||
|
||||
try:
|
||||
from __main__ import display
|
||||
except ImportError:
|
||||
from ansible.utils.display import Display
|
||||
display = Display()
|
||||
|
||||
|
||||
#: The process name set via :func:`set_process_name`.
|
||||
_process_name = None
|
||||
|
||||
#: The PID of the process that last called :func:`set_process_name`, so its
|
||||
#: value can be ignored in unknown fork children.
|
||||
_process_pid = None
|
||||
|
||||
|
||||
def set_process_name(name):
|
||||
"""
|
||||
Set a name to adorn log messages with.
|
||||
"""
|
||||
global _process_name
|
||||
_process_name = name
|
||||
|
||||
global _process_pid
|
||||
_process_pid = os.getpid()
|
||||
|
||||
|
||||
class Handler(logging.Handler):
|
||||
"""
|
||||
Use Mitogen's log format, but send the result to a Display method.
|
||||
"""
|
||||
def __init__(self, normal_method):
|
||||
logging.Handler.__init__(self)
|
||||
self.formatter = mitogen.utils.log_get_formatter()
|
||||
self.normal_method = normal_method
|
||||
|
||||
#: Set of target loggers that produce warnings and errors that spam the
|
||||
#: console needlessly. Their log level is forced to INFO. A better strategy
|
||||
#: may simply be to bury all target logs in DEBUG output, but not by
|
||||
#: overriding their log level as done here.
|
||||
NOISY_LOGGERS = frozenset([
|
||||
'dnf', # issue #272; warns when a package is already installed.
|
||||
'boto', # issue #541; normal boto retry logic can cause ERROR logs.
|
||||
])
|
||||
|
||||
def emit(self, record):
|
||||
mitogen_name = getattr(record, 'mitogen_name', '')
|
||||
if mitogen_name == 'stderr':
|
||||
record.levelno = logging.ERROR
|
||||
if mitogen_name in self.NOISY_LOGGERS and record.levelno >= logging.WARNING:
|
||||
record.levelno = logging.DEBUG
|
||||
|
||||
if _process_pid == os.getpid():
|
||||
process_name = _process_name
|
||||
else:
|
||||
process_name = '?'
|
||||
|
||||
s = '[%-4s %d] %s' % (process_name, os.getpid(), self.format(record))
|
||||
if record.levelno >= logging.ERROR:
|
||||
display.error(s, wrap_text=False)
|
||||
elif record.levelno >= logging.WARNING:
|
||||
display.warning(s, formatted=True)
|
||||
else:
|
||||
self.normal_method(s)
|
||||
|
||||
|
||||
def setup():
|
||||
"""
|
||||
Install handlers for Mitogen loggers to redirect them into the Ansible
|
||||
display framework. Ansible installs its own logging framework handlers when
|
||||
C.DEFAULT_LOG_PATH is set, therefore disable propagation for our handlers.
|
||||
"""
|
||||
l_mitogen = logging.getLogger('mitogen')
|
||||
l_mitogen_io = logging.getLogger('mitogen.io')
|
||||
l_ansible_mitogen = logging.getLogger('ansible_mitogen')
|
||||
l_operon = logging.getLogger('operon')
|
||||
|
||||
for logger in l_mitogen, l_mitogen_io, l_ansible_mitogen, l_operon:
|
||||
logger.handlers = [Handler(display.vvv)]
|
||||
logger.propagate = False
|
||||
|
||||
if display.verbosity > 2:
|
||||
l_ansible_mitogen.setLevel(logging.DEBUG)
|
||||
l_mitogen.setLevel(logging.DEBUG)
|
||||
else:
|
||||
# Mitogen copies the active log level into new children, allowing them
|
||||
# to filter tiny messages before they hit the network, and therefore
|
||||
# before they wake the IO loop. Explicitly setting INFO saves ~4%
|
||||
# running against just the local machine.
|
||||
l_mitogen.setLevel(logging.ERROR)
|
||||
l_ansible_mitogen.setLevel(logging.ERROR)
|
||||
|
||||
if display.verbosity > 3:
|
||||
l_mitogen_io.setLevel(logging.DEBUG)
|
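A minimal usage sketch of the module above: once setup() runs, records from the 'mitogen' and 'ansible_mitogen' loggers are rendered through Ansible's Display object rather than the root logging handlers. The process name 'task' is only an example value:

import logging
import ansible_mitogen.logging

ansible_mitogen.logging.set_process_name('task')   # adorns each line as "[task <pid>] ..."
ansible_mitogen.logging.setup()
logging.getLogger('ansible_mitogen').error('demo message routed to display.error()')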
428
mitogen/ansible_mitogen/mixins.py
Normal file
@@ -0,0 +1,428 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import logging
|
||||
import os
|
||||
import pwd
|
||||
import random
|
||||
import traceback
|
||||
|
||||
try:
|
||||
from shlex import quote as shlex_quote
|
||||
except ImportError:
|
||||
from pipes import quote as shlex_quote
|
||||
|
||||
from ansible.module_utils._text import to_bytes
|
||||
from ansible.parsing.utils.jsonify import jsonify
|
||||
|
||||
import ansible
|
||||
import ansible.constants
|
||||
import ansible.plugins
|
||||
import ansible.plugins.action
|
||||
|
||||
import mitogen.core
|
||||
import mitogen.select
|
||||
import mitogen.utils
|
||||
|
||||
import ansible_mitogen.connection
|
||||
import ansible_mitogen.planner
|
||||
import ansible_mitogen.target
|
||||
from ansible.module_utils._text import to_text
|
||||
|
||||
try:
|
||||
from ansible.utils.unsafe_proxy import wrap_var
|
||||
except ImportError:
|
||||
from ansible.vars.unsafe_proxy import wrap_var
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ActionModuleMixin(ansible.plugins.action.ActionBase):
|
||||
"""
|
||||
The Mitogen-patched PluginLoader dynamically mixes this into every action
|
||||
class that Ansible attempts to load. It exists to override all the
|
||||
assumptions built into the base action class that should really belong in
|
||||
some middle layer, or at least in the connection layer.
|
||||
|
||||
Functionality is defined here for:
|
||||
|
||||
* Capturing the final set of task variables and giving Connection a chance
|
||||
to update its idea of the correct execution environment, before any
|
||||
attempt is made to call a Connection method. While it's not expected for
|
||||
the interpreter to change on a per-task basis, Ansible permits this, and
|
||||
so it must be supported.
|
||||
|
||||
* Overriding lots of methods that try to call out to shell for mundane
|
||||
reasons, such as copying files around, changing file permissions,
|
||||
creating temporary directories and suchlike.
|
||||
|
||||
* Short-circuiting any use of Ansiballz or related code for executing a
|
||||
module remotely using shell commands and SSH.
|
||||
|
||||
* Short-circuiting most of the logic in dealing with the fact that Ansible
|
||||
always runs become: tasks across at least the SSH user account and the
|
||||
destination user account, and handling the security permission issues
|
||||
that crop up due to this. Mitogen always runs a task completely within
|
||||
the target user account, so it's not a problem for us.
|
||||
"""
|
||||
def __init__(self, task, connection, *args, **kwargs):
|
||||
"""
|
||||
Verify the received connection is really a Mitogen connection. If not,
|
||||
transmute this instance back into the original unadorned base class.
|
||||
|
||||
This allows running the Mitogen strategy in mixed-target playbooks,
|
||||
where some targets use SSH while others use WinRM or some fancier UNIX
|
||||
connection plug-in. That's because when the Mitogen strategy is active,
|
||||
ActionModuleMixin is unconditionally mixed into any action module that
|
||||
is instantiated, and there is no direct way for the monkey-patch to
|
||||
know what kind of connection will be used upfront.
|
||||
"""
|
||||
super(ActionModuleMixin, self).__init__(task, connection, *args, **kwargs)
|
||||
if not isinstance(connection, ansible_mitogen.connection.Connection):
|
||||
_, self.__class__ = type(self).__bases__
|
||||
|
||||
def run(self, tmp=None, task_vars=None):
|
||||
"""
|
||||
Override run() to notify Connection of task-specific data, so it has a
|
||||
chance to know e.g. the Python interpreter in use.
|
||||
"""
|
||||
self._connection.on_action_run(
|
||||
task_vars=task_vars,
|
||||
delegate_to_hostname=self._task.delegate_to,
|
||||
loader_basedir=self._loader.get_basedir(),
|
||||
)
|
||||
return super(ActionModuleMixin, self).run(tmp, task_vars)
|
||||
|
||||
COMMAND_RESULT = {
|
||||
'rc': 0,
|
||||
'stdout': '',
|
||||
'stdout_lines': [],
|
||||
'stderr': ''
|
||||
}
|
||||
|
||||
def fake_shell(self, func, stdout=False):
|
||||
"""
|
||||
Execute a function and decorate its return value in the style of
|
||||
_low_level_execute_command(). This produces a return value that looks
|
||||
like some shell command was run, when really func() was implemented
|
||||
entirely in Python.
|
||||
|
||||
If the function raises :py:class:`mitogen.core.CallError`, this will be
|
||||
translated into a failed shell command with a non-zero exit status.
|
||||
|
||||
:param func:
|
||||
Function invoked as `func()`.
|
||||
:returns:
|
||||
See :py:attr:`COMMAND_RESULT`.
|
||||
"""
|
||||
dct = self.COMMAND_RESULT.copy()
|
||||
try:
|
||||
rc = func()
|
||||
if stdout:
|
||||
dct['stdout'] = repr(rc)
|
||||
except mitogen.core.CallError:
|
||||
LOG.exception('While emulating a shell command')
|
||||
dct['rc'] = 1
|
||||
dct['stderr'] = traceback.format_exc()
|
||||
|
||||
return dct
|
||||
|
||||
def _remote_file_exists(self, path):
|
||||
"""
|
||||
Determine if `path` exists by directly invoking os.path.exists() in the
|
||||
target user account.
|
||||
"""
|
||||
LOG.debug('_remote_file_exists(%r)', path)
|
||||
return self._connection.get_chain().call(
|
||||
ansible_mitogen.target.file_exists,
|
||||
mitogen.utils.cast(path)
|
||||
)
|
||||
|
||||
def _configure_module(self, module_name, module_args, task_vars=None):
|
||||
"""
|
||||
Mitogen does not use the Ansiballz framework. This call should never
|
||||
happen when ActionMixin is active, so crash if it does.
|
||||
"""
|
||||
assert False, "_configure_module() should never be called."
|
||||
|
||||
def _is_pipelining_enabled(self, module_style, wrap_async=False):
|
||||
"""
|
||||
Mitogen does not use SSH pipelining. This call should never happen when
|
||||
ActionMixin is active, so crash if it does.
|
||||
"""
|
||||
assert False, "_is_pipelining_enabled() should never be called."
|
||||
|
||||
def _generate_tmp_path(self):
|
||||
return os.path.join(
|
||||
self._connection.get_good_temp_dir(),
|
||||
'ansible_mitogen_action_%016x' % (
|
||||
random.getrandbits(8*8),
|
||||
)
|
||||
)
|
||||
|
||||
def _make_tmp_path(self, remote_user=None):
|
||||
"""
|
||||
Create a temporary subdirectory as a child of the temporary directory
|
||||
managed by the remote interpreter.
|
||||
"""
|
||||
LOG.debug('_make_tmp_path(remote_user=%r)', remote_user)
|
||||
path = self._generate_tmp_path()
|
||||
LOG.debug('Temporary directory: %r', path)
|
||||
self._connection.get_chain().call_no_reply(os.mkdir, path)
|
||||
self._connection._shell.tmpdir = path
|
||||
return path
|
||||
|
||||
def _remove_tmp_path(self, tmp_path):
|
||||
"""
|
||||
Replace the base implementation's invocation of rm -rf, replacing it
|
||||
with a pipelined call to :func:`ansible_mitogen.target.prune_tree`.
|
||||
"""
|
||||
LOG.debug('_remove_tmp_path(%r)', tmp_path)
|
||||
if tmp_path is None and ansible.__version__ > '2.6':
|
||||
tmp_path = self._connection._shell.tmpdir # 06f73ad578d
|
||||
if tmp_path is not None:
|
||||
self._connection.get_chain().call_no_reply(
|
||||
ansible_mitogen.target.prune_tree,
|
||||
tmp_path,
|
||||
)
|
||||
self._connection._shell.tmpdir = None
|
||||
|
||||
def _transfer_data(self, remote_path, data):
|
||||
"""
|
||||
Used by the base _execute_module(), and in <2.4 also by the template
|
||||
action module, and probably others.
|
||||
"""
|
||||
if isinstance(data, dict):
|
||||
data = jsonify(data)
|
||||
if not isinstance(data, bytes):
|
||||
data = to_bytes(data, errors='surrogate_or_strict')
|
||||
|
||||
LOG.debug('_transfer_data(%r, %s ..%d bytes)',
|
||||
remote_path, type(data), len(data))
|
||||
self._connection.put_data(remote_path, data)
|
||||
return remote_path
|
||||
|
||||
#: Actions listed here cause :func:`_fixup_perms2` to avoid a needless
|
||||
#: roundtrip, as they modify file modes separately afterwards. This is due
|
||||
#: to the method prototype having a default of `execute=True`.
|
||||
FIXUP_PERMS_RED_HERRING = set(['copy'])
|
||||
|
||||
def _fixup_perms2(self, remote_paths, remote_user=None, execute=True):
|
||||
"""
|
||||
Mitogen always executes ActionBase helper methods in the context of the
|
||||
target user account, so it is never necessary to modify permissions
|
||||
except to ensure the execute bit is set if requested.
|
||||
"""
|
||||
LOG.debug('_fixup_perms2(%r, remote_user=%r, execute=%r)',
|
||||
remote_paths, remote_user, execute)
|
||||
if execute and self._task.action not in self.FIXUP_PERMS_RED_HERRING:
|
||||
return self._remote_chmod(remote_paths, mode='u+x')
|
||||
return self.COMMAND_RESULT.copy()
|
||||
|
||||
def _remote_chmod(self, paths, mode, sudoable=False):
|
||||
"""
|
||||
Issue an asynchronous set_file_mode() call for every path in `paths`,
|
||||
then format the resulting return value list with fake_shell().
|
||||
"""
|
||||
LOG.debug('_remote_chmod(%r, mode=%r, sudoable=%r)',
|
||||
paths, mode, sudoable)
|
||||
return self.fake_shell(lambda: mitogen.select.Select.all(
|
||||
self._connection.get_chain().call_async(
|
||||
ansible_mitogen.target.set_file_mode, path, mode
|
||||
)
|
||||
for path in paths
|
||||
))
|
||||
|
||||
def _remote_chown(self, paths, user, sudoable=False):
|
||||
"""
|
||||
Issue an asynchronous os.chown() call for every path in `paths`, then
|
||||
format the resulting return value list with fake_shell().
|
||||
"""
|
||||
LOG.debug('_remote_chown(%r, user=%r, sudoable=%r)',
|
||||
paths, user, sudoable)
|
||||
ent = self._connection.get_chain().call(pwd.getpwnam, user)
|
||||
return self.fake_shell(lambda: mitogen.select.Select.all(
|
||||
self._connection.get_chain().call_async(
|
||||
os.chown, path, ent.pw_uid, ent.pw_gid
|
||||
)
|
||||
for path in paths
|
||||
))
|
||||
|
||||
def _remote_expand_user(self, path, sudoable=True):
|
||||
"""
|
||||
Replace the base implementation's attempt to emulate
|
||||
os.path.expanduser() with an actual call to os.path.expanduser().
|
||||
|
||||
:param bool sudoable:
|
||||
If :data:`True`, indicate unqualified tilde ("~" with no username)
|
||||
should be evaluated in the context of the login account, not any
|
||||
become_user.
|
||||
"""
|
||||
LOG.debug('_remote_expand_user(%r, sudoable=%r)', path, sudoable)
|
||||
if not path.startswith('~'):
|
||||
# /home/foo -> /home/foo
|
||||
return path
|
||||
if sudoable or not self._play_context.become:
|
||||
if path == '~':
|
||||
# ~ -> /home/dmw
|
||||
return self._connection.homedir
|
||||
if path.startswith('~/'):
|
||||
# ~/.ansible -> /home/dmw/.ansible
|
||||
return os.path.join(self._connection.homedir, path[2:])
|
||||
# ~root/.ansible -> /root/.ansible
|
||||
return self._connection.get_chain(use_login=(not sudoable)).call(
|
||||
os.path.expanduser,
|
||||
mitogen.utils.cast(path),
|
||||
)
|
||||
|
||||
def get_task_timeout_secs(self):
|
||||
"""
|
||||
Return the task "async:" value, portable across 2.4-2.5.
|
||||
"""
|
||||
try:
|
||||
return self._task.async_val
|
||||
except AttributeError:
|
||||
return getattr(self._task, 'async')
|
||||
|
||||
def _set_temp_file_args(self, module_args, wrap_async):
|
||||
# Ansible>2.5 module_utils reuses the action's temporary directory if
|
||||
# one exists. Older versions error if this key is present.
|
||||
if ansible.__version__ > '2.5':
|
||||
if wrap_async:
|
||||
# Sharing is not possible with async tasks, as in that case,
|
||||
# the directory must outlive the action plug-in.
|
||||
module_args['_ansible_tmpdir'] = None
|
||||
else:
|
||||
module_args['_ansible_tmpdir'] = self._connection._shell.tmpdir
|
||||
|
||||
# If _ansible_tmpdir is unset, Ansible>2.6 module_utils will use
|
||||
# _ansible_remote_tmp as the location to create the module's temporary
|
||||
# directory. Older versions error if this key is present.
|
||||
if ansible.__version__ > '2.6':
|
||||
module_args['_ansible_remote_tmp'] = (
|
||||
self._connection.get_good_temp_dir()
|
||||
)
|
||||
|
||||
def _execute_module(self, module_name=None, module_args=None, tmp=None,
|
||||
task_vars=None, persist_files=False,
|
||||
delete_remote_tmp=True, wrap_async=False):
|
||||
"""
|
||||
Collect up a module's execution environment then use it to invoke
|
||||
target.run_module() or helpers.run_module_async() in the target
|
||||
context.
|
||||
"""
|
||||
if module_name is None:
|
||||
module_name = self._task.action
|
||||
if module_args is None:
|
||||
module_args = self._task.args
|
||||
if task_vars is None:
|
||||
task_vars = {}
|
||||
|
||||
self._update_module_args(module_name, module_args, task_vars)
|
||||
env = {}
|
||||
self._compute_environment_string(env)
|
||||
self._set_temp_file_args(module_args, wrap_async)
|
||||
|
||||
self._connection._connect()
|
||||
result = ansible_mitogen.planner.invoke(
|
||||
ansible_mitogen.planner.Invocation(
|
||||
action=self,
|
||||
connection=self._connection,
|
||||
module_name=mitogen.core.to_text(module_name),
|
||||
module_args=mitogen.utils.cast(module_args),
|
||||
task_vars=task_vars,
|
||||
templar=self._templar,
|
||||
env=mitogen.utils.cast(env),
|
||||
wrap_async=wrap_async,
|
||||
timeout_secs=self.get_task_timeout_secs(),
|
||||
)
|
||||
)
|
||||
|
||||
if tmp and ansible.__version__ < '2.5' and delete_remote_tmp:
|
||||
# Built-in actions expected tmpdir to be cleaned up automatically
|
||||
# on _execute_module().
|
||||
self._remove_tmp_path(tmp)
|
||||
|
||||
return wrap_var(result)
|
||||
|
||||
def _postprocess_response(self, result):
|
||||
"""
|
||||
Apply fixups mimicking ActionBase._execute_module(); this is copied
|
||||
verbatim from action/__init__.py, the guts of _parse_returned_data are
|
||||
garbage and should be removed or reimplemented once tests exist.
|
||||
|
||||
:param dict result:
|
||||
Dictionary with format::
|
||||
|
||||
{
|
||||
"rc": int,
|
||||
"stdout": "stdout data",
|
||||
"stderr": "stderr data"
|
||||
}
|
||||
"""
|
||||
data = self._parse_returned_data(result)
|
||||
|
||||
# Cutpasted from the base implementation.
|
||||
if 'stdout' in data and 'stdout_lines' not in data:
|
||||
data['stdout_lines'] = (data['stdout'] or u'').splitlines()
|
||||
if 'stderr' in data and 'stderr_lines' not in data:
|
||||
data['stderr_lines'] = (data['stderr'] or u'').splitlines()
|
||||
|
||||
return data
|
||||
|
||||
def _low_level_execute_command(self, cmd, sudoable=True, in_data=None,
|
||||
executable=None,
|
||||
encoding_errors='surrogate_then_replace',
|
||||
chdir=None):
|
||||
"""
|
||||
Override the base implementation by simply calling
|
||||
target.exec_command() in the target context.
|
||||
"""
|
||||
LOG.debug('_low_level_execute_command(%r, in_data=%r, exe=%r, dir=%r)',
|
||||
cmd, type(in_data), executable, chdir)
|
||||
if executable is None: # executable defaults to False
|
||||
executable = self._play_context.executable
|
||||
if executable:
|
||||
cmd = executable + ' -c ' + shlex_quote(cmd)
|
||||
|
||||
rc, stdout, stderr = self._connection.exec_command(
|
||||
cmd=cmd,
|
||||
in_data=in_data,
|
||||
sudoable=sudoable,
|
||||
mitogen_chdir=chdir,
|
||||
)
|
||||
stdout_text = to_text(stdout, errors=encoding_errors)
|
||||
|
||||
return {
|
||||
'rc': rc,
|
||||
'stdout': stdout_text,
|
||||
'stdout_lines': stdout_text.splitlines(),
|
||||
'stderr': stderr,
|
||||
}
|
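A self-contained mirror of the fake_shell() contract above (not the mixin itself), useful for seeing the dictionary shape that callers of _remote_chmod()/_remote_chown() receive:

import traceback

COMMAND_RESULT = {'rc': 0, 'stdout': '', 'stdout_lines': [], 'stderr': ''}

def fake_shell(func, stdout=False):
    # Mirrors ActionModuleMixin.fake_shell(); the real code catches
    # mitogen.core.CallError rather than Exception.
    dct = COMMAND_RESULT.copy()
    try:
        rc = func()
        if stdout:
            dct['stdout'] = repr(rc)
    except Exception:
        dct['rc'] = 1
        dct['stderr'] = traceback.format_exc()
    return dct

print(fake_shell(lambda: ['ok'], stdout=True))
# {'rc': 0, 'stdout': "['ok']", 'stdout_lines': [], 'stderr': ''}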
157
mitogen/ansible_mitogen/module_finder.py
Normal file
@@ -0,0 +1,157 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import collections
|
||||
import imp
|
||||
import os
|
||||
|
||||
import mitogen.master
|
||||
|
||||
|
||||
PREFIX = 'ansible.module_utils.'
|
||||
|
||||
|
||||
Module = collections.namedtuple('Module', 'name path kind parent')
|
||||
|
||||
|
||||
def get_fullname(module):
|
||||
"""
|
||||
Reconstruct a Module's canonical path by recursing through its parents.
|
||||
"""
|
||||
bits = [str(module.name)]
|
||||
while module.parent:
|
||||
bits.append(str(module.parent.name))
|
||||
module = module.parent
|
||||
return '.'.join(reversed(bits))
|
||||
|
||||
|
||||
def get_code(module):
|
||||
"""
|
||||
Compile and return a Module's code object.
|
||||
"""
|
||||
fp = open(module.path, 'rb')
|
||||
try:
|
||||
return compile(fp.read(), str(module.name), 'exec')
|
||||
finally:
|
||||
fp.close()
|
||||
|
||||
|
||||
def is_pkg(module):
|
||||
"""
|
||||
Return :data:`True` if a Module represents a package.
|
||||
"""
|
||||
return module.kind == imp.PKG_DIRECTORY
|
||||
|
||||
|
||||
def find(name, path=(), parent=None):
|
||||
"""
|
||||
Return a Module instance describing the first matching module found on the
|
||||
search path.
|
||||
|
||||
:param str name:
|
||||
Module name.
|
||||
:param list path:
|
||||
List of directory names to search for the module.
|
||||
:param Module parent:
|
||||
Optional module parent.
|
||||
"""
|
||||
assert isinstance(path, tuple)
|
||||
head, _, tail = name.partition('.')
|
||||
try:
|
||||
tup = imp.find_module(head, list(path))
|
||||
except ImportError:
|
||||
return parent
|
||||
|
||||
fp, modpath, (suffix, mode, kind) = tup
|
||||
if fp:
|
||||
fp.close()
|
||||
|
||||
if parent and modpath == parent.path:
|
||||
# 'from timeout import timeout', where 'timeout' is a function but also
|
||||
# the name of the module being imported.
|
||||
return None
|
||||
|
||||
if kind == imp.PKG_DIRECTORY:
|
||||
modpath = os.path.join(modpath, '__init__.py')
|
||||
|
||||
module = Module(head, modpath, kind, parent)
|
||||
# TODO: this code is entirely wrong on Python 3.x, but works well enough
|
||||
# for Ansible. We need a new find_child() that only looks in the package
|
||||
# directory, never falling back to the parent search path.
|
||||
if tail and kind == imp.PKG_DIRECTORY:
|
||||
return find_relative(module, tail, path)
|
||||
return module
|
||||
|
||||
|
||||
def find_relative(parent, name, path=()):
|
||||
if parent.kind == imp.PKG_DIRECTORY:
|
||||
path = (os.path.dirname(parent.path),) + path
|
||||
return find(name, path, parent=parent)
|
||||
|
||||
|
||||
def scan_fromlist(code):
|
||||
for level, modname_s, fromlist in mitogen.master.scan_code_imports(code):
|
||||
for name in fromlist:
|
||||
yield level, '%s.%s' % (modname_s, name)
|
||||
if not fromlist:
|
||||
yield level, modname_s
|
||||
|
||||
|
||||
def scan(module_name, module_path, search_path):
|
||||
module = Module(module_name, module_path, imp.PY_SOURCE, None)
|
||||
stack = [module]
|
||||
seen = set()
|
||||
|
||||
while stack:
|
||||
module = stack.pop(0)
|
||||
for level, fromname in scan_fromlist(get_code(module)):
|
||||
if not fromname.startswith(PREFIX):
|
||||
continue
|
||||
|
||||
imported = find(fromname[len(PREFIX):], search_path)
|
||||
if imported is None or imported in seen:
|
||||
continue
|
||||
|
||||
seen.add(imported)
|
||||
stack.append(imported)
|
||||
parent = imported.parent
|
||||
while parent:
|
||||
fullname = get_fullname(parent)
|
||||
module = Module(fullname, parent.path, parent.kind, None)
|
||||
if module not in seen:
|
||||
seen.add(module)
|
||||
stack.append(module)
|
||||
parent = parent.parent
|
||||
|
||||
return sorted(
|
||||
(PREFIX + get_fullname(module), module.path, is_pkg(module))
|
||||
for module in seen
|
||||
)
|
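A hedged sketch of a standalone dependency scan, mirroring what ModuleDepService does with these helpers on the Python/Ansible versions this code targets. The module path is resolved via the stock loader exactly as planner.invoke() does; 'ping' is only an example module:

import ansible.module_utils
import ansible_mitogen.loaders
import ansible_mitogen.module_finder

path = ansible_mitogen.loaders.module_loader.find_plugin('ping', '')
deps = ansible_mitogen.module_finder.scan(
    module_name='ansible_module_ping',
    module_path=path,
    search_path=tuple(ansible.module_utils.__path__),
)
for fullname, dep_path, is_pkg in deps:
    print(fullname, dep_path, is_pkg)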
76
mitogen/ansible_mitogen/parsing.py
Normal file
@@ -0,0 +1,76 @@
# Copyright 2019, David Wilson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

from __future__ import absolute_import
from __future__ import unicode_literals

import mitogen.core


def parse_script_interpreter(source):
    """
    Parse the script interpreter portion of a UNIX hashbang using the rules
    Linux uses.

    :param str source: String like "/usr/bin/env python".

    :returns:
        Tuple of `(interpreter, arg)`, where `interpreter` is the script
        interpreter and `arg` is its sole argument if present, otherwise
        :py:data:`None`.
    """
    # Find terminating newline. Assume last byte of binprm_buf if absent.
    nl = source.find(b'\n', 0, 128)
    if nl == -1:
        nl = min(128, len(source))

    # Split once on the first run of whitespace. If no whitespace exists,
    # bits just contains the interpreter filename.
    bits = source[0:nl].strip().split(None, 1)
    if len(bits) == 1:
        return mitogen.core.to_text(bits[0]), None
    return mitogen.core.to_text(bits[0]), mitogen.core.to_text(bits[1])


def parse_hashbang(source):
    """
    Parse a UNIX "hashbang line" using the syntax supported by Linux.

    :param str source: String like "#!/usr/bin/env python".

    :returns:
        Tuple of `(interpreter, arg)`, where `interpreter` is the script
        interpreter and `arg` is its sole argument if present, otherwise
        :py:data:`None`.
    """
    # Linux requires first 2 bytes with no whitespace, pretty sure it's the
    # same everywhere. See binfmt_script.c.
    if not source.startswith(b'#!'):
        return None, None

    return parse_script_interpreter(source[2:])
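A short usage sketch for the parsers above. Input is bytes, since the code compares against b'#!' and searches for b'\n':

from ansible_mitogen.parsing import parse_hashbang

print(parse_hashbang(b'#!/usr/bin/env python\nprint(1)\n'))   # (u'/usr/bin/env', u'python')
print(parse_hashbang(b'#!/bin/sh\n'))                         # (u'/bin/sh', None)
print(parse_hashbang(b'echo no hashbang\n'))                  # (None, None)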
576
mitogen/ansible_mitogen/planner.py
Normal file
@@ -0,0 +1,576 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""
|
||||
Classes to detect each case from [0] and prepare arguments necessary for the
|
||||
corresponding Runner class within the target, including preloading requisite
|
||||
files/modules known missing.
|
||||
|
||||
[0] "Ansible Module Architecture", developing_program_flow_modules.html
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
|
||||
from ansible.executor import module_common
|
||||
import ansible.errors
|
||||
import ansible.module_utils
|
||||
import ansible.release
|
||||
import mitogen.core
|
||||
import mitogen.select
|
||||
|
||||
import ansible_mitogen.loaders
|
||||
import ansible_mitogen.parsing
|
||||
import ansible_mitogen.target
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
NO_METHOD_MSG = 'Mitogen: no invocation method found for: '
|
||||
NO_INTERPRETER_MSG = 'module (%s) is missing interpreter line'
|
||||
NO_MODULE_MSG = 'The module %s was not found in configured module paths.'
|
||||
|
||||
_planner_by_path = {}
|
||||
|
||||
|
||||
class Invocation(object):
|
||||
"""
|
||||
Collect up a module's execution environment then use it to invoke
|
||||
target.run_module() or helpers.run_module_async() in the target context.
|
||||
"""
|
||||
def __init__(self, action, connection, module_name, module_args,
|
||||
task_vars, templar, env, wrap_async, timeout_secs):
|
||||
#: ActionBase instance invoking the module. Required to access some
|
||||
#: output postprocessing methods that don't belong in ActionBase at
|
||||
#: all.
|
||||
self.action = action
|
||||
#: Ansible connection to use to contact the target. Must be an
|
||||
#: ansible_mitogen connection.
|
||||
self.connection = connection
|
||||
#: Name of the module ('command', 'shell', etc.) to execute.
|
||||
self.module_name = module_name
|
||||
#: Final module arguments.
|
||||
self.module_args = module_args
|
||||
#: Task variables, needed to extract ansible_*_interpreter.
|
||||
self.task_vars = task_vars
|
||||
#: Templar, needed to extract ansible_*_interpreter.
|
||||
self.templar = templar
|
||||
#: Final module environment.
|
||||
self.env = env
|
||||
#: Boolean, if :py:data:`True`, launch the module asynchronously.
|
||||
self.wrap_async = wrap_async
|
||||
#: Integer, if >0, limit the time an asynchronous job may run for.
|
||||
self.timeout_secs = timeout_secs
|
||||
#: Initially ``None``, but set by :func:`invoke`. The path on the
|
||||
#: master to the module's implementation file.
|
||||
self.module_path = None
|
||||
#: Initially ``None``, but set by :func:`invoke`. The raw source or
|
||||
#: binary contents of the module.
|
||||
self._module_source = None
|
||||
|
||||
def get_module_source(self):
|
||||
if self._module_source is None:
|
||||
self._module_source = read_file(self.module_path)
|
||||
return self._module_source
|
||||
|
||||
def __repr__(self):
|
||||
return 'Invocation(module_name=%s)' % (self.module_name,)
|
||||
|
||||
|
||||
class Planner(object):
|
||||
"""
|
||||
A Planner receives a module name and the contents of its implementation
|
||||
file, indicates whether or not it understands how to run the module, and
|
||||
exports a method to run the module.
|
||||
"""
|
||||
def __init__(self, invocation):
|
||||
self._inv = invocation
|
||||
|
||||
@classmethod
|
||||
def detect(cls, path, source):
|
||||
"""
|
||||
Return true if the supplied `invocation` matches the module type
|
||||
implemented by this planner.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def should_fork(self):
|
||||
"""
|
||||
Asynchronous tasks must always be forked.
|
||||
"""
|
||||
return self._inv.wrap_async
|
||||
|
||||
def get_push_files(self):
|
||||
"""
|
||||
Return a list of files that should be propagated to the target context
|
||||
using PushFileService. The default implementation pushes nothing.
|
||||
"""
|
||||
return []
|
||||
|
||||
def get_module_deps(self):
|
||||
"""
|
||||
Return a list of the Python module names imported by the module.
|
||||
"""
|
||||
return []
|
||||
|
||||
def get_kwargs(self, **kwargs):
|
||||
"""
|
||||
If :meth:`detect` returned :data:`True`, plan for the module's
|
||||
execution, including granting access to or delivering any files to it
|
||||
that are known to be absent, and finally return a dict::
|
||||
|
||||
{
|
||||
# Name of the class from runners.py that implements the
|
||||
# target-side execution of this module type.
|
||||
"runner_name": "...",
|
||||
|
||||
# Remaining keys are passed to the constructor of the class
|
||||
# named by `runner_name`.
|
||||
}
|
||||
"""
|
||||
binding = self._inv.connection.get_binding()
|
||||
|
||||
new = dict((mitogen.core.UnicodeType(k), kwargs[k])
|
||||
for k in kwargs)
|
||||
new.setdefault('good_temp_dir',
|
||||
self._inv.connection.get_good_temp_dir())
|
||||
new.setdefault('cwd', self._inv.connection.get_default_cwd())
|
||||
new.setdefault('extra_env', self._inv.connection.get_default_env())
|
||||
new.setdefault('emulate_tty', True)
|
||||
new.setdefault('service_context', binding.get_child_service_context())
|
||||
return new
|
||||
|
||||
def __repr__(self):
|
||||
return '%s()' % (type(self).__name__,)
|
||||
|
||||
|
||||
class BinaryPlanner(Planner):
|
||||
"""
|
||||
Binary modules take their arguments and will return data to Ansible in the
|
||||
same way as want JSON modules.
|
||||
"""
|
||||
runner_name = 'BinaryRunner'
|
||||
|
||||
@classmethod
|
||||
def detect(cls, path, source):
|
||||
return module_common._is_binary(source)
|
||||
|
||||
def get_push_files(self):
|
||||
return [mitogen.core.to_text(self._inv.module_path)]
|
||||
|
||||
def get_kwargs(self, **kwargs):
|
||||
return super(BinaryPlanner, self).get_kwargs(
|
||||
runner_name=self.runner_name,
|
||||
module=self._inv.module_name,
|
||||
path=self._inv.module_path,
|
||||
json_args=json.dumps(self._inv.module_args),
|
||||
env=self._inv.env,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
|
||||
class ScriptPlanner(BinaryPlanner):
|
||||
"""
|
||||
Common functionality for script module planners -- handle interpreter
|
||||
detection and rewrite.
|
||||
"""
|
||||
def _rewrite_interpreter(self, path):
|
||||
"""
|
||||
Given the original interpreter binary extracted from the script's
|
||||
interpreter line, look up the associated `ansible_*_interpreter`
|
||||
variable, render it and return it.
|
||||
|
||||
:param str path:
|
||||
Absolute UNIX path to original interpreter.
|
||||
|
||||
:returns:
|
||||
Shell fragment prefix used to execute the script via "/bin/sh -c".
|
||||
While `ansible_*_interpreter` documentation suggests shell isn't
|
||||
involved here, the vanilla implementation uses it and that use is
|
||||
exploited in common playbooks.
|
||||
"""
|
||||
key = u'ansible_%s_interpreter' % os.path.basename(path).strip()
|
||||
try:
|
||||
template = self._inv.task_vars[key]
|
||||
except KeyError:
|
||||
return path
|
||||
|
||||
return mitogen.utils.cast(self._inv.templar.template(template))
|
||||
|
||||
def _get_interpreter(self):
|
||||
path, arg = ansible_mitogen.parsing.parse_hashbang(
|
||||
self._inv.get_module_source()
|
||||
)
|
||||
if path is None:
|
||||
raise ansible.errors.AnsibleError(NO_INTERPRETER_MSG % (
|
||||
self._inv.module_name,
|
||||
))
|
||||
|
||||
fragment = self._rewrite_interpreter(path)
|
||||
if arg:
|
||||
fragment += ' ' + arg
|
||||
|
||||
return fragment, path.startswith('python')
|
||||
|
||||
def get_kwargs(self, **kwargs):
|
||||
interpreter_fragment, is_python = self._get_interpreter()
|
||||
return super(ScriptPlanner, self).get_kwargs(
|
||||
interpreter_fragment=interpreter_fragment,
|
||||
is_python=is_python,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
|
||||
class JsonArgsPlanner(ScriptPlanner):
|
||||
"""
|
||||
Script that has its interpreter directive and the task arguments
|
||||
substituted into its source as a JSON string.
|
||||
"""
|
||||
runner_name = 'JsonArgsRunner'
|
||||
|
||||
@classmethod
|
||||
def detect(cls, path, source):
|
||||
return module_common.REPLACER_JSONARGS in source
|
||||
|
||||
|
||||
class WantJsonPlanner(ScriptPlanner):
|
||||
"""
|
||||
If a module has the string WANT_JSON in it anywhere, Ansible treats it as a
|
||||
non-native module that accepts a filename as its only command line
|
||||
parameter. The filename is for a temporary file containing a JSON string
|
||||
containing the module's parameters. The module needs to open the file, read
|
||||
and parse the parameters, operate on the data, and print its return data as
|
||||
a JSON encoded dictionary to stdout before exiting.
|
||||
|
||||
These types of modules are self-contained entities. As of Ansible 2.1,
|
||||
Ansible only modifies them to change a shebang line if present.
|
||||
"""
|
||||
runner_name = 'WantJsonRunner'
|
||||
|
||||
@classmethod
|
||||
def detect(cls, path, source):
|
||||
return b'WANT_JSON' in source
|
||||
|
||||
|
||||
class NewStylePlanner(ScriptPlanner):
|
||||
"""
|
||||
The Ansiballz framework differs from module replacer in that it uses real
|
||||
Python imports of things in ansible/module_utils instead of merely
|
||||
preprocessing the module.
|
||||
"""
|
||||
runner_name = 'NewStyleRunner'
|
||||
marker = b'from ansible.module_utils.'
|
||||
|
||||
@classmethod
|
||||
def detect(cls, path, source):
|
||||
return cls.marker in source
|
||||
|
||||
def _get_interpreter(self):
|
||||
return None, None
|
||||
|
||||
def get_push_files(self):
|
||||
return super(NewStylePlanner, self).get_push_files() + [
|
||||
mitogen.core.to_text(path)
|
||||
for fullname, path, is_pkg in self.get_module_map()['custom']
|
||||
]
|
||||
|
||||
def get_module_deps(self):
|
||||
return self.get_module_map()['builtin']
|
||||
|
||||
#: Module names appearing in this set always require forking, usually due
|
||||
#: to some terminal leakage that cannot be worked around in any sane
|
||||
#: manner.
|
||||
ALWAYS_FORK_MODULES = frozenset([
|
||||
'dnf', # issue #280; py-dnf/hawkey need therapy
|
||||
'firewalld', # issue #570: ansible module_utils caches dbus conn
|
||||
])
|
||||
|
||||
def should_fork(self):
|
||||
"""
|
||||
In addition to asynchronous tasks, new-style modules should be forked
|
||||
if:
|
||||
|
||||
* the user specifies mitogen_task_isolation=fork, or
|
||||
* the new-style module has a custom module search path, or
|
||||
* the module is known to leak like a sieve.
|
||||
"""
|
||||
return (
|
||||
super(NewStylePlanner, self).should_fork() or
|
||||
(self._inv.task_vars.get('mitogen_task_isolation') == 'fork') or
|
||||
(self._inv.module_name in self.ALWAYS_FORK_MODULES) or
|
||||
(len(self.get_module_map()['custom']) > 0)
|
||||
)
|
||||
|
||||
def get_search_path(self):
|
||||
return tuple(
|
||||
path
|
||||
for path in ansible_mitogen.loaders.module_utils_loader._get_paths(
|
||||
subdirs=False
|
||||
)
|
||||
)
|
||||
|
||||
_module_map = None
|
||||
|
||||
def get_module_map(self):
|
||||
if self._module_map is None:
|
||||
binding = self._inv.connection.get_binding()
|
||||
self._module_map = mitogen.service.call(
|
||||
call_context=binding.get_service_context(),
|
||||
service_name='ansible_mitogen.services.ModuleDepService',
|
||||
method_name='scan',
|
||||
|
||||
module_name='ansible_module_%s' % (self._inv.module_name,),
|
||||
module_path=self._inv.module_path,
|
||||
search_path=self.get_search_path(),
|
||||
builtin_path=module_common._MODULE_UTILS_PATH,
|
||||
context=self._inv.connection.context,
|
||||
)
|
||||
return self._module_map
|
||||
|
||||
def get_kwargs(self):
|
||||
return super(NewStylePlanner, self).get_kwargs(
|
||||
module_map=self.get_module_map(),
|
||||
py_module_name=py_modname_from_path(
|
||||
self._inv.module_name,
|
||||
self._inv.module_path,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class ReplacerPlanner(NewStylePlanner):
|
||||
"""
|
||||
The Module Replacer framework is the original framework implementing
|
||||
new-style modules. It is essentially a preprocessor (like the C
|
||||
Preprocessor for those familiar with that programming language). It does
|
||||
straight substitutions of specific substring patterns in the module file.
|
||||
There are two types of substitutions.
|
||||
|
||||
* Replacements that only happen in the module file. These are public
|
||||
replacement strings that modules can utilize to get helpful boilerplate
|
||||
or access to arguments.
|
||||
|
||||
"from ansible.module_utils.MOD_LIB_NAME import *" is replaced with the
|
||||
contents of the ansible/module_utils/MOD_LIB_NAME.py. These should only
|
||||
be used with new-style Python modules.
|
||||
|
||||
"#<<INCLUDE_ANSIBLE_MODULE_COMMON>>" is equivalent to
|
||||
"from ansible.module_utils.basic import *" and should also only apply to
|
||||
new-style Python modules.
|
||||
|
||||
"# POWERSHELL_COMMON" substitutes the contents of
|
||||
"ansible/module_utils/powershell.ps1". It should only be used with
|
||||
new-style Powershell modules.
|
||||
"""
|
||||
runner_name = 'ReplacerRunner'
|
||||
|
||||
@classmethod
|
||||
def detect(cls, path, source):
|
||||
return module_common.REPLACER in source
|
||||
|
||||
|
||||
class OldStylePlanner(ScriptPlanner):
|
||||
runner_name = 'OldStyleRunner'
|
||||
|
||||
@classmethod
|
||||
def detect(cls, path, source):
|
||||
# Everything else.
|
||||
return True
|
||||
|
||||
|
||||
_planners = [
|
||||
BinaryPlanner,
|
||||
# ReplacerPlanner,
|
||||
NewStylePlanner,
|
||||
JsonArgsPlanner,
|
||||
WantJsonPlanner,
|
||||
OldStylePlanner,
|
||||
]
|
||||
|
||||
|
||||
try:
|
||||
_get_ansible_module_fqn = module_common._get_ansible_module_fqn
|
||||
except AttributeError:
|
||||
_get_ansible_module_fqn = None
|
||||
|
||||
|
||||
def py_modname_from_path(name, path):
|
||||
"""
|
||||
Fetch the logical name of a new-style module as it might appear in
|
||||
:data:`sys.modules` of the target's Python interpreter.
|
||||
|
||||
* For Ansible <2.7, this is an unpackaged module named like
|
||||
"ansible_module_%s".
|
||||
|
||||
* For Ansible <2.9, this is an unpackaged module named like
|
||||
"ansible.modules.%s"
|
||||
|
||||
* Since Ansible 2.9, modules appearing within a package have the original
|
||||
package hierarchy approximated on the target, enabling relative imports
|
||||
to function correctly. For example, "ansible.modules.system.setup".
|
||||
"""
|
||||
# 2.9+
|
||||
if _get_ansible_module_fqn:
|
||||
try:
|
||||
return _get_ansible_module_fqn(path)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
if ansible.__version__ < '2.7':
|
||||
return 'ansible_module_' + name
|
||||
|
||||
return 'ansible.modules.' + name
|
||||
|
||||
|
||||
def read_file(path):
|
||||
fd = os.open(path, os.O_RDONLY)
|
||||
try:
|
||||
bits = []
|
||||
chunk = True
|
||||
while True:
|
||||
chunk = os.read(fd, 65536)
|
||||
if not chunk:
|
||||
break
|
||||
bits.append(chunk)
|
||||
finally:
|
||||
os.close(fd)
|
||||
|
||||
return mitogen.core.b('').join(bits)
|
||||
|
||||
|
||||
def _propagate_deps(invocation, planner, context):
|
||||
binding = invocation.connection.get_binding()
|
||||
mitogen.service.call(
|
||||
call_context=binding.get_service_context(),
|
||||
service_name='mitogen.service.PushFileService',
|
||||
method_name='propagate_paths_and_modules',
|
||||
|
||||
context=context,
|
||||
paths=planner.get_push_files(),
|
||||
modules=planner.get_module_deps(),
|
||||
)
|
||||
|
||||
|
||||
def _invoke_async_task(invocation, planner):
|
||||
job_id = '%016x' % random.randint(0, 2**64)
|
||||
context = invocation.connection.spawn_isolated_child()
|
||||
_propagate_deps(invocation, planner, context)
|
||||
|
||||
with mitogen.core.Receiver(context.router) as started_recv:
|
||||
call_recv = context.call_async(
|
||||
ansible_mitogen.target.run_module_async,
|
||||
job_id=job_id,
|
||||
timeout_secs=invocation.timeout_secs,
|
||||
started_sender=started_recv.to_sender(),
|
||||
kwargs=planner.get_kwargs(),
|
||||
)
|
||||
|
||||
# Wait for run_module_async() to crash, or for AsyncRunner to indicate
|
||||
# the job file has been written.
|
||||
for msg in mitogen.select.Select([started_recv, call_recv]):
|
||||
if msg.receiver is call_recv:
|
||||
# It can only be an exception.
|
||||
raise msg.unpickle()
|
||||
break
|
||||
|
||||
return {
|
||||
'stdout': json.dumps({
|
||||
# modules/utilities/logic/async_wrapper.py::_run_module().
|
||||
'changed': True,
|
||||
'started': 1,
|
||||
'finished': 0,
|
||||
'ansible_job_id': job_id,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
def _invoke_isolated_task(invocation, planner):
|
||||
context = invocation.connection.spawn_isolated_child()
|
||||
_propagate_deps(invocation, planner, context)
|
||||
try:
|
||||
return context.call(
|
||||
ansible_mitogen.target.run_module,
|
||||
kwargs=planner.get_kwargs(),
|
||||
)
|
||||
finally:
|
||||
context.shutdown()
|
||||
|
||||
|
||||
def _get_planner(name, path, source):
|
||||
for klass in _planners:
|
||||
if klass.detect(path, source):
|
||||
LOG.debug('%r accepted %r (filename %r)', klass, name, path)
|
||||
return klass
|
||||
LOG.debug('%r rejected %r', klass, name)
|
||||
raise ansible.errors.AnsibleError(NO_METHOD_MSG + repr(invocation))
|
||||
|
||||
|
||||
def invoke(invocation):
|
||||
"""
|
||||
Find a Planner subclass corresponding to `invocation` and use it to invoke
|
||||
the module.
|
||||
|
||||
:param Invocation invocation:
|
||||
:returns:
|
||||
Module return dict.
|
||||
:raises ansible.errors.AnsibleError:
|
||||
Unrecognized/unsupported module type.
|
||||
"""
|
||||
path = ansible_mitogen.loaders.module_loader.find_plugin(
|
||||
invocation.module_name,
|
||||
'',
|
||||
)
|
||||
if path is None:
|
||||
raise ansible.errors.AnsibleError(NO_MODULE_MSG % (
|
||||
invocation.module_name,
|
||||
))
|
||||
|
||||
invocation.module_path = mitogen.core.to_text(path)
|
||||
if invocation.module_path not in _planner_by_path:
|
||||
_planner_by_path[invocation.module_path] = _get_planner(
|
||||
invocation.module_name,
|
||||
invocation.module_path,
|
||||
invocation.get_module_source()
|
||||
)
|
||||
|
||||
planner = _planner_by_path[invocation.module_path](invocation)
|
||||
if invocation.wrap_async:
|
||||
response = _invoke_async_task(invocation, planner)
|
||||
elif planner.should_fork():
|
||||
response = _invoke_isolated_task(invocation, planner)
|
||||
else:
|
||||
_propagate_deps(invocation, planner, invocation.connection.context)
|
||||
response = invocation.connection.get_chain().call(
|
||||
ansible_mitogen.target.run_module,
|
||||
kwargs=planner.get_kwargs(),
|
||||
)
|
||||
|
||||
return invocation.action._postprocess_response(response)
|
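A hedged sketch of the planner selection step in isolation: locate a module the same way invoke() does, read its source, and ask each Planner class to detect it. The 'ping' module is only an example:

import ansible_mitogen.loaders
import ansible_mitogen.planner

path = ansible_mitogen.loaders.module_loader.find_plugin('ping', '')
source = ansible_mitogen.planner.read_file(path)
klass = ansible_mitogen.planner._get_planner('ping', path, source)
print(klass.__name__)   # typically NewStylePlanner for the stock ping module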
0
mitogen/ansible_mitogen/plugins/__init__.py
Normal file
0
mitogen/ansible_mitogen/plugins/action/__init__.py
Normal file
162
mitogen/ansible_mitogen/plugins/action/mitogen_fetch.py
Normal file
@@ -0,0 +1,162 @@
|
||||
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
|
||||
from ansible.module_utils._text import to_bytes
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.parsing.convert_bool import boolean
|
||||
from ansible.plugins.action import ActionBase
|
||||
from ansible.utils.hashing import checksum, md5, secure_hash
|
||||
from ansible.utils.path import makedirs_safe
|
||||
|
||||
|
||||
REMOTE_CHECKSUM_ERRORS = {
|
||||
'0': "unable to calculate the checksum of the remote file",
|
||||
'1': "the remote file does not exist",
|
||||
'2': "no read permission on remote file",
|
||||
'3': "remote file is a directory, fetch cannot work on directories",
|
||||
'4': "python isn't present on the system. Unable to compute checksum",
|
||||
'5': "stdlib json was not found on the remote machine. Only the raw module can work without those installed",
|
||||
}
|
||||
|
||||
|
||||
class ActionModule(ActionBase):
|
||||
|
||||
def run(self, tmp=None, task_vars=None):
|
||||
''' handler for fetch operations '''
|
||||
if task_vars is None:
|
||||
task_vars = dict()
|
||||
|
||||
result = super(ActionModule, self).run(tmp, task_vars)
|
||||
try:
|
||||
if self._play_context.check_mode:
|
||||
result['skipped'] = True
|
||||
result['msg'] = 'check mode not (yet) supported for this module'
|
||||
return result
|
||||
|
||||
flat = boolean(self._task.args.get('flat'), strict=False)
|
||||
fail_on_missing = boolean(self._task.args.get('fail_on_missing', True), strict=False)
|
||||
validate_checksum = boolean(self._task.args.get('validate_checksum', True), strict=False)
|
||||
|
||||
# validate that source and dest are strings; FIXME: use basic.py and module specs
|
||||
source = self._task.args.get('src')
|
||||
if not isinstance(source, string_types):
|
||||
result['msg'] = "Invalid type supplied for source option, it must be a string"
|
||||
|
||||
dest = self._task.args.get('dest')
|
||||
if not isinstance(dest, string_types):
|
||||
result['msg'] = "Invalid type supplied for dest option, it must be a string"
|
||||
|
||||
if result.get('msg'):
|
||||
result['failed'] = True
|
||||
return result
|
||||
|
||||
source = self._connection._shell.join_path(source)
|
||||
source = self._remote_expand_user(source)
|
||||
|
||||
# calculate checksum for the remote file, don't bother if using
|
||||
# become as slurp will be used. Force remote_checksum to follow
|
||||
# symlinks because fetch always follows symlinks
|
||||
remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True)
|
||||
|
||||
# calculate the destination name
|
||||
if os.path.sep not in self._connection._shell.join_path('a', ''):
|
||||
source = self._connection._shell._unquote(source)
|
||||
source_local = source.replace('\\', '/')
|
||||
else:
|
||||
source_local = source
|
||||
|
||||
dest = os.path.expanduser(dest)
|
||||
if flat:
|
||||
if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
|
||||
result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
|
||||
result['file'] = dest
|
||||
result['failed'] = True
|
||||
return result
|
||||
if dest.endswith(os.sep):
|
||||
# if the path ends with "/", we'll use the source filename as the
|
||||
# destination filename
|
||||
base = os.path.basename(source_local)
|
||||
dest = os.path.join(dest, base)
|
||||
if not dest.startswith("/"):
|
||||
# if dest does not start with "/", we'll assume a relative path
|
||||
dest = self._loader.path_dwim(dest)
|
||||
else:
|
||||
# files are saved in dest dir, with a subdir for each host, then the filename
|
||||
if 'inventory_hostname' in task_vars:
|
||||
target_name = task_vars['inventory_hostname']
|
||||
else:
|
||||
target_name = self._play_context.remote_addr
|
||||
dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)
|
||||
|
||||
dest = dest.replace("//", "/")
|
||||
|
||||
if remote_checksum in REMOTE_CHECKSUM_ERRORS:
|
||||
result['changed'] = False
|
||||
result['file'] = source
|
||||
result['msg'] = REMOTE_CHECKSUM_ERRORS[remote_checksum]
|
||||
# Historically, these don't fail because you may want to transfer
|
||||
# a log file that possibly MAY exist but keep going to fetch other
|
||||
# log files. Today, this is better achieved by adding
|
||||
# ignore_errors or failed_when to the task. Control the behaviour
|
||||
# via fail_on_missing
|
||||
if fail_on_missing:
|
||||
result['failed'] = True
|
||||
del result['changed']
|
||||
else:
|
||||
result['msg'] += ", not transferring, ignored"
|
||||
return result
|
||||
|
||||
# calculate checksum for the local file
|
||||
local_checksum = checksum(dest)
|
||||
|
||||
if remote_checksum != local_checksum:
|
||||
# create the containing directories, if needed
|
||||
makedirs_safe(os.path.dirname(dest))
|
||||
|
||||
# fetch the file and check for changes
|
||||
self._connection.fetch_file(source, dest)
|
||||
new_checksum = secure_hash(dest)
|
||||
# For backwards compatibility. We'll return None on FIPS enabled systems
|
||||
try:
|
||||
new_md5 = md5(dest)
|
||||
except ValueError:
|
||||
new_md5 = None
|
||||
|
||||
if validate_checksum and new_checksum != remote_checksum:
|
||||
result.update(dict(failed=True, md5sum=new_md5,
|
||||
msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None,
|
||||
checksum=new_checksum, remote_checksum=remote_checksum))
|
||||
else:
|
||||
result.update({'changed': True, 'md5sum': new_md5, 'dest': dest,
|
||||
'remote_md5sum': None, 'checksum': new_checksum,
|
||||
'remote_checksum': remote_checksum})
|
||||
else:
|
||||
# For backwards compatibility. We'll return None on FIPS enabled systems
|
||||
try:
|
||||
local_md5 = md5(dest)
|
||||
except ValueError:
|
||||
local_md5 = None
|
||||
result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))
|
||||
|
||||
finally:
|
||||
self._remove_tmp_path(self._connection._shell.tmpdir)
|
||||
|
||||
return result
|
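The long conditional above reduces to a three-way decision on checksums: an error sentinel reported by the remote side, a local copy that already matches, or a transfer followed by validation. A condensed, hypothetical sketch of that control flow (simplified helper names, not the actual ActionModule code):

# Sketch of the fetch decision logic, assuming remote_sum is either a hex
# digest or a one-character error code matching REMOTE_CHECKSUM_ERRORS.
import hashlib
import os

REMOTE_CHECKSUM_ERRORS = {'1': 'the remote file does not exist'}  # abridged

def local_checksum(path):
    # Digest of the existing local copy, or None if it is absent.
    if not os.path.exists(path):
        return None
    with open(path, 'rb') as fp:
        return hashlib.sha1(fp.read()).hexdigest()

def fetch_decision(remote_sum, dest, fail_on_missing=True):
    if remote_sum in REMOTE_CHECKSUM_ERRORS:
        # Remote side reported a problem instead of a digest.
        return {'failed': fail_on_missing,
                'msg': REMOTE_CHECKSUM_ERRORS[remote_sum]}
    if remote_sum == local_checksum(dest):
        # Local copy already matches; nothing to transfer.
        return {'changed': False, 'dest': dest}
    # Otherwise transfer, then re-checksum dest and compare with remote_sum
    # to decide between 'changed' and 'checksum mismatch'.
    return {'changed': True, 'dest': dest}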
55
mitogen/ansible_mitogen/plugins/action/mitogen_get_stack.py
Normal file
55
mitogen/ansible_mitogen/plugins/action/mitogen_get_stack.py
Normal file
@ -0,0 +1,55 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
"""
|
||||
Fetch the connection configuration stack that would be used to connect to a
|
||||
target, without actually connecting to it.
|
||||
"""
|
||||
|
||||
import ansible_mitogen.connection
|
||||
|
||||
from ansible.plugins.action import ActionBase
|
||||
|
||||
|
||||
class ActionModule(ActionBase):
|
||||
def run(self, tmp=None, task_vars=None):
|
||||
if not isinstance(self._connection,
|
||||
ansible_mitogen.connection.Connection):
|
||||
return {
|
||||
'skipped': True,
|
||||
}
|
||||
|
||||
_, stack = self._connection._build_stack()
|
||||
return {
|
||||
'changed': True,
|
||||
'result': stack,
|
||||
'_ansible_verbose_always': True,
|
||||
}
|
Binary file not shown.
@ -0,0 +1,44 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
try:
|
||||
import ansible_mitogen
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'buildah'
|
44
mitogen/ansible_mitogen/plugins/connection/mitogen_doas.py
Normal file
44
mitogen/ansible_mitogen/plugins/connection/mitogen_doas.py
Normal file
@ -0,0 +1,44 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
try:
|
||||
import ansible_mitogen.connection
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'mitogen_doas'
|
51
mitogen/ansible_mitogen/plugins/connection/mitogen_docker.py
Normal file
51
mitogen/ansible_mitogen/plugins/connection/mitogen_docker.py
Normal file
@ -0,0 +1,51 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
try:
|
||||
import ansible_mitogen
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'docker'
|
||||
|
||||
@property
|
||||
def docker_cmd(self):
|
||||
"""
|
||||
Ansible 2.3 synchronize module wants to know how we run Docker.
|
||||
"""
|
||||
return 'docker'
|
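The docker_cmd property above exists purely for compatibility: per its docstring, older action plugins (the Ansible 2.3 synchronize module) ask the connection which binary is used to reach the container. A tiny, hypothetical illustration of how such a caller might consume it; 'conn' and the helper are made up for this note:

# Hypothetical caller-side usage; 'conn' stands in for a Connection instance.
def build_copy_command(conn, container_id, src, dest):
    # Compose a "docker cp" invocation from whatever binary the
    # connection reports via its docker_cmd property.
    return [conn.docker_cmd, 'cp', src, '%s:%s' % (container_id, dest)]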
44
mitogen/ansible_mitogen/plugins/connection/mitogen_jail.py
Normal file
44
mitogen/ansible_mitogen/plugins/connection/mitogen_jail.py
Normal file
@ -0,0 +1,44 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
try:
|
||||
import ansible_mitogen
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'jail'
|
@ -0,0 +1,79 @@
|
||||
# coding: utf-8
|
||||
# Copyright 2018, Yannig Perré
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from ansible.errors import AnsibleConnectionFailure
|
||||
from ansible.module_utils.six import iteritems
|
||||
|
||||
try:
|
||||
import ansible_mitogen
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
import ansible_mitogen.loaders
|
||||
|
||||
|
||||
_class = ansible_mitogen.loaders.connection_loader__get(
|
||||
'kubectl',
|
||||
class_only=True,
|
||||
)
|
||||
|
||||
if _class:
|
||||
kubectl = sys.modules[_class.__module__]
|
||||
del _class
|
||||
else:
|
||||
kubectl = None
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'kubectl'
|
||||
|
||||
not_supported_msg = (
|
||||
'The "mitogen_kubectl" plug-in requires a version of Ansible '
|
||||
'that ships with the "kubectl" connection plug-in.'
|
||||
)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if kubectl is None:
|
||||
raise AnsibleConnectionFailure(self.not_supported_msg)
|
||||
super(Connection, self).__init__(*args, **kwargs)
|
||||
|
||||
def get_extra_args(self):
|
||||
parameters = []
|
||||
for key, option in iteritems(kubectl.CONNECTION_OPTIONS):
|
||||
if self.get_task_var('ansible_' + key) is not None:
|
||||
parameters += [ option, self.get_task_var('ansible_' + key) ]
|
||||
|
||||
return parameters
|
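get_extra_args() above walks the vanilla kubectl plugin's CONNECTION_OPTIONS mapping and turns each ansible_* task variable that is set into a pair of command-line arguments. A small, self-contained sketch of the same mapping idea, using a made-up options table and a plain dict in place of get_task_var():

# Hypothetical stand-ins: a fragment of an options table mapping variable
# suffixes to CLI flags, and a dict of task vars instead of get_task_var().
CONNECTION_OPTIONS = {
    'kubectl_namespace': '--namespace',
    'kubectl_context': '--context',
}

def extra_args(task_vars):
    parameters = []
    for key, option in CONNECTION_OPTIONS.items():
        value = task_vars.get('ansible_' + key)
        if value is not None:
            # Emit the flag followed by its value, as the plugin does.
            parameters += [option, value]
    return parameters

# extra_args({'ansible_kubectl_namespace': 'kube-system'})
#   -> ['--namespace', 'kube-system']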
86
mitogen/ansible_mitogen/plugins/connection/mitogen_local.py
Normal file
86
mitogen/ansible_mitogen/plugins/connection/mitogen_local.py
Normal file
@ -0,0 +1,86 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
try:
|
||||
import ansible_mitogen.connection
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
import ansible_mitogen.process
|
||||
|
||||
|
||||
if sys.version_info > (3,):
|
||||
viewkeys = dict.keys
|
||||
elif sys.version_info > (2, 7):
|
||||
viewkeys = dict.viewkeys
|
||||
else:
|
||||
viewkeys = lambda dct: set(dct)
|
||||
|
||||
|
||||
def dict_diff(old, new):
|
||||
"""
|
||||
Return a dict representing the differences between the dicts `old` and
|
||||
`new`. Deleted keys appear as a key with the value :data:`None`, added and
|
||||
changed keys appear as a key with the new value.
|
||||
"""
|
||||
old_keys = viewkeys(old)
|
||||
new_keys = viewkeys(dict(new))
|
||||
out = {}
|
||||
for key in new_keys - old_keys:
|
||||
out[key] = new[key]
|
||||
for key in old_keys - new_keys:
|
||||
out[key] = None
|
||||
for key in old_keys & new_keys:
|
||||
if old[key] != new[key]:
|
||||
out[key] = new[key]
|
||||
return out
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'local'
|
||||
|
||||
def get_default_cwd(self):
|
||||
# https://github.com/ansible/ansible/issues/14489
|
||||
return self.loader_basedir
|
||||
|
||||
def get_default_env(self):
|
||||
"""
|
||||
Vanilla Ansible local commands execute with an environment inherited
|
||||
from WorkerProcess; we must emulate that.
|
||||
"""
|
||||
return dict_diff(
|
||||
old=ansible_mitogen.process.MuxProcess.cls_original_env,
|
||||
new=os.environ,
|
||||
)
|
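dict_diff() above is what get_default_env() relies on to reproduce only the environment changes made since MuxProcess captured its original environment. A quick usage note for dict_diff() as defined in this file, with example inputs chosen purely for illustration:

# The real caller passes the saved MuxProcess environment as 'old' and
# os.environ as 'new'; these literals just demonstrate the return shape.
old = {'PATH': '/usr/bin', 'LANG': 'C', 'TMPDIR': '/tmp'}
new = {'PATH': '/usr/bin:/opt/bin', 'LANG': 'C', 'EDITOR': 'vi'}

# dict_diff(old, new) would return:
#   {'PATH': '/usr/bin:/opt/bin',  # changed key -> new value
#    'EDITOR': 'vi',               # added key -> new value
#    'TMPDIR': None}               # deleted key -> None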
44
mitogen/ansible_mitogen/plugins/connection/mitogen_lxc.py
Normal file
44
mitogen/ansible_mitogen/plugins/connection/mitogen_lxc.py
Normal file
@ -0,0 +1,44 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
try:
|
||||
import ansible_mitogen
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'lxc'
|
44
mitogen/ansible_mitogen/plugins/connection/mitogen_lxd.py
Normal file
44
mitogen/ansible_mitogen/plugins/connection/mitogen_lxd.py
Normal file
@ -0,0 +1,44 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
try:
|
||||
import ansible_mitogen
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'lxd'
|
@ -0,0 +1,44 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
try:
|
||||
import ansible_mitogen.connection
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'machinectl'
|
44
mitogen/ansible_mitogen/plugins/connection/mitogen_setns.py
Normal file
44
mitogen/ansible_mitogen/plugins/connection/mitogen_setns.py
Normal file
@ -0,0 +1,44 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
try:
|
||||
import ansible_mitogen.connection
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'setns'
|
67
mitogen/ansible_mitogen/plugins/connection/mitogen_ssh.py
Normal file
67
mitogen/ansible_mitogen/plugins/connection/mitogen_ssh.py
Normal file
@ -0,0 +1,67 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
DOCUMENTATION = """
|
||||
author: David Wilson <dw@botanicus.net>
|
||||
connection: mitogen_ssh
|
||||
short_description: Connect over SSH via Mitogen
|
||||
description:
|
||||
- This connects using an OpenSSH client controlled by the Mitogen for
|
||||
Ansible extension. It accepts every option the vanilla ssh plugin
|
||||
accepts.
|
||||
version_added: "2.5"
|
||||
options:
|
||||
"""
|
||||
|
||||
try:
|
||||
import ansible_mitogen
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
import ansible_mitogen.loaders
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'ssh'
|
||||
vanilla_class = ansible_mitogen.loaders.connection_loader__get(
|
||||
'ssh',
|
||||
class_only=True,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _create_control_path(*args, **kwargs):
|
||||
"""Forward _create_control_path() to the implementation in ssh.py."""
|
||||
# https://github.com/dw/mitogen/issues/342
|
||||
return Connection.vanilla_class._create_control_path(*args, **kwargs)
|
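The _create_control_path() override above simply forwards to the vanilla ssh plugin class fetched through the loader, so ControlPath naming stays identical to stock Ansible (see the linked issue #342). A generic, hypothetical sketch of that forwarding pattern, with invented classes rather than the real plugin types:

# Hypothetical classes illustrating "borrow a staticmethod from the wrapped
# implementation"; not the real ssh plugin classes.
class VanillaSSH(object):
    @staticmethod
    def _create_control_path(host, port, user):
        return '/tmp/cp-%s-%s-%s' % (host, port, user)

class MitogenSSH(object):
    vanilla_class = VanillaSSH

    @staticmethod
    def _create_control_path(*args, **kwargs):
        # Delegate so both plugins compute identical paths.
        return MitogenSSH.vanilla_class._create_control_path(*args, **kwargs)

# MitogenSSH._create_control_path('host', 22, 'root')
#   == VanillaSSH._create_control_path('host', 22, 'root')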
44
mitogen/ansible_mitogen/plugins/connection/mitogen_su.py
Normal file
44
mitogen/ansible_mitogen/plugins/connection/mitogen_su.py
Normal file
@ -0,0 +1,44 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
try:
|
||||
import ansible_mitogen.connection
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'mitogen_su'
|
44
mitogen/ansible_mitogen/plugins/connection/mitogen_sudo.py
Normal file
44
mitogen/ansible_mitogen/plugins/connection/mitogen_sudo.py
Normal file
@ -0,0 +1,44 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
try:
|
||||
import ansible_mitogen.connection
|
||||
except ImportError:
|
||||
base_dir = os.path.dirname(__file__)
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
||||
del base_dir
|
||||
|
||||
import ansible_mitogen.connection
|
||||
|
||||
|
||||
class Connection(ansible_mitogen.connection.Connection):
|
||||
transport = 'mitogen_sudo'
|
Binary file not shown.
61
mitogen/ansible_mitogen/plugins/strategy/mitogen.py
Normal file
61
mitogen/ansible_mitogen/plugins/strategy/mitogen.py
Normal file
@ -0,0 +1,61 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
#
|
||||
# This is not the real Strategy implementation module, it simply exists as a
|
||||
# proxy to the real module, which is loaded using Python's regular import
|
||||
# mechanism, to prevent Ansible's PluginLoader from making up a fake name that
|
||||
# results in ansible_mitogen plugin modules being loaded twice: once by
|
||||
# PluginLoader with a name like "ansible.plugins.strategy.mitogen", which is
|
||||
# stuffed into sys.modules even though attempting to import it will trigger an
|
||||
# ImportError, and once under its canonical name, "ansible_mitogen.strategy".
|
||||
#
|
||||
# Therefore we have a proxy module that imports it under the real name, and
|
||||
# sets up the duff PluginLoader-imported module to just contain objects from
|
||||
# the real module, so duplicate types don't exist in memory, and things like
|
||||
# debuggers and isinstance() work predictably.
|
||||
#
|
||||
|
||||
BASE_DIR = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), '../../..')
|
||||
)
|
||||
|
||||
if BASE_DIR not in sys.path:
|
||||
sys.path.insert(0, BASE_DIR)
|
||||
|
||||
import ansible_mitogen.strategy
|
||||
import ansible.plugins.strategy.linear
|
||||
|
||||
|
||||
class StrategyModule(ansible_mitogen.strategy.StrategyMixin,
|
||||
ansible.plugins.strategy.linear.StrategyModule):
|
||||
pass
|
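The comment block above explains why each strategy plugin is only a thin proxy: if PluginLoader also executed the real module under an invented name, the same classes would exist twice in memory. A small, self-contained illustration (throwaway file, hypothetical module names) of how loading one source file under two names produces distinct class objects:

# Python 3 sketch; writes a throwaway module and loads it under two names.
import importlib.util
import os
import tempfile
import textwrap

src = textwrap.dedent("""
    class StrategyModule(object):
        pass
""")
path = os.path.join(tempfile.mkdtemp(), 'strategy_mitogen.py')
with open(path, 'w') as fp:
    fp.write(src)

def load_as(name):
    # Execute the same file under whatever module name we are given.
    spec = importlib.util.spec_from_file_location(name, path)
    mod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(mod)
    return mod

a = load_as('ansible.plugins.strategy.mitogen')  # PluginLoader-style name
b = load_as('ansible_mitogen.strategy_proxy')    # canonical-style name
print(a.StrategyModule is b.StrategyModule)      # False: two distinct types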
62
mitogen/ansible_mitogen/plugins/strategy/mitogen_free.py
Normal file
62
mitogen/ansible_mitogen/plugins/strategy/mitogen_free.py
Normal file
@ -0,0 +1,62 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
#
|
||||
# This is not the real Strategy implementation module, it simply exists as a
|
||||
# proxy to the real module, which is loaded using Python's regular import
|
||||
# mechanism, to prevent Ansible's PluginLoader from making up a fake name that
|
||||
# results in ansible_mitogen plugin modules being loaded twice: once by
|
||||
# PluginLoader with a name like "ansible.plugins.strategy.mitogen", which is
|
||||
# stuffed into sys.modules even though attempting to import it will trigger an
|
||||
# ImportError, and once under its canonical name, "ansible_mitogen.strategy".
|
||||
#
|
||||
# Therefore we have a proxy module that imports it under the real name, and
|
||||
# sets up the duff PluginLoader-imported module to just contain objects from
|
||||
# the real module, so duplicate types don't exist in memory, and things like
|
||||
# debuggers and isinstance() work predictably.
|
||||
#
|
||||
|
||||
BASE_DIR = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), '../../..')
|
||||
)
|
||||
|
||||
if BASE_DIR not in sys.path:
|
||||
sys.path.insert(0, BASE_DIR)
|
||||
|
||||
import ansible_mitogen.loaders
|
||||
import ansible_mitogen.strategy
|
||||
|
||||
|
||||
Base = ansible_mitogen.loaders.strategy_loader.get('free', class_only=True)
|
||||
|
||||
class StrategyModule(ansible_mitogen.strategy.StrategyMixin, Base):
|
||||
pass
|
@ -0,0 +1,67 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
#
|
||||
# This is not the real Strategy implementation module, it simply exists as a
|
||||
# proxy to the real module, which is loaded using Python's regular import
|
||||
# mechanism, to prevent Ansible's PluginLoader from making up a fake name that
|
||||
# results in ansible_mitogen plugin modules being loaded twice: once by
|
||||
# PluginLoader with a name like "ansible.plugins.strategy.mitogen", which is
|
||||
# stuffed into sys.modules even though attempting to import it will trigger an
|
||||
# ImportError, and once under its canonical name, "ansible_mitogen.strategy".
|
||||
#
|
||||
# Therefore we have a proxy module that imports it under the real name, and
|
||||
# sets up the duff PluginLoader-imported module to just contain objects from
|
||||
# the real module, so duplicate types don't exist in memory, and things like
|
||||
# debuggers and isinstance() work predictably.
|
||||
#
|
||||
|
||||
BASE_DIR = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), '../../..')
|
||||
)
|
||||
|
||||
if BASE_DIR not in sys.path:
|
||||
sys.path.insert(0, BASE_DIR)
|
||||
|
||||
import ansible_mitogen.loaders
|
||||
import ansible_mitogen.strategy
|
||||
|
||||
|
||||
Base = ansible_mitogen.loaders.strategy_loader.get('host_pinned', class_only=True)
|
||||
|
||||
if Base is None:
|
||||
raise ImportError(
|
||||
'The host_pinned strategy is only available in Ansible 2.7 or newer.'
|
||||
)
|
||||
|
||||
class StrategyModule(ansible_mitogen.strategy.StrategyMixin, Base):
|
||||
pass
|
62
mitogen/ansible_mitogen/plugins/strategy/mitogen_linear.py
Normal file
62
mitogen/ansible_mitogen/plugins/strategy/mitogen_linear.py
Normal file
@ -0,0 +1,62 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
#
|
||||
# This is not the real Strategy implementation module, it simply exists as a
|
||||
# proxy to the real module, which is loaded using Python's regular import
|
||||
# mechanism, to prevent Ansible's PluginLoader from making up a fake name that
|
||||
# results in ansible_mitogen plugin modules being loaded twice: once by
|
||||
# PluginLoader with a name like "ansible.plugins.strategy.mitogen", which is
|
||||
# stuffed into sys.modules even though attempting to import it will trigger an
|
||||
# ImportError, and once under its canonical name, "ansible_mitogen.strategy".
|
||||
#
|
||||
# Therefore we have a proxy module that imports it under the real name, and
|
||||
# sets up the duff PluginLoader-imported module to just contain objects from
|
||||
# the real module, so duplicate types don't exist in memory, and things like
|
||||
# debuggers and isinstance() work predictably.
|
||||
#
|
||||
|
||||
BASE_DIR = os.path.abspath(
|
||||
os.path.join(os.path.dirname(__file__), '../../..')
|
||||
)
|
||||
|
||||
if BASE_DIR not in sys.path:
|
||||
sys.path.insert(0, BASE_DIR)
|
||||
|
||||
import ansible_mitogen.loaders
|
||||
import ansible_mitogen.strategy
|
||||
|
||||
|
||||
Base = ansible_mitogen.loaders.strategy_loader.get('linear', class_only=True)
|
||||
|
||||
class StrategyModule(ansible_mitogen.strategy.StrategyMixin, Base):
|
||||
pass
|
745
mitogen/ansible_mitogen/process.py
Normal file
745
mitogen/ansible_mitogen/process.py
Normal file
@ -0,0 +1,745 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
import atexit
import logging
import multiprocessing
import os
import resource
import socket
import signal
import sys

try:
    import faulthandler
except ImportError:
    faulthandler = None

try:
    import setproctitle
except ImportError:
    setproctitle = None

import mitogen
import mitogen.core
import mitogen.debug
import mitogen.fork
import mitogen.master
import mitogen.parent
import mitogen.service
import mitogen.unix
import mitogen.utils

import ansible
import ansible.constants as C
import ansible.errors
import ansible_mitogen.logging
import ansible_mitogen.services

from mitogen.core import b
import ansible_mitogen.affinity


LOG = logging.getLogger(__name__)
|
||||
ANSIBLE_PKG_OVERRIDE = (
|
||||
u"__version__ = %r\n"
|
||||
u"__author__ = %r\n"
|
||||
)
|
||||
|
||||
MAX_MESSAGE_SIZE = 4096 * 1048576
|
||||
|
||||
worker_model_msg = (
|
||||
'Mitogen connection types may only be instantiated when one of the '
|
||||
'"mitogen_*" or "operon_*" strategies are active.'
|
||||
)
|
||||
|
||||
shutting_down_msg = (
|
||||
'The task worker cannot connect. Ansible may be shutting down, or '
|
||||
'the maximum open files limit may have been exceeded. If this occurs '
|
||||
'midway through a run, please retry after increasing the open file '
|
||||
'limit (ulimit -n). Original error: %s'
|
||||
)
|
||||
|
||||
|
||||
#: The worker model as configured by the currently running strategy. This is
|
||||
#: managed via :func:`get_worker_model` / :func:`set_worker_model` functions by
|
||||
#: :class:`StrategyMixin`.
|
||||
_worker_model = None
|
||||
|
||||
|
||||
#: A copy of the sole :class:`ClassicWorkerModel` that ever exists during a
#: classic run, as returned by :func:`get_classic_worker_model`.
_classic_worker_model = None
|
||||
|
||||
|
||||
def set_worker_model(model):
|
||||
"""
|
||||
To remove process model-wiring from
|
||||
:class:`ansible_mitogen.connection.Connection`, it is necessary to track
|
||||
some idea of the configured execution environment outside the connection
|
||||
plug-in.
|
||||
|
||||
That is what :func:`set_worker_model` and :func:`get_worker_model` are for.
|
||||
"""
|
||||
global _worker_model
|
||||
assert model is None or _worker_model is None
|
||||
_worker_model = model
|
||||
|
||||
|
||||
def get_worker_model():
|
||||
"""
|
||||
Return the :class:`WorkerModel` currently configured by the running
|
||||
strategy.
|
||||
"""
|
||||
if _worker_model is None:
|
||||
raise ansible.errors.AnsibleConnectionFailure(worker_model_msg)
|
||||
return _worker_model
|
||||
|
||||
|
||||
def get_classic_worker_model(**kwargs):
|
||||
"""
|
||||
Return the single :class:`ClassicWorkerModel` instance, constructing it if
|
||||
necessary.
|
||||
"""
|
||||
global _classic_worker_model
|
||||
assert _classic_worker_model is None or (not kwargs), \
|
||||
"ClassicWorkerModel kwargs supplied but model already constructed"
|
||||
|
||||
if _classic_worker_model is None:
|
||||
_classic_worker_model = ClassicWorkerModel(**kwargs)
|
||||
return _classic_worker_model
|
||||
|
||||
|
||||
def getenv_int(key, default=0):
|
||||
"""
|
||||
Get an integer-valued environment variable `key`, if it exists and parses
|
||||
as an integer, otherwise return `default`.
|
||||
"""
|
||||
try:
|
||||
return int(os.environ.get(key, str(default)))
|
||||
except ValueError:
|
||||
return default
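
# Hedged usage sketch (illustrative only; not part of the original module):
# getenv_int() treats a malformed value the same as an unset variable, so a
# configuration typo falls back to the default rather than crashing. The
# MITOGEN_DEMO_INT variable below is hypothetical and exists purely to
# demonstrate that behaviour.
def _demo_getenv_int():
    os.environ['MITOGEN_DEMO_INT'] = '64'
    assert getenv_int('MITOGEN_DEMO_INT', default=16) == 64
    os.environ['MITOGEN_DEMO_INT'] = 'not-a-number'
    assert getenv_int('MITOGEN_DEMO_INT', default=16) == 16
    del os.environ['MITOGEN_DEMO_INT']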
|
||||
|
||||
|
||||
def save_pid(name):
|
||||
"""
|
||||
When debugging and profiling, it is very annoying to poke through the
|
||||
process list to discover the currently running Ansible and MuxProcess IDs,
|
||||
especially when trying to catch an issue during early startup. So here, if
a magic environment variable is set, stash them in hidden files in the CWD::
|
||||
|
||||
alias muxpid="cat .ansible-mux.pid"
|
||||
alias anspid="cat .ansible-controller.pid"
|
||||
|
||||
gdb -p $(muxpid)
|
||||
perf top -p $(anspid)
|
||||
"""
|
||||
if os.environ.get('MITOGEN_SAVE_PIDS'):
|
||||
with open('.ansible-%s.pid' % (name,), 'w') as fp:
|
||||
fp.write(str(os.getpid()))
|
||||
|
||||
|
||||
def setup_pool(pool):
|
||||
"""
|
||||
Configure a connection multiplexer's :class:`mitogen.service.Pool` with
|
||||
services accessed by clients and WorkerProcesses.
|
||||
"""
|
||||
pool.add(mitogen.service.FileService(router=pool.router))
|
||||
pool.add(mitogen.service.PushFileService(router=pool.router))
|
||||
pool.add(ansible_mitogen.services.ContextService(router=pool.router))
|
||||
pool.add(ansible_mitogen.services.ModuleDepService(pool.router))
|
||||
LOG.debug('Service pool configured: size=%d', pool.size)
|
||||
|
||||
|
||||
def _setup_simplejson(responder):
|
||||
"""
|
||||
We support serving simplejson for Python 2.4 targets on Ansible 2.3, at
least so the package's own CI Docker scripts can run without external
help; however, newer versions of simplejson no longer support Python
2.4. Therefore override any installed/loaded version with a
2.4-compatible version we ship in the compat/ directory.
|
||||
"""
|
||||
responder.whitelist_prefix('simplejson')
|
||||
|
||||
# issue #536: must be at end of sys.path, in case existing newer
|
||||
# version is already loaded.
|
||||
compat_path = os.path.join(os.path.dirname(__file__), 'compat')
|
||||
sys.path.append(compat_path)
|
||||
|
||||
for fullname, is_pkg, suffix in (
|
||||
(u'simplejson', True, '__init__.py'),
|
||||
(u'simplejson.decoder', False, 'decoder.py'),
|
||||
(u'simplejson.encoder', False, 'encoder.py'),
|
||||
(u'simplejson.scanner', False, 'scanner.py'),
|
||||
):
|
||||
path = os.path.join(compat_path, 'simplejson', suffix)
|
||||
fp = open(path, 'rb')
|
||||
try:
|
||||
source = fp.read()
|
||||
finally:
|
||||
fp.close()
|
||||
|
||||
responder.add_source_override(
|
||||
fullname=fullname,
|
||||
path=path,
|
||||
source=source,
|
||||
is_pkg=is_pkg,
|
||||
)
|
||||
|
||||
|
||||
def _setup_responder(responder):
|
||||
"""
|
||||
Configure :class:`mitogen.master.ModuleResponder` to only permit
|
||||
certain packages, and to generate custom responses for certain modules.
|
||||
"""
|
||||
responder.whitelist_prefix('ansible')
|
||||
responder.whitelist_prefix('ansible_mitogen')
|
||||
_setup_simplejson(responder)
|
||||
|
||||
# Ansible 2.3 is compatible with Python 2.4 targets, however
|
||||
# ansible/__init__.py is not. Instead, executor/module_common.py writes
|
||||
# out a 2.4-compatible namespace package for unknown reasons. So we
|
||||
# copy it here.
|
||||
responder.add_source_override(
|
||||
fullname='ansible',
|
||||
path=ansible.__file__,
|
||||
source=(ANSIBLE_PKG_OVERRIDE % (
|
||||
ansible.__version__,
|
||||
ansible.__author__,
|
||||
)).encode(),
|
||||
is_pkg=True,
|
||||
)
|
||||
|
||||
|
||||
def increase_open_file_limit():
|
||||
"""
|
||||
#549: in order to reduce the possibility of hitting an open files limit,
|
||||
increase :data:`resource.RLIMIT_NOFILE` from its soft limit to its hard
|
||||
limit, if they differ.
|
||||
|
||||
It is common that a low soft limit is configured by default, where the hard
|
||||
limit is much higher.
|
||||
"""
|
||||
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
|
||||
if hard == resource.RLIM_INFINITY:
|
||||
hard_s = '(infinity)'
|
||||
# cap in case of O(RLIMIT_NOFILE) algorithm in some subprocess.
|
||||
hard = 524288
|
||||
else:
|
||||
hard_s = str(hard)
|
||||
|
||||
LOG.debug('inherited open file limits: soft=%d hard=%s', soft, hard_s)
|
||||
if soft >= hard:
|
||||
LOG.debug('max open files already set to hard limit: %d', hard)
|
||||
return
|
||||
|
||||
# OS X is limited by kern.maxfilesperproc sysctl, rather than the
|
||||
# advertised unlimited hard RLIMIT_NOFILE. Just hard-wire known defaults
|
||||
# for that sysctl, to avoid the mess of querying it.
|
||||
for value in (hard, 10240):
|
||||
try:
|
||||
resource.setrlimit(resource.RLIMIT_NOFILE, (value, hard))
|
||||
LOG.debug('raised soft open file limit from %d to %d', soft, value)
|
||||
break
|
||||
except ValueError as e:
|
||||
LOG.debug('could not raise soft open file limit from %d to %d: %s',
|
||||
soft, value, e)
|
||||
|
||||
|
||||
def common_setup(enable_affinity=True, _init_logging=True):
|
||||
save_pid('controller')
|
||||
ansible_mitogen.logging.set_process_name('top')
|
||||
|
||||
if _init_logging:
|
||||
ansible_mitogen.logging.setup()
|
||||
|
||||
if enable_affinity:
|
||||
ansible_mitogen.affinity.policy.assign_controller()
|
||||
|
||||
mitogen.utils.setup_gil()
|
||||
if faulthandler is not None:
|
||||
faulthandler.enable()
|
||||
|
||||
MuxProcess.profiling = getenv_int('MITOGEN_PROFILING') > 0
|
||||
if MuxProcess.profiling:
|
||||
mitogen.core.enable_profiling()
|
||||
|
||||
MuxProcess.cls_original_env = dict(os.environ)
|
||||
increase_open_file_limit()
|
||||
|
||||
|
||||
def get_cpu_count(default=None):
|
||||
"""
|
||||
Get the multiplexer CPU count from the MITOGEN_CPU_COUNT environment
|
||||
variable, returning `default` if one isn't set, or is out of range.
|
||||
|
||||
:param int default:
|
||||
Default CPU, or :data:`None` to use all available CPUs.
|
||||
"""
|
||||
max_cpus = multiprocessing.cpu_count()
|
||||
if default is None:
|
||||
default = max_cpus
|
||||
|
||||
cpu_count = getenv_int('MITOGEN_CPU_COUNT', default=default)
|
||||
if cpu_count < 1 or cpu_count > max_cpus:
|
||||
cpu_count = default
|
||||
|
||||
return cpu_count
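
# Hedged example (illustrative only): on a machine reporting 8 CPUs,
#   MITOGEN_CPU_COUNT unset  -> the supplied default (1 for the classic model)
#   MITOGEN_CPU_COUNT=4      -> 4 multiplexer processes
#   MITOGEN_CPU_COUNT=64     -> out of range, falls back to the default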
|
||||
|
||||
|
||||
class Broker(mitogen.master.Broker):
|
||||
"""
|
||||
WorkerProcess maintains at most 2 file descriptors, therefore does not need
|
||||
the exuberant syscall expense of EpollPoller, so override it and restore
|
||||
the poll() poller.
|
||||
"""
|
||||
poller_class = mitogen.core.Poller
|
||||
|
||||
|
||||
class Binding(object):
|
||||
"""
|
||||
Represent a bound connection for a particular inventory hostname. When
|
||||
operating in sharded mode, the actual MuxProcess implementing a connection
|
||||
varies according to the target machine. Depending on the particular
|
||||
implementation, this class represents a binding to the correct MuxProcess.
|
||||
"""
|
||||
def get_child_service_context(self):
|
||||
"""
|
||||
Return the :class:`mitogen.core.Context` to which children should
|
||||
direct requests for services such as FileService, or :data:`None` for
|
||||
the local process.
|
||||
|
||||
This can be different from :meth:`get_service_context` where MuxProcess
|
||||
and WorkerProcess are combined, and it is discovered a task is
|
||||
delegated after being assigned to its initial worker for the original
|
||||
un-delegated hostname. In that case, connection management and
|
||||
expensive services like file transfer must be implemented by the
|
||||
MuxProcess connected to the target, rather than routed to the
|
||||
MuxProcess responsible for executing the task.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_service_context(self):
|
||||
"""
|
||||
Return the :class:`mitogen.core.Context` to which this process should
|
||||
direct ContextService requests, or :data:`None` for the local process.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Finalize any associated resources.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class WorkerModel(object):
|
||||
"""
|
||||
Interface used by StrategyMixin to manage various Mitogen services, by
|
||||
default running in one or more connection multiplexer subprocesses spawned
|
||||
off the top-level Ansible process.
|
||||
"""
|
||||
def on_strategy_start(self):
|
||||
"""
|
||||
Called prior to strategy start in the top-level process. Responsible
|
||||
for preparing any worker/connection multiplexer state.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def on_strategy_complete(self):
|
||||
"""
|
||||
Called after strategy completion in the top-level process. Must place
|
||||
Ansible back in a "compatible" state where any other strategy plug-in
|
||||
may execute.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_binding(self, inventory_name):
|
||||
"""
|
||||
Return a :class:`Binding` to access Mitogen services for
|
||||
`inventory_name`. Usually called from worker processes, but may also be
|
||||
called from top-level process to handle "meta: reset_connection".
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class ClassicBinding(Binding):
|
||||
"""
|
||||
Only one connection may be active at a time in a classic worker, so its
|
||||
binding just provides forwarders back to :class:`ClassicWorkerModel`.
|
||||
"""
|
||||
def __init__(self, model):
|
||||
self.model = model
|
||||
|
||||
def get_service_context(self):
|
||||
"""
|
||||
See Binding.get_service_context().
|
||||
"""
|
||||
return self.model.parent
|
||||
|
||||
def get_child_service_context(self):
|
||||
"""
|
||||
See Binding.get_child_service_context().
|
||||
"""
|
||||
return self.model.parent
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
See Binding.close().
|
||||
"""
|
||||
self.model.on_binding_close()
|
||||
|
||||
|
||||
class ClassicWorkerModel(WorkerModel):
|
||||
#: In the top-level process, this references one end of a socketpair(),
|
||||
#: whose other end child MuxProcesses block reading from to determine when
|
||||
#: the master process dies. When the top-level exits abnormally, or
|
||||
#: normally but where :func:`_on_process_exit` has been called, this socket
|
||||
#: will be closed, causing all the children to wake.
|
||||
parent_sock = None
|
||||
|
||||
#: In the mux process, this is the other end of :attr:`parent_sock`.
#: The main thread blocks on a read from it until :attr:`parent_sock`
#: is closed.
|
||||
child_sock = None
|
||||
|
||||
#: mitogen.master.Router for this worker.
|
||||
router = None
|
||||
|
||||
#: mitogen.master.Broker for this worker.
|
||||
broker = None
|
||||
|
||||
#: Name of multiplexer process socket we are currently connected to.
|
||||
listener_path = None
|
||||
|
||||
#: mitogen.parent.Context representing the parent Context, which is the
|
||||
#: connection multiplexer process when running in classic mode, or the
|
||||
#: top-level process when running a new-style mode.
|
||||
parent = None
|
||||
|
||||
def __init__(self, _init_logging=True):
|
||||
"""
|
||||
Arrange for classic model multiplexers to be started. The parent chooses
UNIX socket paths each child will use prior to fork, creates a
|
||||
socketpair used essentially as a semaphore, then blocks waiting for the
|
||||
child to indicate the UNIX socket is ready for use.
|
||||
|
||||
:param bool _init_logging:
|
||||
For testing, if :data:`False`, don't initialize logging.
|
||||
"""
|
||||
# #573: The process ID that installed the :mod:`atexit` handler. If
|
||||
# some unknown Ansible plug-in forks the Ansible top-level process and
|
||||
# later performs a graceful Python exit, it may try to wait for child
|
||||
# PIDs it never owned, causing a crash. We want to avoid that.
|
||||
self._pid = os.getpid()
|
||||
|
||||
common_setup(_init_logging=_init_logging)
|
||||
|
||||
self.parent_sock, self.child_sock = socket.socketpair()
|
||||
mitogen.core.set_cloexec(self.parent_sock.fileno())
|
||||
mitogen.core.set_cloexec(self.child_sock.fileno())
|
||||
|
||||
self._muxes = [
|
||||
MuxProcess(self, index)
|
||||
for index in range(get_cpu_count(default=1))
|
||||
]
|
||||
for mux in self._muxes:
|
||||
mux.start()
|
||||
|
||||
atexit.register(self._on_process_exit)
|
||||
self.child_sock.close()
|
||||
self.child_sock = None
|
||||
|
||||
def _listener_for_name(self, name):
|
||||
"""
|
||||
Given an inventory hostname, return the UNIX listener that should
|
||||
communicate with it. This is a simple hash of the inventory name.
|
||||
"""
|
||||
mux = self._muxes[abs(hash(name)) % len(self._muxes)]
|
||||
LOG.debug('will use multiplexer %d (%s) to connect to "%s"',
|
||||
mux.index, mux.path, name)
|
||||
return mux.path
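
# Hedged sketch (illustrative only): the mapping above is a plain modulo shard
# over the inventory name's hash, so with e.g. 4 multiplexers a given host
# always lands on the same listener for the life of the run:
#
#   index = abs(hash('web1.example.com')) % 4   # hostname is a made-up example
#
# Note that Python 3 randomizes str hashes per process (PYTHONHASHSEED), so the
# assignment is stable within a run but not necessarily across runs.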
|
||||
|
||||
def _reconnect(self, path):
|
||||
if self.router is not None:
|
||||
# Router can just be overwritten, but the previous parent
|
||||
# connection must explicitly be removed from the broker first.
|
||||
self.router.disconnect(self.parent)
|
||||
self.parent = None
|
||||
self.router = None
|
||||
|
||||
try:
|
||||
self.router, self.parent = mitogen.unix.connect(
|
||||
path=path,
|
||||
broker=self.broker,
|
||||
)
|
||||
except mitogen.unix.ConnectError as e:
|
||||
# This is not AnsibleConnectionFailure since we want to break
|
||||
# with_items loops.
|
||||
raise ansible.errors.AnsibleError(shutting_down_msg % (e,))
|
||||
|
||||
self.router.max_message_size = MAX_MESSAGE_SIZE
|
||||
self.listener_path = path
|
||||
|
||||
def _on_process_exit(self):
|
||||
"""
|
||||
This is an :mod:`atexit` handler installed in the top-level process.
|
||||
|
||||
Shut the write end of `sock`, causing the receive side of the socket in
|
||||
every :class:`MuxProcess` to return 0-byte reads, and causing their
|
||||
main threads to wake and initiate shutdown. After shutting the socket
|
||||
down, wait on each child to finish exiting.
|
||||
|
||||
This is done using :mod:`atexit` since Ansible lacks any better hook to
|
||||
run code during exit, and unless some synchronization exists with
|
||||
MuxProcess, debug logs may appear on the user's terminal *after* the
|
||||
prompt has been printed.
|
||||
"""
|
||||
if self._pid != os.getpid():
|
||||
return
|
||||
|
||||
try:
|
||||
self.parent_sock.shutdown(socket.SHUT_WR)
|
||||
except socket.error:
|
||||
# Already closed. This is possible when tests are running.
|
||||
LOG.debug('_on_process_exit: ignoring duplicate call')
|
||||
return
|
||||
|
||||
mitogen.core.io_op(self.parent_sock.recv, 1)
|
||||
self.parent_sock.close()
|
||||
|
||||
for mux in self._muxes:
|
||||
_, status = os.waitpid(mux.pid, 0)
|
||||
status = mitogen.fork._convert_exit_status(status)
|
||||
LOG.debug('multiplexer %d PID %d %s', mux.index, mux.pid,
|
||||
mitogen.parent.returncode_to_str(status))
|
||||
|
||||
def _test_reset(self):
|
||||
"""
|
||||
Used to clean up in unit tests.
|
||||
"""
|
||||
self.on_binding_close()
|
||||
self._on_process_exit()
|
||||
set_worker_model(None)
|
||||
|
||||
global _classic_worker_model
|
||||
_classic_worker_model = None
|
||||
|
||||
def on_strategy_start(self):
|
||||
"""
|
||||
See WorkerModel.on_strategy_start().
|
||||
"""
|
||||
|
||||
def on_strategy_complete(self):
|
||||
"""
|
||||
See WorkerModel.on_strategy_complete().
|
||||
"""
|
||||
|
||||
def get_binding(self, inventory_name):
|
||||
"""
|
||||
See WorkerModel.get_binding().
|
||||
"""
|
||||
if self.broker is None:
|
||||
self.broker = Broker()
|
||||
|
||||
path = self._listener_for_name(inventory_name)
|
||||
if path != self.listener_path:
|
||||
self._reconnect(path)
|
||||
|
||||
return ClassicBinding(self)
|
||||
|
||||
def on_binding_close(self):
|
||||
if not self.broker:
|
||||
return
|
||||
|
||||
self.broker.shutdown()
|
||||
self.broker.join()
|
||||
self.router = None
|
||||
self.broker = None
|
||||
self.parent = None
|
||||
self.listener_path = None
|
||||
|
||||
# #420: Ansible executes "meta" actions in the top-level process,
|
||||
# meaning "reset_connection" will cause :class:`mitogen.core.Latch` FDs
|
||||
# to be cached and erroneously shared by children on subsequent
|
||||
# WorkerProcess forks. To handle that, call on_fork() to ensure any
|
||||
# shared state is discarded.
|
||||
# #490: only attempt to clean up when it's known that some resources
|
||||
# exist to cleanup, otherwise later __del__ double-call to close() due
|
||||
# to GC at random moment may obliterate an unrelated Connection's
|
||||
# related resources.
|
||||
mitogen.fork.on_fork()
|
||||
|
||||
|
||||
class MuxProcess(object):
|
||||
"""
|
||||
Implement a subprocess forked from the Ansible top-level, as a safe place
|
||||
to contain the Mitogen IO multiplexer thread, keeping its use of the
|
||||
logging package (and the logging package's heavy use of locks) far away
|
||||
from os.fork(), which is used continuously by the multiprocessing package
|
||||
in the top-level process.
|
||||
|
||||
The problem with running the multiplexer in that process is that should the
|
||||
multiplexer thread be in the process of emitting a log entry (and holding
|
||||
its lock) at the point of fork, in the child, the first attempt to log any
|
||||
log entry using the same handler will deadlock the child, as in the memory
|
||||
image the child received, the lock will always be marked held.
|
||||
|
||||
See https://bugs.python.org/issue6721 for a thorough description of the
|
||||
class of problems this worker is intended to avoid.
|
||||
"""
|
||||
#: A copy of :data:`os.environ` at the time the multiplexer process was
|
||||
#: started. It's used by mitogen_local.py to find changes made to the
|
||||
#: top-level environment (e.g. vars plugins -- issue #297) that must be
|
||||
#: applied to locally executed commands and modules.
|
||||
cls_original_env = None
|
||||
|
||||
def __init__(self, model, index):
|
||||
#: :class:`ClassicWorkerModel` instance we were created by.
|
||||
self.model = model
|
||||
#: MuxProcess CPU index.
|
||||
self.index = index
|
||||
#: Individual path of this process.
|
||||
self.path = mitogen.unix.make_socket_path()
|
||||
|
||||
def start(self):
|
||||
self.pid = os.fork()
|
||||
if self.pid:
|
||||
# Wait for child to boot before continuing.
|
||||
mitogen.core.io_op(self.model.parent_sock.recv, 1)
|
||||
return
|
||||
|
||||
ansible_mitogen.logging.set_process_name('mux:' + str(self.index))
|
||||
if setproctitle:
|
||||
setproctitle.setproctitle('mitogen mux:%s (%s)' % (
|
||||
self.index,
|
||||
os.path.basename(self.path),
|
||||
))
|
||||
|
||||
self.model.parent_sock.close()
|
||||
self.model.parent_sock = None
|
||||
try:
|
||||
try:
|
||||
self.worker_main()
|
||||
except Exception:
|
||||
LOG.exception('worker_main() crashed')
|
||||
finally:
|
||||
sys.exit()
|
||||
|
||||
def worker_main(self):
|
||||
"""
|
||||
The main function of the mux process: setup the Mitogen broker thread
|
||||
and ansible_mitogen services, then sleep waiting for the socket
|
||||
connected to the parent to be closed (indicating the parent has died).
|
||||
"""
|
||||
save_pid('mux')
|
||||
|
||||
# #623: MuxProcess ignores SIGINT because it wants to live until every
|
||||
# Ansible worker process has been cleaned up by
|
||||
# TaskQueueManager.cleanup(), otherwise harmless yet scary warnings
# about being unable to connect to MuxProcess could be printed.
|
||||
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
ansible_mitogen.logging.set_process_name('mux')
|
||||
ansible_mitogen.affinity.policy.assign_muxprocess(self.index)
|
||||
|
||||
self._setup_master()
|
||||
self._setup_services()
|
||||
|
||||
try:
|
||||
# Let the parent know our listening socket is ready.
|
||||
mitogen.core.io_op(self.model.child_sock.send, b('1'))
|
||||
# Block until the socket is closed, which happens on parent exit.
|
||||
mitogen.core.io_op(self.model.child_sock.recv, 1)
|
||||
finally:
|
||||
self.broker.shutdown()
|
||||
self.broker.join()
|
||||
|
||||
# Test frameworks living somewhere higher on the stack of the
|
||||
# original parent process may try to catch sys.exit(), so do a C
|
||||
# level exit instead.
|
||||
os._exit(0)
|
||||
|
||||
def _enable_router_debug(self):
|
||||
if 'MITOGEN_ROUTER_DEBUG' in os.environ:
|
||||
self.router.enable_debug()
|
||||
|
||||
def _enable_stack_dumps(self):
|
||||
secs = getenv_int('MITOGEN_DUMP_THREAD_STACKS', default=0)
|
||||
if secs:
|
||||
mitogen.debug.dump_to_logger(secs=secs)
|
||||
|
||||
def _setup_master(self):
|
||||
"""
|
||||
Construct a Router, Broker, and mitogen.unix listener
|
||||
"""
|
||||
self.broker = mitogen.master.Broker(install_watcher=False)
|
||||
self.router = mitogen.master.Router(
|
||||
broker=self.broker,
|
||||
max_message_size=MAX_MESSAGE_SIZE,
|
||||
)
|
||||
_setup_responder(self.router.responder)
|
||||
mitogen.core.listen(self.broker, 'shutdown', self._on_broker_shutdown)
|
||||
mitogen.core.listen(self.broker, 'exit', self._on_broker_exit)
|
||||
self.listener = mitogen.unix.Listener.build_stream(
|
||||
router=self.router,
|
||||
path=self.path,
|
||||
backlog=C.DEFAULT_FORKS,
|
||||
)
|
||||
self._enable_router_debug()
|
||||
self._enable_stack_dumps()
|
||||
|
||||
def _setup_services(self):
|
||||
"""
|
||||
Construct a ContextService and a thread to service requests for it
|
||||
arriving from worker processes.
|
||||
"""
|
||||
self.pool = mitogen.service.Pool(
|
||||
router=self.router,
|
||||
size=getenv_int('MITOGEN_POOL_SIZE', default=32),
|
||||
)
|
||||
setup_pool(self.pool)
|
||||
|
||||
def _on_broker_shutdown(self):
|
||||
"""
|
||||
Respond to broker shutdown by shutting down the pool. Do not join on it
|
||||
yet, since that would block the broker thread which then cannot clean
|
||||
up pending handlers and connections, which is required for the threads
|
||||
to exit gracefully.
|
||||
"""
|
||||
self.pool.stop(join=False)
|
||||
|
||||
def _on_broker_exit(self):
|
||||
"""
|
||||
Respond to the broker thread about to exit by finally joining on the
|
||||
pool. This is safe since pools only block in connection attempts, and
|
||||
connection attempts fail with CancelledError when broker shutdown
|
||||
begins.
|
||||
"""
|
||||
self.pool.join()
|
1020
mitogen/ansible_mitogen/runner.py
Normal file
File diff suppressed because it is too large
559
mitogen/ansible_mitogen/services.py
Normal file
@ -0,0 +1,559 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# !mitogen: minify_safe
|
||||
|
||||
"""
|
||||
Classes in this file define Mitogen 'services' that run (initially) within the
|
||||
connection multiplexer process that is forked off the top-level controller
|
||||
process.
|
||||
|
||||
Once a worker process connects to a multiplexer process
|
||||
(Connection._connect()), it communicates with these services to establish new
|
||||
connections, grant access to files by children, and register for notification
|
||||
when a child has completed a job.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
import threading
|
||||
|
||||
import ansible.constants
|
||||
|
||||
import mitogen
|
||||
import mitogen.service
|
||||
import mitogen.utils
|
||||
import ansible_mitogen.loaders
|
||||
import ansible_mitogen.module_finder
|
||||
import ansible_mitogen.target
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
# Force load of plugin to ensure ConfigManager has definitions loaded. Done
|
||||
# during module import to ensure a single-threaded environment; PluginLoader
|
||||
# is not thread-safe.
|
||||
ansible_mitogen.loaders.shell_loader.get('sh')
|
||||
|
||||
|
||||
if sys.version_info[0] == 3:
|
||||
def reraise(tp, value, tb):
|
||||
if value is None:
|
||||
value = tp()
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
else:
|
||||
exec(
|
||||
"def reraise(tp, value, tb=None):\n"
|
||||
" raise tp, value, tb\n"
|
||||
)
|
||||
|
||||
|
||||
def _get_candidate_temp_dirs():
|
||||
try:
|
||||
# >=2.5
|
||||
options = ansible.constants.config.get_plugin_options('shell', 'sh')
|
||||
remote_tmp = options.get('remote_tmp') or ansible.constants.DEFAULT_REMOTE_TMP
|
||||
system_tmpdirs = options.get('system_tmpdirs', ('/var/tmp', '/tmp'))
|
||||
except AttributeError:
|
||||
# 2.3
|
||||
remote_tmp = ansible.constants.DEFAULT_REMOTE_TMP
|
||||
system_tmpdirs = ('/var/tmp', '/tmp')
|
||||
|
||||
return mitogen.utils.cast([remote_tmp] + list(system_tmpdirs))
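
# Hedged example (illustrative only): with stock settings this typically yields
# something like ['~/.ansible/tmp', '/var/tmp', '/tmp'], i.e. the configured
# remote_tmp followed by the shell plug-in's system_tmpdirs; the exact values
# depend on the local Ansible configuration.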
|
||||
|
||||
|
||||
def key_from_dict(**kwargs):
|
||||
"""
|
||||
Return a unique string representation of a dict as quickly as possible.
|
||||
Used to generated deduplication keys from a request.
|
||||
"""
|
||||
out = []
|
||||
stack = [kwargs]
|
||||
while stack:
|
||||
obj = stack.pop()
|
||||
if isinstance(obj, dict):
|
||||
stack.extend(sorted(obj.items()))
|
||||
elif isinstance(obj, (list, tuple)):
|
||||
stack.extend(obj)
|
||||
else:
|
||||
out.append(str(obj))
|
||||
return ''.join(out)
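
# Hedged usage sketch (not part of the original module): because dict items are
# visited in sorted order, two logically identical connection specs collapse to
# the same deduplication key regardless of how their dicts were built. The spec
# values below are made-up examples.
def _demo_key_from_dict():
    a = key_from_dict(method='ssh', kwargs={'hostname': 'web1', 'port': 22})
    b = key_from_dict(kwargs={'port': 22, 'hostname': 'web1'}, method='ssh')
    assert a == b  # same content, same key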
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class ContextService(mitogen.service.Service):
|
||||
"""
|
||||
Used by workers to fetch the single Context instance corresponding to a
|
||||
connection configuration, creating the matching connection if it does not
|
||||
exist.
|
||||
|
||||
For connection methods and their parameters, see:
|
||||
https://mitogen.readthedocs.io/en/latest/api.html#context-factories
|
||||
|
||||
This concentrates connections in the top-level process, which may become a
|
||||
bottleneck. The bottleneck can be removed using per-CPU connection
|
||||
processes and arranging for the worker to select one according to a hash of
|
||||
the connection parameters (sharding).
|
||||
"""
|
||||
max_interpreters = int(os.getenv('MITOGEN_MAX_INTERPRETERS', '20'))
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(ContextService, self).__init__(*args, **kwargs)
|
||||
self._lock = threading.Lock()
|
||||
#: Records the :meth:`get` result dict for successful calls, returned
|
||||
#: for identical subsequent calls. Keyed by :meth:`key_from_dict`.
|
||||
self._response_by_key = {}
|
||||
#: List of :class:`mitogen.core.Latch` awaiting the result for a
|
||||
#: particular key.
|
||||
self._latches_by_key = {}
|
||||
#: Mapping of :class:`mitogen.core.Context` -> reference count. Each
|
||||
#: call to :meth:`get` increases this by one. Calls to :meth:`put`
|
||||
#: decrease it by one.
|
||||
self._refs_by_context = {}
|
||||
#: List of contexts in creation order by via= parameter. When
|
||||
#: :attr:`max_interpreters` is reached, the most recently used context
|
||||
#: is destroyed to make room for any additional context.
|
||||
self._lru_by_via = {}
|
||||
#: :func:`key_from_dict` result by Context.
|
||||
self._key_by_context = {}
|
||||
#: Mapping of Context -> parent Context
|
||||
self._via_by_context = {}
|
||||
|
||||
@mitogen.service.expose(mitogen.service.AllowParents())
|
||||
@mitogen.service.arg_spec({
|
||||
'stack': list,
|
||||
})
|
||||
def reset(self, stack):
|
||||
"""
|
||||
Return a reference, forcing close and discard of the underlying
|
||||
connection. Used for 'meta: reset_connection' or when some other error
|
||||
is detected.
|
||||
|
||||
:returns:
|
||||
:data:`True` if a connection was found to discard, otherwise
|
||||
:data:`False`.
|
||||
"""
|
||||
LOG.debug('%r.reset(%r)', self, stack)
|
||||
|
||||
l = mitogen.core.Latch()
|
||||
context = None
|
||||
with self._lock:
|
||||
for i, spec in enumerate(stack):
|
||||
key = key_from_dict(via=context, **spec)
|
||||
response = self._response_by_key.get(key)
|
||||
if response is None:
|
||||
LOG.debug('%r: could not find connection to shut down; '
|
||||
'failed at hop %d', self, i)
|
||||
return False
|
||||
|
||||
context = response['context']
|
||||
|
||||
mitogen.core.listen(context, 'disconnect', l.put)
|
||||
self._shutdown_unlocked(context)
|
||||
|
||||
# The timeout below is to turn a hang into a crash in case there is any
|
||||
# possible race between 'disconnect' signal subscription, and the child
|
||||
# abruptly disconnecting.
|
||||
l.get(timeout=30.0)
|
||||
return True
|
||||
|
||||
@mitogen.service.expose(mitogen.service.AllowParents())
|
||||
@mitogen.service.arg_spec({
|
||||
'context': mitogen.core.Context
|
||||
})
|
||||
def put(self, context):
|
||||
"""
|
||||
Return a reference, making it eligible for recycling once its reference
|
||||
count reaches zero.
|
||||
"""
|
||||
LOG.debug('decrementing reference count for %r', context)
|
||||
self._lock.acquire()
|
||||
try:
|
||||
if self._refs_by_context.get(context, 0) == 0:
|
||||
LOG.warning('%r.put(%r): refcount was 0. shutdown_all called?',
|
||||
self, context)
|
||||
return
|
||||
self._refs_by_context[context] -= 1
|
||||
finally:
|
||||
self._lock.release()
|
||||
|
||||
def _produce_response(self, key, response):
|
||||
"""
|
||||
Reply to every waiting request matching a configuration key with a
|
||||
response dictionary, deleting the list of waiters when done.
|
||||
|
||||
:param str key:
|
||||
Result of :meth:`key_from_dict`
|
||||
:param dict response:
|
||||
Response dictionary
|
||||
:returns:
|
||||
Number of waiters that were replied to.
|
||||
"""
|
||||
self._lock.acquire()
|
||||
try:
|
||||
latches = self._latches_by_key.pop(key)
|
||||
count = len(latches)
|
||||
for latch in latches:
|
||||
latch.put(response)
|
||||
finally:
|
||||
self._lock.release()
|
||||
return count
|
||||
|
||||
def _forget_context_unlocked(self, context):
|
||||
key = self._key_by_context.get(context)
|
||||
if key is None:
|
||||
LOG.debug('%r: attempt to forget unknown %r', self, context)
|
||||
return
|
||||
|
||||
self._response_by_key.pop(key, None)
|
||||
self._latches_by_key.pop(key, None)
|
||||
self._key_by_context.pop(context, None)
|
||||
self._refs_by_context.pop(context, None)
|
||||
self._via_by_context.pop(context, None)
|
||||
self._lru_by_via.pop(context, None)
|
||||
|
||||
def _shutdown_unlocked(self, context, lru=None, new_context=None):
|
||||
"""
|
||||
Arrange for `context` to be shut down, and optionally add `new_context`
|
||||
to the LRU list while holding the lock.
|
||||
"""
|
||||
LOG.info('%r._shutdown_unlocked(): shutting down %r', self, context)
|
||||
context.shutdown()
|
||||
via = self._via_by_context.get(context)
|
||||
if via:
|
||||
lru = self._lru_by_via.get(via)
|
||||
if lru:
|
||||
if context in lru:
|
||||
lru.remove(context)
|
||||
if new_context:
|
||||
lru.append(new_context)
|
||||
self._forget_context_unlocked(context)
|
||||
|
||||
def _update_lru_unlocked(self, new_context, spec, via):
|
||||
"""
|
||||
Update the LRU ("MRU"?) list associated with the connection described
by `spec`, destroying the most recently created context if the list
is full. Finally add `new_context` to the list.
|
||||
"""
|
||||
self._via_by_context[new_context] = via
|
||||
|
||||
lru = self._lru_by_via.setdefault(via, [])
|
||||
if len(lru) < self.max_interpreters:
|
||||
lru.append(new_context)
|
||||
return
|
||||
|
||||
for context in reversed(lru):
|
||||
if self._refs_by_context[context] == 0:
|
||||
break
|
||||
else:
|
||||
LOG.warning('via=%r reached maximum number of interpreters, '
|
||||
'but they are all marked as in-use.', via)
|
||||
return
|
||||
|
||||
self._shutdown_unlocked(context, lru=lru, new_context=new_context)
|
||||
|
||||
def _update_lru(self, new_context, spec, via):
|
||||
self._lock.acquire()
|
||||
try:
|
||||
self._update_lru_unlocked(new_context, spec, via)
|
||||
finally:
|
||||
self._lock.release()
|
||||
|
||||
@mitogen.service.expose(mitogen.service.AllowParents())
|
||||
def dump(self):
|
||||
"""
|
||||
For testing, return a list of dicts describing every currently
|
||||
connected context.
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'context_name': context.name,
|
||||
'via': getattr(self._via_by_context.get(context),
|
||||
'name', None),
|
||||
'refs': self._refs_by_context.get(context),
|
||||
}
|
||||
for context, key in sorted(self._key_by_context.items(),
|
||||
key=lambda c_k: c_k[0].context_id)
|
||||
]
|
||||
|
||||
@mitogen.service.expose(mitogen.service.AllowParents())
|
||||
def shutdown_all(self):
|
||||
"""
|
||||
For testing use, arrange for all connections to be shut down.
|
||||
"""
|
||||
self._lock.acquire()
|
||||
try:
|
||||
for context in list(self._key_by_context):
|
||||
self._shutdown_unlocked(context)
|
||||
finally:
|
||||
self._lock.release()
|
||||
|
||||
def _on_context_disconnect(self, context):
|
||||
"""
|
||||
Respond to Context disconnect event by deleting any record of the no
|
||||
longer reachable context. This method runs in the Broker thread and
|
||||
must not block.
|
||||
"""
|
||||
self._lock.acquire()
|
||||
try:
|
||||
LOG.info('%r: Forgetting %r due to stream disconnect', self, context)
|
||||
self._forget_context_unlocked(context)
|
||||
finally:
|
||||
self._lock.release()
|
||||
|
||||
ALWAYS_PRELOAD = (
|
||||
'ansible.module_utils.basic',
|
||||
'ansible.module_utils.json_utils',
|
||||
'ansible.release',
|
||||
'ansible_mitogen.runner',
|
||||
'ansible_mitogen.target',
|
||||
'mitogen.fork',
|
||||
'mitogen.service',
|
||||
)
|
||||
|
||||
def _send_module_forwards(self, context):
|
||||
if hasattr(self.router.responder, 'forward_modules'):
|
||||
self.router.responder.forward_modules(context, self.ALWAYS_PRELOAD)
|
||||
|
||||
_candidate_temp_dirs = None
|
||||
|
||||
def _get_candidate_temp_dirs(self):
|
||||
"""
|
||||
Return a list of locations to try to create the single temporary
|
||||
directory used by the run. This simply caches the (expensive) plugin
|
||||
load of :func:`_get_candidate_temp_dirs`.
|
||||
"""
|
||||
if self._candidate_temp_dirs is None:
|
||||
self._candidate_temp_dirs = _get_candidate_temp_dirs()
|
||||
return self._candidate_temp_dirs
|
||||
|
||||
def _connect(self, key, spec, via=None):
|
||||
"""
|
||||
Actual connect implementation. Arranges for the Mitogen connection to
|
||||
be created and enqueues an asynchronous call to start the forked task
|
||||
parent in the remote context.
|
||||
|
||||
:param key:
|
||||
Deduplication key representing the connection configuration.
|
||||
:param spec:
|
||||
Connection specification.
|
||||
:returns:
|
||||
Dict like::
|
||||
|
||||
{
|
||||
'context': mitogen.core.Context or None,
|
||||
'via': mitogen.core.Context or None,
|
||||
'init_child_result': {
|
||||
'fork_context': mitogen.core.Context,
|
||||
'home_dir': str or None,
|
||||
},
|
||||
'msg': str or None
|
||||
}
|
||||
|
||||
Where `context` is a reference to the newly constructed context,
|
||||
`init_child_result` is the result of executing
|
||||
:func:`ansible_mitogen.target.init_child` in that context, `msg` is
|
||||
an error message and the remaining fields are :data:`None`, or
|
||||
`msg` is :data:`None` and the remaining fields are set.
|
||||
"""
|
||||
try:
|
||||
method = getattr(self.router, spec['method'])
|
||||
except AttributeError:
|
||||
raise Error('unsupported method: %(method)s' % spec)
|
||||
|
||||
context = method(via=via, unidirectional=True, **spec['kwargs'])
|
||||
if via and spec.get('enable_lru'):
|
||||
self._update_lru(context, spec, via)
|
||||
|
||||
# Forget the context when its disconnect event fires.
|
||||
mitogen.core.listen(context, 'disconnect',
|
||||
lambda: self._on_context_disconnect(context))
|
||||
|
||||
self._send_module_forwards(context)
|
||||
init_child_result = context.call(
|
||||
ansible_mitogen.target.init_child,
|
||||
log_level=LOG.getEffectiveLevel(),
|
||||
candidate_temp_dirs=self._get_candidate_temp_dirs(),
|
||||
)
|
||||
|
||||
if os.environ.get('MITOGEN_DUMP_THREAD_STACKS'):
|
||||
from mitogen import debug
|
||||
context.call(debug.dump_to_logger)
|
||||
|
||||
self._key_by_context[context] = key
|
||||
self._refs_by_context[context] = 0
|
||||
return {
|
||||
'context': context,
|
||||
'via': via,
|
||||
'init_child_result': init_child_result,
|
||||
'msg': None,
|
||||
}
|
||||
|
||||
def _wait_or_start(self, spec, via=None):
|
||||
latch = mitogen.core.Latch()
|
||||
key = key_from_dict(via=via, **spec)
|
||||
self._lock.acquire()
|
||||
try:
|
||||
response = self._response_by_key.get(key)
|
||||
if response is not None:
|
||||
self._refs_by_context[response['context']] += 1
|
||||
latch.put(response)
|
||||
return latch
|
||||
|
||||
latches = self._latches_by_key.setdefault(key, [])
|
||||
first = len(latches) == 0
|
||||
latches.append(latch)
|
||||
finally:
|
||||
self._lock.release()
|
||||
|
||||
if first:
|
||||
# I'm the first requester, so I will create the connection.
|
||||
try:
|
||||
response = self._connect(key, spec, via=via)
|
||||
count = self._produce_response(key, response)
|
||||
# Only record the response for non-error results.
|
||||
self._response_by_key[key] = response
|
||||
# Set the reference count to the number of waiters.
|
||||
self._refs_by_context[response['context']] += count
|
||||
except Exception:
|
||||
self._produce_response(key, sys.exc_info())
|
||||
|
||||
return latch
|
||||
|
||||
disconnect_msg = (
|
||||
'Channel was disconnected while connection attempt was in progress; '
|
||||
'this may be caused by an abnormal Ansible exit, or due to an '
|
||||
'unreliable target.'
|
||||
)
|
||||
|
||||
@mitogen.service.expose(mitogen.service.AllowParents())
|
||||
@mitogen.service.arg_spec({
|
||||
'stack': list
|
||||
})
|
||||
def get(self, stack):
|
||||
"""
|
||||
Return a Context referring to an established connection with the given
|
||||
configuration, establishing new connections as necessary.
|
||||
|
||||
:param list stack:
|
||||
Connection descriptions. Each element is a dict containing 'method'
|
||||
and 'kwargs' keys describing the Router method and arguments.
|
||||
Subsequent elements are proxied via the previous.
|
||||
|
||||
:returns dict:
|
||||
* context: mitogen.parent.Context or None.
|
||||
* init_child_result: Result of :func:`init_child`.
|
||||
* msg: StreamError exception text or None.
|
||||
* method_name: string failing method name.
|
||||
"""
|
||||
via = None
|
||||
for spec in stack:
|
||||
try:
|
||||
result = self._wait_or_start(spec, via=via).get()
|
||||
if isinstance(result, tuple): # exc_info()
|
||||
reraise(*result)
|
||||
via = result['context']
|
||||
except mitogen.core.ChannelError:
|
||||
return {
|
||||
'context': None,
|
||||
'init_child_result': None,
|
||||
'method_name': spec['method'],
|
||||
'msg': self.disconnect_msg,
|
||||
}
|
||||
except mitogen.core.StreamError as e:
|
||||
return {
|
||||
'context': None,
|
||||
'init_child_result': None,
|
||||
'method_name': spec['method'],
|
||||
'msg': str(e),
|
||||
}
|
||||
|
||||
return result
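
# Hedged example of a `stack` value (illustrative only; the exact kwargs depend
# on the connection plug-in and are assumptions here, not taken from this code):
#
#   [
#       {'method': 'ssh',  'kwargs': {'hostname': 'bastion', 'python_path': 'python3'}},
#       {'method': 'sudo', 'kwargs': {'username': 'root'}},
#   ]
#
# Each element is established via the previous one, so the sudo context above
# would be created through the bastion SSH connection.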
|
||||
|
||||
|
||||
class ModuleDepService(mitogen.service.Service):
|
||||
"""
|
||||
Scan a new-style module and produce a cached mapping of module_utils names
|
||||
to their resolved filesystem paths.
|
||||
"""
|
||||
invoker_class = mitogen.service.SerializedInvoker
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(ModuleDepService, self).__init__(*args, **kwargs)
|
||||
self._cache = {}
|
||||
|
||||
def _get_builtin_names(self, builtin_path, resolved):
|
||||
return [
|
||||
mitogen.core.to_text(fullname)
|
||||
for fullname, path, is_pkg in resolved
|
||||
if os.path.abspath(path).startswith(builtin_path)
|
||||
]
|
||||
|
||||
def _get_custom_tups(self, builtin_path, resolved):
|
||||
return [
|
||||
(mitogen.core.to_text(fullname),
|
||||
mitogen.core.to_text(path),
|
||||
is_pkg)
|
||||
for fullname, path, is_pkg in resolved
|
||||
if not os.path.abspath(path).startswith(builtin_path)
|
||||
]
|
||||
|
||||
@mitogen.service.expose(policy=mitogen.service.AllowParents())
|
||||
@mitogen.service.arg_spec({
|
||||
'module_name': mitogen.core.UnicodeType,
|
||||
'module_path': mitogen.core.FsPathTypes,
|
||||
'search_path': tuple,
|
||||
'builtin_path': mitogen.core.FsPathTypes,
|
||||
'context': mitogen.core.Context,
|
||||
})
|
||||
def scan(self, module_name, module_path, search_path, builtin_path, context):
|
||||
key = (module_name, search_path)
|
||||
if key not in self._cache:
|
||||
resolved = ansible_mitogen.module_finder.scan(
|
||||
module_name=module_name,
|
||||
module_path=module_path,
|
||||
search_path=tuple(search_path) + (builtin_path,),
|
||||
)
|
||||
builtin_path = os.path.abspath(builtin_path)
|
||||
builtin = self._get_builtin_names(builtin_path, resolved)
|
||||
custom = self._get_custom_tups(builtin_path, resolved)
|
||||
self._cache[key] = {
|
||||
'builtin': builtin,
|
||||
'custom': custom,
|
||||
}
|
||||
return self._cache[key]
|
373
mitogen/ansible_mitogen/strategy.py
Normal file
@ -0,0 +1,373 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import distutils.version
|
||||
import os
|
||||
import signal
|
||||
import threading
|
||||
|
||||
try:
|
||||
import setproctitle
|
||||
except ImportError:
|
||||
setproctitle = None
|
||||
|
||||
import mitogen.core
|
||||
import ansible_mitogen.affinity
|
||||
import ansible_mitogen.loaders
|
||||
import ansible_mitogen.mixins
|
||||
import ansible_mitogen.process
|
||||
|
||||
import ansible
|
||||
import ansible.executor.process.worker
|
||||
|
||||
try:
|
||||
# 2.8+ has a standardized "unset" object.
|
||||
from ansible.utils.sentinel import Sentinel
|
||||
except ImportError:
|
||||
Sentinel = None
|
||||
|
||||
|
||||
ANSIBLE_VERSION_MIN = (2, 3)
|
||||
ANSIBLE_VERSION_MAX = (2, 9)
|
||||
NEW_VERSION_MSG = (
|
||||
"Your Ansible version (%s) is too recent. The most recent version\n"
|
||||
"supported by Mitogen for Ansible is %s.x. Please check the Mitogen\n"
|
||||
"release notes to see if a new version is available, otherwise\n"
|
||||
"subscribe to the corresponding GitHub issue to be notified when\n"
|
||||
"support becomes available.\n"
|
||||
"\n"
|
||||
" https://mitogen.rtfd.io/en/latest/changelog.html\n"
|
||||
" https://github.com/dw/mitogen/issues/\n"
|
||||
)
|
||||
OLD_VERSION_MSG = (
|
||||
"Your version of Ansible (%s) is too old. The oldest version supported by "
|
||||
"Mitogen for Ansible is %s."
|
||||
)
|
||||
|
||||
|
||||
def _assert_supported_release():
|
||||
"""
|
||||
Throw AnsibleError with a descriptive message in case of being loaded into
|
||||
an unsupported Ansible release.
|
||||
"""
|
||||
v = ansible.__version__
|
||||
if not isinstance(v, tuple):
|
||||
v = tuple(distutils.version.LooseVersion(v).version)
|
||||
|
||||
if v[:2] < ANSIBLE_VERSION_MIN:
|
||||
raise ansible.errors.AnsibleError(
|
||||
OLD_VERSION_MSG % (v, ANSIBLE_VERSION_MIN)
|
||||
)
|
||||
|
||||
if v[:2] > ANSIBLE_VERSION_MAX:
|
||||
raise ansible.errors.AnsibleError(
|
||||
NEW_VERSION_MSG % (ansible.__version__, ANSIBLE_VERSION_MAX)
|
||||
)
|
||||
|
||||
|
||||
def _patch_awx_callback():
|
||||
"""
|
||||
issue #400: AWX loads a display callback that suffers from thread-safety
|
||||
issues. Detect the presence of older AWX versions and patch the bug.
|
||||
"""
|
||||
# AWX uses sitecustomize.py to force-load this package. If it exists, we're
|
||||
# running under AWX.
|
||||
try:
|
||||
from awx_display_callback.events import EventContext
|
||||
from awx_display_callback.events import event_context
|
||||
except ImportError:
|
||||
return
|
||||
|
||||
if hasattr(EventContext(), '_local'):
|
||||
# Patched version.
|
||||
return
|
||||
|
||||
def patch_add_local(self, **kwargs):
|
||||
tls = vars(self._local)
|
||||
ctx = tls.setdefault('_ctx', {})
|
||||
ctx.update(kwargs)
|
||||
|
||||
EventContext._local = threading.local()
|
||||
EventContext.add_local = patch_add_local
|
||||
|
||||
_patch_awx_callback()
|
||||
|
||||
|
||||
def wrap_action_loader__get(name, *args, **kwargs):
|
||||
"""
|
||||
While the mitogen strategy is active, trap action_loader.get() calls,
|
||||
augmenting any fetched class with ActionModuleMixin, which replaces various
|
||||
helper methods inherited from ActionBase with implementations that avoid
|
||||
the use of shell fragments wherever possible.
|
||||
|
||||
This is used instead of static subclassing as it generalizes to third party
|
||||
action plugins outside the Ansible tree.
|
||||
"""
|
||||
get_kwargs = {'class_only': True}
|
||||
if name in ('fetch',):
|
||||
name = 'mitogen_' + name
|
||||
if ansible.__version__ >= '2.8':
|
||||
get_kwargs['collection_list'] = kwargs.pop('collection_list', None)
|
||||
|
||||
klass = ansible_mitogen.loaders.action_loader__get(name, **get_kwargs)
|
||||
if klass:
|
||||
bases = (ansible_mitogen.mixins.ActionModuleMixin, klass)
|
||||
adorned_klass = type(str(name), bases, {})
|
||||
if kwargs.get('class_only'):
|
||||
return adorned_klass
|
||||
return adorned_klass(*args, **kwargs)
|
||||
|
||||
|
||||
REDIRECTED_CONNECTION_PLUGINS = (
|
||||
'buildah',
|
||||
'docker',
|
||||
'kubectl',
|
||||
'jail',
|
||||
'local',
|
||||
'lxc',
|
||||
'lxd',
|
||||
'machinectl',
|
||||
'setns',
|
||||
'ssh',
|
||||
)
|
||||
|
||||
|
||||
def wrap_connection_loader__get(name, *args, **kwargs):
|
||||
"""
|
||||
While a Mitogen strategy is active, rewrite connection_loader.get() calls
|
||||
for some transports into requests for a compatible Mitogen transport.
|
||||
"""
|
||||
if name in REDIRECTED_CONNECTION_PLUGINS:
|
||||
name = 'mitogen_' + name
|
||||
|
||||
return ansible_mitogen.loaders.connection_loader__get(name, *args, **kwargs)
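
# Hedged sketch (illustrative only): while the strategy is active, a play that
# asks for `connection: ssh` transparently receives the Mitogen equivalent,
# while transports not listed above pass through untouched, e.g.:
#
#   wrap_connection_loader__get('ssh', class_only=True)    # -> mitogen_ssh plug-in
#   wrap_connection_loader__get('winrm', class_only=True)  # -> stock winrm plug-in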
|
||||
|
||||
|
||||
def wrap_worker__run(self):
|
||||
"""
|
||||
While a Mitogen strategy is active, trap WorkerProcess.run() calls and use
|
||||
the opportunity to set the worker's name in the process list and log
|
||||
output, activate profiling if requested, and bind the worker to a specific
|
||||
CPU.
|
||||
"""
|
||||
if setproctitle:
|
||||
setproctitle.setproctitle('worker:%s task:%s' % (
|
||||
self._host.name,
|
||||
self._task.action,
|
||||
))
|
||||
|
||||
# Ignore parent's attempts to murder us when we still need to write
|
||||
# profiling output.
|
||||
if mitogen.core._profile_hook.__name__ != '_profile_hook':
|
||||
signal.signal(signal.SIGTERM, signal.SIG_IGN)
|
||||
|
||||
ansible_mitogen.logging.set_process_name('task')
|
||||
ansible_mitogen.affinity.policy.assign_worker()
|
||||
return mitogen.core._profile_hook('WorkerProcess',
|
||||
lambda: worker__run(self)
|
||||
)
|
||||
|
||||
|
||||
class AnsibleWrappers(object):
|
||||
"""
|
||||
Manage add/removal of various Ansible runtime hooks.
|
||||
"""
|
||||
def _add_plugin_paths(self):
|
||||
"""
|
||||
Add the Mitogen plug-in directories to the ModuleLoader path, avoiding
|
||||
the need for manual configuration.
|
||||
"""
|
||||
base_dir = os.path.join(os.path.dirname(__file__), 'plugins')
|
||||
ansible_mitogen.loaders.connection_loader.add_directory(
|
||||
os.path.join(base_dir, 'connection')
|
||||
)
|
||||
ansible_mitogen.loaders.action_loader.add_directory(
|
||||
os.path.join(base_dir, 'action')
|
||||
)
|
||||
|
||||
def _install_wrappers(self):
|
||||
"""
|
||||
Install our PluginLoader monkey patches and update global variables
|
||||
with references to the real functions.
|
||||
"""
|
||||
ansible_mitogen.loaders.action_loader.get = wrap_action_loader__get
|
||||
ansible_mitogen.loaders.connection_loader.get = wrap_connection_loader__get
|
||||
|
||||
global worker__run
|
||||
worker__run = ansible.executor.process.worker.WorkerProcess.run
|
||||
ansible.executor.process.worker.WorkerProcess.run = wrap_worker__run
|
||||
|
||||
def _remove_wrappers(self):
|
||||
"""
|
||||
Uninstall the PluginLoader monkey patches.
|
||||
"""
|
||||
ansible_mitogen.loaders.action_loader.get = (
|
||||
ansible_mitogen.loaders.action_loader__get
|
||||
)
|
||||
ansible_mitogen.loaders.connection_loader.get = (
|
||||
ansible_mitogen.loaders.connection_loader__get
|
||||
)
|
||||
ansible.executor.process.worker.WorkerProcess.run = worker__run
|
||||
|
||||
def install(self):
|
||||
self._add_plugin_paths()
|
||||
self._install_wrappers()
|
||||
|
||||
def remove(self):
|
||||
self._remove_wrappers()
|
||||
|
||||
|
||||
class StrategyMixin(object):
|
||||
"""
|
||||
This mix-in enhances any built-in strategy by arranging for an appropriate
|
||||
WorkerModel instance to be constructed as necessary, or for the existing
|
||||
one to be reused.
|
||||
|
||||
The WorkerModel in turn arranges for a connection multiplexer to be started
|
||||
somewhere (by default in an external process), and for WorkerProcesses to
|
||||
grow support for using those top-level services to communicate with remote
|
||||
hosts.
|
||||
|
||||
Mitogen:
|
||||
|
||||
A private Broker IO multiplexer thread is created to dispatch IO
|
||||
between the local Router and any connected streams, including streams
|
||||
connected to Ansible WorkerProcesses, and SSH commands implementing
|
||||
connections to remote machines.
|
||||
|
||||
A Router is created that implements message dispatch to any locally
|
||||
registered handlers, and message routing for remote streams. Router is
|
||||
the junction point through which WorkerProcesses and remote SSH contexts
|
||||
can communicate.
|
||||
|
||||
Router additionally adds message handlers for a variety of base
|
||||
services; see the Standard Handles section of the How It Works guide
|
||||
in the documentation.
|
||||
|
||||
A ContextService is installed as a message handler in the connection
|
||||
multiplexer subprocess and run on a private thread. It is responsible
|
||||
for accepting requests to establish new SSH connections from worker
|
||||
processes, and ensuring precisely one connection exists and is reused
|
||||
for subsequent playbook steps. The service presently runs in a single
|
||||
thread, so to begin with, new SSH connections are serialized.
|
||||
|
||||
Finally a mitogen.unix listener is created through which WorkerProcess
|
||||
can establish a connection back into the connection multiplexer, in
|
||||
order to avail of ContextService. A UNIX listener socket is necessary
|
||||
as there is no saner mechanism to arrange for IPC between the
|
||||
Router in the connection multiplexer, and the corresponding Router in
|
||||
the worker process.
|
||||
|
||||
Ansible:
|
||||
|
||||
PluginLoader monkey patches are installed to catch attempts to create
|
||||
connection and action plug-ins.
|
||||
|
||||
For connection plug-ins, if the desired method is "local" or "ssh", it
|
||||
is redirected to one of the "mitogen_*" connection plug-ins. That
|
||||
plug-in implements communication via a UNIX socket connection to the
|
||||
connection multiplexer process, and uses ContextService running there
|
||||
to establish a persistent connection to the target.
|
||||
|
||||
For action plug-ins, the original class is looked up as usual, but a
|
||||
new subclass is created dynamically in order to mix-in
|
||||
ansible_mitogen.mixins.ActionModuleMixin, which overrides many of the
|
||||
methods usually inherited from ActionBase in order to replace them with
|
||||
pure-Python equivalents that avoid the use of shell.
|
||||
|
||||
In particular, _execute_module() is overridden with an implementation
|
||||
that uses ansible_mitogen.target.run_module() executed in the target
|
||||
Context. run_module() implements module execution by importing the
|
||||
module as if it were a normal Python module, and capturing its output
|
||||
in the remote process. Since the Mitogen module loader is active in the
|
||||
remote process, all the heavy lifting of transferring the action module
|
||||
and its dependencies is automatically handled by Mitogen.
|
||||
"""
|
||||
|
||||
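# A minimal usage sketch (an assumption, not part of this commit): this
# mix-in is consumed by the mitogen_linear/mitogen_free strategy plug-ins
# shipped under ansible_mitogen/plugins/strategy. Enabling one from
# ansible.cfg typically looks like the following, where the path is a
# placeholder for wherever this tree is installed:
#
#   [defaults]
#   strategy_plugins = /path/to/mitogen/ansible_mitogen/plugins/strategy
#   strategy = mitogen_linear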
def _queue_task(self, host, task, task_vars, play_context):
|
||||
"""
|
||||
Many PluginLoader caches are defective as they are only populated in
|
||||
the ephemeral WorkerProcess. Touch each plug-in path before forking to
|
||||
ensure all workers receive a hot cache.
|
||||
"""
|
||||
ansible_mitogen.loaders.module_loader.find_plugin(
|
||||
name=task.action,
|
||||
mod_type='',
|
||||
)
|
||||
ansible_mitogen.loaders.action_loader.get(
|
||||
name=task.action,
|
||||
class_only=True,
|
||||
)
|
||||
if play_context.connection is not Sentinel:
|
||||
# 2.8 appears to defer computing this until inside the worker.
|
||||
# TODO: figure out where it has moved.
|
||||
ansible_mitogen.loaders.connection_loader.get(
|
||||
name=play_context.connection,
|
||||
class_only=True,
|
||||
)
|
||||
|
||||
return super(StrategyMixin, self)._queue_task(
|
||||
host=host,
|
||||
task=task,
|
||||
task_vars=task_vars,
|
||||
play_context=play_context,
|
||||
)
|
||||
|
||||
def _get_worker_model(self):
|
||||
"""
|
||||
In classic mode a single :class:`WorkerModel` exists, which manages
|
||||
references and configuration of the associated connection multiplexer
|
||||
process.
|
||||
"""
|
||||
return ansible_mitogen.process.get_classic_worker_model()
|
||||
|
||||
def run(self, iterator, play_context, result=0):
|
||||
"""
|
||||
Wrap :meth:`run` to ensure requisite infrastructure and modifications
|
||||
are configured for the duration of the call.
|
||||
"""
|
||||
_assert_supported_release()
|
||||
wrappers = AnsibleWrappers()
|
||||
self._worker_model = self._get_worker_model()
|
||||
ansible_mitogen.process.set_worker_model(self._worker_model)
|
||||
try:
|
||||
self._worker_model.on_strategy_start()
|
||||
try:
|
||||
wrappers.install()
|
||||
try:
|
||||
run = super(StrategyMixin, self).run
|
||||
return mitogen.core._profile_hook('Strategy',
|
||||
lambda: run(iterator, play_context)
|
||||
)
|
||||
finally:
|
||||
wrappers.remove()
|
||||
finally:
|
||||
self._worker_model.on_strategy_complete()
|
||||
finally:
|
||||
ansible_mitogen.process.set_worker_model(None)
|
777
mitogen/ansible_mitogen/target.py
Normal file
@ -0,0 +1,777 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# !mitogen: minify_safe
|
||||
|
||||
"""
|
||||
Helper functions intended to be executed on the target. These are entrypoints
|
||||
for file transfer, module execution and sundry bits like changing file modes.
|
||||
"""
|
||||
|
||||
import errno
|
||||
import grp
|
||||
import operator
|
||||
import os
|
||||
import pwd
|
||||
import re
|
||||
import signal
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import traceback
|
||||
import types
|
||||
|
||||
# Absolute imports for <2.5.
|
||||
logging = __import__('logging')
|
||||
|
||||
import mitogen.core
|
||||
import mitogen.fork
|
||||
import mitogen.parent
|
||||
import mitogen.service
|
||||
from mitogen.core import b
|
||||
|
||||
try:
|
||||
import json
|
||||
except ImportError:
|
||||
import simplejson as json
|
||||
|
||||
try:
|
||||
reduce
|
||||
except NameError:
|
||||
# Python 3.x.
|
||||
from functools import reduce
|
||||
|
||||
try:
|
||||
BaseException
|
||||
except NameError:
|
||||
# Python 2.4
|
||||
BaseException = Exception
|
||||
|
||||
|
||||
# Ansible since PR #41749 inserts "import __main__" into
|
||||
# ansible.module_utils.basic. Mitogen's importer will refuse such an import, so
|
||||
# we must setup a fake "__main__" before that module is ever imported. The
|
||||
# str() is to cast Unicode to bytes on Python 2.6.
|
||||
if not sys.modules.get(str('__main__')):
|
||||
sys.modules[str('__main__')] = types.ModuleType(str('__main__'))
|
||||
|
||||
import ansible.module_utils.json_utils
|
||||
import ansible_mitogen.runner
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
MAKE_TEMP_FAILED_MSG = (
|
||||
u"Unable to find a useable temporary directory. This likely means no\n"
|
||||
u"system-supplied TMP directory can be written to, or all directories\n"
|
||||
u"were mounted on 'noexec' filesystems.\n"
|
||||
u"\n"
|
||||
u"The following paths were tried:\n"
|
||||
u" %(paths)s\n"
|
||||
u"\n"
|
||||
u"Please check '-vvv' output for a log of individual path errors."
|
||||
)
|
||||
|
||||
# Python 2.4/2.5 cannot support fork+threads whatsoever, it doesn't even fix up
|
||||
# interpreter state. So 2.4/2.5 interpreters start .local() contexts for
|
||||
# isolation instead. Since we don't have any crazy memory sharing problems to
|
||||
# avoid, there is no virginal fork parent either. The child is started directly
|
||||
# from the login/become process. In future this will be default everywhere,
|
||||
# fork is brainwrong from the stone age.
|
||||
FORK_SUPPORTED = sys.version_info >= (2, 6)
|
||||
|
||||
#: Initialized to an econtext.parent.Context pointing at a pristine fork of
|
||||
#: the target Python interpreter before it executes any code or imports.
|
||||
_fork_parent = None
|
||||
|
||||
#: Set by :func:`init_child` to the name of a writeable and executable
|
||||
#: temporary directory accessible by the active user account.
|
||||
good_temp_dir = None
|
||||
|
||||
|
||||
def subprocess__Popen__close_fds(self, but):
|
||||
"""
|
||||
issue #362, #435: subprocess.Popen(close_fds=True) aka.
|
||||
AnsibleModule.run_command() loops the entire FD space on Python<3.2.
|
||||
CentOS>5 ships with 1,048,576 FDs by default, resulting in huge (>500ms)
|
||||
latency starting children. Therefore replace Popen._close_fds on Linux with
|
||||
a version that is O(fds) rather than O(_SC_OPEN_MAX).
|
||||
"""
|
||||
try:
|
||||
names = os.listdir(u'/proc/self/fd')
|
||||
except OSError:
|
||||
# May fail if acting on a container that does not have /proc mounted.
|
||||
self._original_close_fds(but)
|
||||
return
|
||||
|
||||
for name in names:
|
||||
if not name.isdigit():
|
||||
continue
|
||||
|
||||
fd = int(name, 10)
|
||||
if fd > 2 and fd != but:
|
||||
try:
|
||||
os.close(fd)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
if (
|
||||
sys.platform.startswith(u'linux') and
|
||||
sys.version < u'3.0' and
|
||||
hasattr(subprocess.Popen, u'_close_fds') and
|
||||
not mitogen.is_master
|
||||
):
|
||||
subprocess.Popen._original_close_fds = subprocess.Popen._close_fds
|
||||
subprocess.Popen._close_fds = subprocess__Popen__close_fds
|
||||
|
||||
|
||||
def get_small_file(context, path):
|
||||
"""
|
||||
Basic in-memory caching module fetcher. This generates one roundtrip for
|
||||
every previously unseen file, so it is only a temporary solution.
|
||||
|
||||
:param context:
|
||||
Context we should direct FileService requests to. For now (and probably
|
||||
forever) this is just the top-level Mitogen connection manager process.
|
||||
:param path:
|
||||
Path to fetch from FileService, must previously have been registered by
|
||||
a privileged context using the `register` command.
|
||||
:returns:
|
||||
Bytestring file data.
|
||||
"""
|
||||
pool = mitogen.service.get_or_create_pool(router=context.router)
|
||||
service = pool.get_service(u'mitogen.service.PushFileService')
|
||||
return service.get(path)
|
||||
|
||||
|
||||
def transfer_file(context, in_path, out_path, sync=False, set_owner=False):
|
||||
"""
|
||||
Streamily download a file from the connection multiplexer process in the
|
||||
controller.
|
||||
|
||||
:param mitogen.core.Context context:
|
||||
Reference to the context hosting the FileService that will transmit the
|
||||
file.
|
||||
:param bytes in_path:
|
||||
FileService registered name of the input file.
|
||||
:param bytes out_path:
|
||||
Name of the output path on the local disk.
|
||||
:param bool sync:
|
||||
If :data:`True`, ensure the file content and metadata are fully on disk
|
||||
before renaming the temporary file over the existing file. This should
|
||||
ensure in the case of system crash, either the entire old or new file
|
||||
is visible post-reboot.
|
||||
:param bool set_owner:
|
||||
If :data:`True`, look up the metadata username and group on the local
|
||||
system and set the file owner using :func:`os.fchown`.
|
||||
"""
|
||||
out_path = os.path.abspath(out_path)
|
||||
fd, tmp_path = tempfile.mkstemp(suffix='.tmp',
|
||||
prefix='.ansible_mitogen_transfer-',
|
||||
dir=os.path.dirname(out_path))
|
||||
fp = os.fdopen(fd, 'wb', mitogen.core.CHUNK_SIZE)
|
||||
LOG.debug('transfer_file(%r) temporary file: %s', out_path, tmp_path)
|
||||
|
||||
try:
|
||||
try:
|
||||
ok, metadata = mitogen.service.FileService.get(
|
||||
context=context,
|
||||
path=in_path,
|
||||
out_fp=fp,
|
||||
)
|
||||
if not ok:
|
||||
raise IOError('transfer of %r was interrupted.' % (in_path,))
|
||||
|
||||
set_file_mode(tmp_path, metadata['mode'], fd=fp.fileno())
|
||||
if set_owner:
|
||||
set_file_owner(tmp_path, metadata['owner'], metadata['group'],
|
||||
fd=fp.fileno())
|
||||
finally:
|
||||
fp.close()
|
||||
|
||||
if sync:
|
||||
os.fsync(fp.fileno())
|
||||
os.rename(tmp_path, out_path)
|
||||
except BaseException:
|
||||
os.unlink(tmp_path)
|
||||
raise
|
||||
|
||||
os.utime(out_path, (metadata['atime'], metadata['mtime']))
|
||||
|
||||
|
||||
def prune_tree(path):
|
||||
"""
|
||||
Like shutil.rmtree(), but log errors rather than discard them, and do not
|
||||
waste multiple os.stat() calls discovering whether the object can be
|
||||
deleted, just try deleting it instead.
|
||||
"""
|
||||
try:
|
||||
os.unlink(path)
|
||||
return
|
||||
except OSError:
|
||||
e = sys.exc_info()[1]
|
||||
if not (os.path.isdir(path) and
|
||||
e.args[0] in (errno.EPERM, errno.EISDIR)):
|
||||
LOG.error('prune_tree(%r): %s', path, e)
|
||||
return
|
||||
|
||||
try:
|
||||
# Ensure write access for readonly directories. Ignore error in case
|
||||
# path is on a weird filesystem (e.g. vfat).
|
||||
os.chmod(path, int('0700', 8))
|
||||
except OSError:
|
||||
e = sys.exc_info()[1]
|
||||
LOG.warning('prune_tree(%r): %s', path, e)
|
||||
|
||||
try:
|
||||
for name in os.listdir(path):
|
||||
if name not in ('.', '..'):
|
||||
prune_tree(os.path.join(path, name))
|
||||
os.rmdir(path)
|
||||
except OSError:
|
||||
e = sys.exc_info()[1]
|
||||
LOG.error('prune_tree(%r): %s', path, e)
|
||||
|
||||
|
||||
def is_good_temp_dir(path):
|
||||
"""
|
||||
Return :data:`True` if `path` can be used as a temporary directory, logging
|
||||
any failures that may cause it to be unsuitable. If the directory doesn't
|
||||
exist, we attempt to create it using :func:`os.makedirs`.
|
||||
"""
|
||||
if not os.path.exists(path):
|
||||
try:
|
||||
os.makedirs(path, mode=int('0700', 8))
|
||||
except OSError:
|
||||
e = sys.exc_info()[1]
|
||||
LOG.debug('temp dir %r unusable: did not exist and attempting '
|
||||
'to create it failed: %s', path, e)
|
||||
return False
|
||||
|
||||
try:
|
||||
tmp = tempfile.NamedTemporaryFile(
|
||||
prefix='ansible_mitogen_is_good_temp_dir',
|
||||
dir=path,
|
||||
)
|
||||
except (OSError, IOError):
|
||||
e = sys.exc_info()[1]
|
||||
LOG.debug('temp dir %r unusable: %s', path, e)
|
||||
return False
|
||||
|
||||
try:
|
||||
try:
|
||||
os.chmod(tmp.name, int('0700', 8))
|
||||
except OSError:
|
||||
e = sys.exc_info()[1]
|
||||
LOG.debug('temp dir %r unusable: chmod failed: %s', path, e)
|
||||
return False
|
||||
|
||||
try:
|
||||
# access(.., X_OK) is sufficient to detect noexec.
|
||||
if not os.access(tmp.name, os.X_OK):
|
||||
raise OSError('filesystem appears to be mounted noexec')
|
||||
except OSError:
|
||||
e = sys.exc_info()[1]
|
||||
LOG.debug('temp dir %r unusable: %s', path, e)
|
||||
return False
|
||||
finally:
|
||||
tmp.close()
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def find_good_temp_dir(candidate_temp_dirs):
|
||||
"""
|
||||
Given a list of candidate temp directories extracted from ``ansible.cfg``,
|
||||
combine it with the Python-builtin list of candidate directories used by
|
||||
:mod:`tempfile`, then iteratively try each until one is found that is both
|
||||
writeable and executable.
|
||||
|
||||
:param list candidate_temp_dirs:
|
||||
List of candidate $variable-expanded and tilde-expanded directory paths
|
||||
that may be usable as a temporary directory.
|
||||
"""
|
||||
paths = [os.path.expandvars(os.path.expanduser(p))
|
||||
for p in candidate_temp_dirs]
|
||||
paths.extend(tempfile._candidate_tempdir_list())
|
||||
|
||||
for path in paths:
|
||||
if is_good_temp_dir(path):
|
||||
LOG.debug('Selected temp directory: %r (from %r)', path, paths)
|
||||
return path
|
||||
|
||||
raise IOError(MAKE_TEMP_FAILED_MSG % {
|
||||
'paths': '\n '.join(paths),
|
||||
})
|
||||
|
||||
|
||||
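# Illustrative only (the paths and result below are hypothetical): the
# ansible.cfg candidates are variable- and tilde-expanded first, then the
# stdlib candidates are appended, so a call such as
#
#   find_good_temp_dir(['~/.ansible/tmp', '$HOME/tmp'])
#
# probes '/home/user/.ansible/tmp', '/home/user/tmp', then /tmp and friends,
# returning the first directory that is writeable and not mounted noexec.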
@mitogen.core.takes_econtext
|
||||
def init_child(econtext, log_level, candidate_temp_dirs):
|
||||
"""
|
||||
Called by ContextService immediately after connection; arranges for the
|
||||
(presently) spotless Python interpreter to be forked, where the newly
|
||||
forked interpreter becomes the parent of any newly forked future
|
||||
interpreters.
|
||||
|
||||
This is necessary to prevent modules that are executed in-process from
|
||||
polluting the global interpreter state in a way that affects explicitly
|
||||
isolated modules.
|
||||
|
||||
:param int log_level:
|
||||
Logging package level active in the master.
|
||||
:param list[str] candidate_temp_dirs:
|
||||
List of $variable-expanded and tilde-expanded directory names to add to
|
||||
candidate list of temporary directories.
|
||||
|
||||
:returns:
|
||||
Dict like::
|
||||
|
||||
{
|
||||
'fork_context': mitogen.core.Context or None,
|
||||
'good_temp_dir': ...
|
||||
'home_dir': str
|
||||
}
|
||||
|
||||
Where `fork_context` refers to the newly forked 'fork parent' context
|
||||
the controller will use to start forked jobs, and `home_dir` is the
|
||||
home directory for the active user account.
|
||||
"""
|
||||
# Copying the master's log level causes log messages to be filtered before
|
||||
# they reach LogForwarder, thus reducing an influx of tiny messages waking
|
||||
# the connection multiplexer process in the master.
|
||||
LOG.setLevel(log_level)
|
||||
logging.getLogger('ansible_mitogen').setLevel(log_level)
|
||||
|
||||
# issue #536: if the json module is available, remove simplejson from the
|
||||
# importer whitelist to avoid confusing certain Ansible modules.
|
||||
if json.__name__ == 'json':
|
||||
econtext.importer.whitelist.remove('simplejson')
|
||||
|
||||
global _fork_parent
|
||||
if FORK_SUPPORTED:
|
||||
mitogen.parent.upgrade_router(econtext)
|
||||
_fork_parent = econtext.router.fork()
|
||||
|
||||
global good_temp_dir
|
||||
good_temp_dir = find_good_temp_dir(candidate_temp_dirs)
|
||||
|
||||
return {
|
||||
u'fork_context': _fork_parent,
|
||||
u'home_dir': mitogen.core.to_text(os.path.expanduser('~')),
|
||||
u'good_temp_dir': good_temp_dir,
|
||||
}
|
||||
|
||||
|
||||
@mitogen.core.takes_econtext
|
||||
def spawn_isolated_child(econtext):
|
||||
"""
|
||||
For helper functions executed in the fork parent context, arrange for
|
||||
the context's router to be upgraded as necessary and for a new child to be
|
||||
prepared.
|
||||
|
||||
The actual fork occurs from the 'virginal fork parent', which does not have
|
||||
any Ansible modules loaded prior to fork, to avoid conflicts resulting from
|
||||
custom module_utils paths.
|
||||
"""
|
||||
mitogen.parent.upgrade_router(econtext)
|
||||
if FORK_SUPPORTED:
|
||||
context = econtext.router.fork()
|
||||
else:
|
||||
context = econtext.router.local()
|
||||
LOG.debug('spawn_isolated_child() -> %r', context)
|
||||
return context
|
||||
|
||||
|
||||
def run_module(kwargs):
|
||||
"""
|
||||
Set up the process environment in preparation for running an Ansible
|
||||
module. This monkey-patches the Ansible libraries in various places to
|
||||
prevent them from trying to kill the process on completion, and to prevent them
|
||||
from reading sys.stdin.
|
||||
"""
|
||||
runner_name = kwargs.pop('runner_name')
|
||||
klass = getattr(ansible_mitogen.runner, runner_name)
|
||||
impl = klass(**mitogen.core.Kwargs(kwargs))
|
||||
return impl.run()
|
||||
|
||||
|
||||
def _get_async_dir():
|
||||
return os.path.expanduser(
|
||||
os.environ.get('ANSIBLE_ASYNC_DIR', '~/.ansible_async')
|
||||
)
|
||||
|
||||
|
||||
class AsyncRunner(object):
|
||||
def __init__(self, job_id, timeout_secs, started_sender, econtext, kwargs):
|
||||
self.job_id = job_id
|
||||
self.timeout_secs = timeout_secs
|
||||
self.started_sender = started_sender
|
||||
self.econtext = econtext
|
||||
self.kwargs = kwargs
|
||||
self._timed_out = False
|
||||
self._init_path()
|
||||
|
||||
def _init_path(self):
|
||||
async_dir = _get_async_dir()
|
||||
if not os.path.exists(async_dir):
|
||||
os.makedirs(async_dir)
|
||||
self.path = os.path.join(async_dir, self.job_id)
|
||||
|
||||
def _update(self, dct):
|
||||
"""
|
||||
Update an async job status file.
|
||||
"""
|
||||
LOG.info('%r._update(%r, %r)', self, self.job_id, dct)
|
||||
dct.setdefault('ansible_job_id', self.job_id)
|
||||
dct.setdefault('data', '')
|
||||
|
||||
fp = open(self.path + '.tmp', 'w')
|
||||
try:
|
||||
fp.write(json.dumps(dct))
|
||||
finally:
|
||||
fp.close()
|
||||
os.rename(self.path + '.tmp', self.path)
|
||||
|
||||
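# For orientation (a sketch; the job id and pid below are hypothetical):
# right after _run() begins, the status file written by _update() looks
# roughly like
#
#   {"started": 1, "finished": 0, "pid": 12345,
#    "ansible_job_id": "883000126815.12345", "data": ""}
#
# and the module's own JSON result is merged into it once the job finishes
# or times out.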
def _on_sigalrm(self, signum, frame):
|
||||
"""
|
||||
Respond to SIGALRM (job timeout) by updating the job file and killing
|
||||
the process.
|
||||
"""
|
||||
msg = "Job reached maximum time limit of %d seconds." % (
|
||||
self.timeout_secs,
|
||||
)
|
||||
self._update({
|
||||
"failed": 1,
|
||||
"finished": 1,
|
||||
"msg": msg,
|
||||
})
|
||||
self._timed_out = True
|
||||
self.econtext.broker.shutdown()
|
||||
|
||||
def _install_alarm(self):
|
||||
signal.signal(signal.SIGALRM, self._on_sigalrm)
|
||||
signal.alarm(self.timeout_secs)
|
||||
|
||||
def _run_module(self):
|
||||
kwargs = dict(self.kwargs, **{
|
||||
'detach': True,
|
||||
'econtext': self.econtext,
|
||||
'emulate_tty': False,
|
||||
})
|
||||
return run_module(kwargs)
|
||||
|
||||
def _parse_result(self, dct):
|
||||
filtered, warnings = (
|
||||
ansible.module_utils.json_utils.
|
||||
_filter_non_json_lines(dct['stdout'])
|
||||
)
|
||||
result = json.loads(filtered)
|
||||
result.setdefault('warnings', []).extend(warnings)
|
||||
result['stderr'] = dct['stderr'] or result.get('stderr', '')
|
||||
self._update(result)
|
||||
|
||||
def _run(self):
|
||||
"""
|
||||
1. Immediately updates the status file to mark the job as started.
|
||||
2. Installs a timer/signal handler to implement the time limit.
|
||||
3. Runs as with run_module(), writing the result to the status file.
|
||||
|
||||
:param dict kwargs:
|
||||
Runner keyword arguments.
|
||||
:param str job_id:
|
||||
String job ID.
|
||||
:param int timeout_secs:
|
||||
If >0, limit the task's maximum run time.
|
||||
"""
|
||||
self._update({
|
||||
'started': 1,
|
||||
'finished': 0,
|
||||
'pid': os.getpid()
|
||||
})
|
||||
self.started_sender.send(True)
|
||||
|
||||
if self.timeout_secs > 0:
|
||||
self._install_alarm()
|
||||
|
||||
dct = self._run_module()
|
||||
if not self._timed_out:
|
||||
# After SIGALRM fires, there is a window between broker responding
|
||||
# to shutdown() by killing the process, and work continuing on the
|
||||
# main thread. If main thread was asleep in at least
|
||||
# basic.py/select.select(), an EINTR will be raised. We want to
|
||||
# discard that exception.
|
||||
try:
|
||||
self._parse_result(dct)
|
||||
except Exception:
|
||||
self._update({
|
||||
"failed": 1,
|
||||
"msg": traceback.format_exc(),
|
||||
"data": dct['stdout'], # temporary notice only
|
||||
"stderr": dct['stderr']
|
||||
})
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
try:
|
||||
self._run()
|
||||
except Exception:
|
||||
self._update({
|
||||
"failed": 1,
|
||||
"msg": traceback.format_exc(),
|
||||
})
|
||||
finally:
|
||||
self.econtext.broker.shutdown()
|
||||
|
||||
|
||||
@mitogen.core.takes_econtext
|
||||
def run_module_async(kwargs, job_id, timeout_secs, started_sender, econtext):
|
||||
"""
|
||||
Execute a module with its run status and result written to a file,
|
||||
terminating the process on completion. This function must run in a child
|
||||
forked using :func:`spawn_isolated_child`.
|
||||
|
||||
@param mitogen.core.Sender started_sender:
|
||||
A sender that will receive :data:`True` once the job has reached a
|
||||
point where its initial job file has been written. This is required to
|
||||
avoid a race where an overly eager controller can check for a task
|
||||
before it has reached that point in execution, which is possible at
|
||||
least on Python 2.4, where forking is not available for async tasks.
|
||||
"""
|
||||
arunner = AsyncRunner(
|
||||
job_id,
|
||||
timeout_secs,
|
||||
started_sender,
|
||||
econtext,
|
||||
kwargs
|
||||
)
|
||||
arunner.run()
|
||||
|
||||
|
||||
def get_user_shell():
|
||||
"""
|
||||
For commands executed directly via an SSH command-line, SSH looks up the
|
||||
user's shell via getpwuid() and only defaults to /bin/sh if that field is
|
||||
missing or empty.
|
||||
"""
|
||||
try:
|
||||
pw_shell = pwd.getpwuid(os.geteuid()).pw_shell
|
||||
except KeyError:
|
||||
pw_shell = None
|
||||
|
||||
return pw_shell or '/bin/sh'
|
||||
|
||||
|
||||
def exec_args(args, in_data='', chdir=None, shell=None, emulate_tty=False):
|
||||
"""
|
||||
Run a command in a subprocess, emulating the argument handling behaviour of
|
||||
SSH.
|
||||
|
||||
:param list[str] args:
|
||||
Argument vector.
|
||||
:param bytes in_data:
|
||||
Optional standard input for the command.
|
||||
:param bool emulate_tty:
|
||||
If :data:`True`, arrange for stdout and stderr to be merged into the
|
||||
stdout pipe and for LF to be translated into CRLF, emulating the
|
||||
behaviour of a TTY.
|
||||
:return:
|
||||
(return code, stdout bytes, stderr bytes)
|
||||
"""
|
||||
LOG.debug('exec_args(%r, ..., chdir=%r)', args, chdir)
|
||||
assert isinstance(args, list)
|
||||
|
||||
if emulate_tty:
|
||||
stderr = subprocess.STDOUT
|
||||
else:
|
||||
stderr = subprocess.PIPE
|
||||
|
||||
proc = subprocess.Popen(
|
||||
args=args,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=stderr,
|
||||
stdin=subprocess.PIPE,
|
||||
cwd=chdir,
|
||||
)
|
||||
stdout, stderr = proc.communicate(in_data)
|
||||
|
||||
if emulate_tty:
|
||||
stdout = stdout.replace(b('\n'), b('\r\n'))
|
||||
return proc.returncode, stdout, stderr or b('')
|
||||
|
||||
|
||||
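# Example behaviour (a sketch; Python 3 byte reprs shown). With the default
# emulate_tty=False, stderr is captured separately; with emulate_tty=True it
# is merged into stdout and LF is rewritten to CRLF:
#
#   >>> exec_args(['/bin/echo', 'hi'])
#   (0, b'hi\n', b'')
#   >>> exec_args(['/bin/echo', 'hi'], emulate_tty=True)
#   (0, b'hi\r\n', b'')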
def exec_command(cmd, in_data='', chdir=None, shell=None, emulate_tty=False):
|
||||
"""
|
||||
Run a command in a subprocess, emulating the argument handling behaviour of
|
||||
SSH.
|
||||
|
||||
:param bytes cmd:
|
||||
String command line, passed to user's shell.
|
||||
:param bytes in_data:
|
||||
Optional standard input for the command.
|
||||
:return:
|
||||
(return code, stdout bytes, stderr bytes)
|
||||
"""
|
||||
assert isinstance(cmd, mitogen.core.UnicodeType)
|
||||
return exec_args(
|
||||
args=[get_user_shell(), '-c', cmd],
|
||||
in_data=in_data,
|
||||
chdir=chdir,
|
||||
shell=shell,
|
||||
emulate_tty=emulate_tty,
|
||||
)
|
||||
|
||||
|
||||
def read_path(path):
|
||||
"""
|
||||
Fetch the contents of a filesystem `path` as bytes.
|
||||
"""
|
||||
return open(path, 'rb').read()
|
||||
|
||||
|
||||
def set_file_owner(path, owner, group=None, fd=None):
|
||||
if owner:
|
||||
uid = pwd.getpwnam(owner).pw_uid
|
||||
else:
|
||||
uid = os.geteuid()
|
||||
|
||||
if group:
|
||||
gid = grp.getgrnam(group).gr_gid
|
||||
else:
|
||||
gid = os.getegid()
|
||||
|
||||
if fd is not None and hasattr(os, 'fchown'):
|
||||
os.fchown(fd, uid, gid)
|
||||
else:
|
||||
# Python<2.6
|
||||
os.chown(path, uid, gid)
|
||||
|
||||
|
||||
def write_path(path, s, owner=None, group=None, mode=None,
|
||||
utimes=None, sync=False):
|
||||
"""
|
||||
Writes bytes `s` to a filesystem `path`.
|
||||
"""
|
||||
path = os.path.abspath(path)
|
||||
fd, tmp_path = tempfile.mkstemp(suffix='.tmp',
|
||||
prefix='.ansible_mitogen_transfer-',
|
||||
dir=os.path.dirname(path))
|
||||
fp = os.fdopen(fd, 'wb', mitogen.core.CHUNK_SIZE)
|
||||
LOG.debug('write_path(path=%r) temporary file: %s', path, tmp_path)
|
||||
|
||||
try:
|
||||
try:
|
||||
if mode:
|
||||
set_file_mode(tmp_path, mode, fd=fp.fileno())
|
||||
if owner or group:
|
||||
set_file_owner(tmp_path, owner, group, fd=fp.fileno())
|
||||
fp.write(s)
|
||||
finally:
|
||||
fp.close()
|
||||
|
||||
if sync:
|
||||
os.fsync(fp.fileno())
|
||||
os.rename(tmp_path, path)
|
||||
except BaseException:
|
||||
os.unlink(tmp_path)
|
||||
raise
|
||||
|
||||
if utimes:
|
||||
os.utime(path, utimes)
|
||||
|
||||
|
||||
CHMOD_CLAUSE_PAT = re.compile(r'([uoga]*)([+\-=])([ugo]|[rwx]*)')
|
||||
CHMOD_MASKS = {
|
||||
'u': stat.S_IRWXU,
|
||||
'g': stat.S_IRWXG,
|
||||
'o': stat.S_IRWXO,
|
||||
'a': (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO),
|
||||
}
|
||||
CHMOD_BITS = {
|
||||
'u': {'r': stat.S_IRUSR, 'w': stat.S_IWUSR, 'x': stat.S_IXUSR},
|
||||
'g': {'r': stat.S_IRGRP, 'w': stat.S_IWGRP, 'x': stat.S_IXGRP},
|
||||
'o': {'r': stat.S_IROTH, 'w': stat.S_IWOTH, 'x': stat.S_IXOTH},
|
||||
'a': {
|
||||
'r': (stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH),
|
||||
'w': (stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH),
|
||||
'x': (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def apply_mode_spec(spec, mode):
|
||||
"""
|
||||
Given a symbolic file mode change specification in the style of chmod(1)
|
||||
`spec`, apply changes in the specification to the numeric file mode `mode`.
|
||||
"""
|
||||
for clause in mitogen.core.to_text(spec).split(','):
|
||||
match = CHMOD_CLAUSE_PAT.match(clause)
|
||||
who, op, perms = match.groups()
|
||||
for ch in who or 'a':
|
||||
mask = CHMOD_MASKS[ch]
|
||||
bits = CHMOD_BITS[ch]
|
||||
cur_perm_bits = mode & mask
|
||||
new_perm_bits = reduce(operator.or_, (bits[p] for p in perms), 0)
|
||||
mode &= ~mask
|
||||
if op == '=':
|
||||
mode |= new_perm_bits
|
||||
elif op == '+':
|
||||
mode |= new_perm_bits | cur_perm_bits
|
||||
else:
|
||||
mode |= cur_perm_bits & ~new_perm_bits
|
||||
return mode
|
||||
|
||||
|
||||
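# Worked examples (a sketch; Python 3 octal reprs shown):
#
#   >>> oct(apply_mode_spec('u+rwx,go=rx', 0))
#   '0o755'
#   >>> oct(apply_mode_spec('a-w', 0o666))
#   '0o444'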
def set_file_mode(path, spec, fd=None):
|
||||
"""
|
||||
Update the permissions of a file using the same syntax as chmod(1).
|
||||
"""
|
||||
if isinstance(spec, int):
|
||||
new_mode = spec
|
||||
elif not mitogen.core.PY3 and isinstance(spec, long):
|
||||
new_mode = spec
|
||||
elif spec.isdigit():
|
||||
new_mode = int(spec, 8)
|
||||
else:
|
||||
mode = os.stat(path).st_mode
|
||||
new_mode = apply_mode_spec(spec, mode)
|
||||
|
||||
if fd is not None and hasattr(os, 'fchmod'):
|
||||
os.fchmod(fd, new_mode)
|
||||
else:
|
||||
os.chmod(path, new_mode)
|
||||
|
||||
|
||||
def file_exists(path):
|
||||
"""
|
||||
Return :data:`True` if `path` exists. This is a wrapper function over
|
||||
:func:`os.path.exists`, since its implementation module varies across
|
||||
Python versions.
|
||||
"""
|
||||
return os.path.exists(path)
|
699
mitogen/ansible_mitogen/transport_config.py
Normal file
@ -0,0 +1,699 @@
|
||||
# Copyright 2019, David Wilson
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this software without
|
||||
# specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
|
||||
"""
|
||||
Mitogen extends Ansible's target configuration mechanism in several ways that
|
||||
require some care:
|
||||
|
||||
* Per-task configurables in Ansible like ansible_python_interpreter are
|
||||
connection-layer configurables in Mitogen. They must be extracted during each
|
||||
task execution to form the complete connection-layer configuration.
|
||||
|
||||
* Mitogen has extra configurables not supported by Ansible at all, such as
|
||||
mitogen_ssh_debug_level. These are extracted the same way as
|
||||
ansible_python_interpreter.
|
||||
|
||||
* Mitogen allows connections to be delegated to other machines. Ansible has no
|
||||
internal framework for this, and so Mitogen must figure out a delegated
|
||||
connection configuration all on its own. It cannot reuse much of the Ansible
|
||||
machinery for building a connection configuration, as that machinery is
|
||||
deeply spread out and hard-wired to expect Ansible's usual mode of operation.
|
||||
|
||||
For normal and delegate_to connections, Ansible's PlayContext is reused where
|
||||
possible to maximize compatibility, but for proxy hops, configurations are
|
||||
built up using the HostVars magic class to call VariableManager.get_vars()
|
||||
behind the scenes on our behalf. Where Ansible has multiple sources of a
|
||||
configuration item, for example, ansible_ssh_extra_args, Mitogen must (ideally
|
||||
perfectly) reproduce how Ansible arrives at its value, without using mechanisms
|
||||
that are hard-wired or change across Ansible versions.
|
||||
|
||||
That is what this file is for. It exports two spec classes, one that takes all
|
||||
information from PlayContext, and another that takes (almost) all information
|
||||
from HostVars.
|
||||
"""
|
||||
|
||||
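# A minimal construction sketch (assumptions: `connection`, `play_context`,
# `task_vars` and the 'jump_host' inventory name come from the calling
# connection plug-in and are not defined in this file):
#
#   spec = PlayContextSpec(connection, play_context,
#                          play_context.connection, inventory_name)
#   via_spec = MitogenViaSpec('jump_host',
#                             task_vars['hostvars']['jump_host'],
#                             become_method='', become_user='',
#                             play_context=play_context)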
import abc
|
||||
import os
|
||||
import ansible.utils.shlex
|
||||
import ansible.constants as C
|
||||
|
||||
from ansible.module_utils.six import with_metaclass
|
||||
|
||||
|
||||
import mitogen.core
|
||||
|
||||
|
||||
def parse_python_path(s):
|
||||
"""
|
||||
Given the string set for ansible_python_interpreter, parse it using shell
|
||||
syntax and return an appropriate argument vector.
|
||||
"""
|
||||
if s:
|
||||
return ansible.utils.shlex.shlex_split(s)
|
||||
|
||||
|
||||
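# For example:
#
#   >>> parse_python_path('/usr/bin/env python3 -u')
#   ['/usr/bin/env', 'python3', '-u']
#
# A falsy value falls through and returns None.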
def optional_secret(value):
|
||||
"""
|
||||
Wrap `value` in :class:`mitogen.core.Secret` if it is not :data:`None`,
|
||||
otherwise return :data:`None`.
|
||||
"""
|
||||
if value is not None:
|
||||
return mitogen.core.Secret(value)
|
||||
|
||||
|
||||
def first_true(it, default=None):
|
||||
"""
|
||||
Return the first truthy element from `it`.
|
||||
"""
|
||||
for elem in it:
|
||||
if elem:
|
||||
return elem
|
||||
return default
|
||||
|
||||
|
||||
class Spec(with_metaclass(abc.ABCMeta, object)):
|
||||
"""
|
||||
A source for variables that comprise a connection configuration.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def transport(self):
|
||||
"""
|
||||
The name of the Ansible plug-in implementing the connection.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def inventory_name(self):
|
||||
"""
|
||||
The name of the target being connected to as it appears in Ansible's
|
||||
inventory.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def remote_addr(self):
|
||||
"""
|
||||
The network address of the target, or for containers and other special
|
||||
targets, some other unique identifier.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def remote_user(self):
|
||||
"""
|
||||
The username of the login account on the target.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def password(self):
|
||||
"""
|
||||
The password of the login account on the target.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def become(self):
|
||||
"""
|
||||
:data:`True` if privilege escalation should be active.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def become_method(self):
|
||||
"""
|
||||
The name of the Ansible become method to use.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def become_user(self):
|
||||
"""
|
||||
The username of the target account for become.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def become_pass(self):
|
||||
"""
|
||||
The password of the target account for become.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def port(self):
|
||||
"""
|
||||
The port of the login service on the target machine.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def python_path(self):
|
||||
"""
|
||||
Path to the Python interpreter on the target machine.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def private_key_file(self):
|
||||
"""
|
||||
Path to the SSH private key file to use to login.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def ssh_executable(self):
|
||||
"""
|
||||
Path to the SSH executable.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def timeout(self):
|
||||
"""
|
||||
The generic timeout for all connections.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def ansible_ssh_timeout(self):
|
||||
"""
|
||||
The SSH-specific timeout for a connection.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def ssh_args(self):
|
||||
"""
|
||||
The list of additional arguments that should be included in an SSH
|
||||
invocation.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def become_exe(self):
|
||||
"""
|
||||
The path to the executable implementing the become method on the remote
|
||||
machine.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def sudo_args(self):
|
||||
"""
|
||||
The list of additional arguments that should be included in a become
|
||||
invocation.
|
||||
"""
|
||||
# TODO: split out into sudo_args/become_args.
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_via(self):
|
||||
"""
|
||||
The value of the mitogen_via= variable for this connection. Indicates
|
||||
the connection should be established via an intermediary.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_kind(self):
|
||||
"""
|
||||
The type of container to use with the "setns" transport.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_mask_remote_name(self):
|
||||
"""
|
||||
Specifies whether to set a fixed "remote_name" field. The remote_name
|
||||
is the suffix of `argv[0]` for remote interpreters. By default it
|
||||
includes identifying information from the local process, which may be
|
||||
undesirable in some circumstances.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_buildah_path(self):
|
||||
"""
|
||||
The path to the "buildah" program for the 'buildah' transport.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_docker_path(self):
|
||||
"""
|
||||
The path to the "docker" program for the 'docker' transport.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_kubectl_path(self):
|
||||
"""
|
||||
The path to the "kubectl" program for the 'docker' transport.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_lxc_path(self):
|
||||
"""
|
||||
The path to the "lxc" program for the 'lxd' transport.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_lxc_attach_path(self):
|
||||
"""
|
||||
The path to the "lxc-attach" program for the 'lxc' transport.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_lxc_info_path(self):
|
||||
"""
|
||||
The path to the "lxc-info" program for the 'lxc' transport.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_machinectl_path(self):
|
||||
"""
|
||||
The path to the "machinectl" program for the 'setns' transport.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_ssh_keepalive_interval(self):
|
||||
"""
|
||||
The SSH ServerAliveInterval.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_ssh_keepalive_count(self):
|
||||
"""
|
||||
The SSH ServerAliveCountMax.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_ssh_debug_level(self):
|
||||
"""
|
||||
The SSH debug level.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def mitogen_ssh_compression(self):
|
||||
"""
|
||||
Whether SSH compression is enabled.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def extra_args(self):
|
||||
"""
|
||||
Connection-specific arguments.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def ansible_doas_exe(self):
|
||||
"""
|
||||
Value of "ansible_doas_exe" variable.
|
||||
"""
|
||||
|
||||
|
||||
class PlayContextSpec(Spec):
|
||||
"""
|
||||
PlayContextSpec takes almost all its information as-is from Ansible's
|
||||
PlayContext. It is used for normal connections and delegate_to connections,
|
||||
and should always be accurate.
|
||||
"""
|
||||
def __init__(self, connection, play_context, transport, inventory_name):
|
||||
self._connection = connection
|
||||
self._play_context = play_context
|
||||
self._transport = transport
|
||||
self._inventory_name = inventory_name
|
||||
|
||||
def transport(self):
|
||||
return self._transport
|
||||
|
||||
def inventory_name(self):
|
||||
return self._inventory_name
|
||||
|
||||
def remote_addr(self):
|
||||
return self._play_context.remote_addr
|
||||
|
||||
def remote_user(self):
|
||||
return self._play_context.remote_user
|
||||
|
||||
def become(self):
|
||||
return self._play_context.become
|
||||
|
||||
def become_method(self):
|
||||
return self._play_context.become_method
|
||||
|
||||
def become_user(self):
|
||||
return self._play_context.become_user
|
||||
|
||||
def become_pass(self):
|
||||
return optional_secret(self._play_context.become_pass)
|
||||
|
||||
def password(self):
|
||||
return optional_secret(self._play_context.password)
|
||||
|
||||
def port(self):
|
||||
return self._play_context.port
|
||||
|
||||
def python_path(self):
|
||||
s = self._connection.get_task_var('ansible_python_interpreter')
|
||||
# #511, #536: executor/module_common.py::_get_shebang() hard-wires
|
||||
# "/usr/bin/python" as the default interpreter path if no other
|
||||
# interpreter is specified.
|
||||
return parse_python_path(s or '/usr/bin/python')
|
||||
|
||||
def private_key_file(self):
|
||||
return self._play_context.private_key_file
|
||||
|
||||
def ssh_executable(self):
|
||||
return self._play_context.ssh_executable
|
||||
|
||||
def timeout(self):
|
||||
return self._play_context.timeout
|
||||
|
||||
def ansible_ssh_timeout(self):
|
||||
return (
|
||||
self._connection.get_task_var('ansible_timeout') or
|
||||
self._connection.get_task_var('ansible_ssh_timeout') or
|
||||
self.timeout()
|
||||
)
|
||||
|
||||
def ssh_args(self):
|
||||
return [
|
||||
mitogen.core.to_text(term)
|
||||
for s in (
|
||||
getattr(self._play_context, 'ssh_args', ''),
|
||||
getattr(self._play_context, 'ssh_common_args', ''),
|
||||
getattr(self._play_context, 'ssh_extra_args', '')
|
||||
)
|
||||
for term in ansible.utils.shlex.shlex_split(s or '')
|
||||
]
|
||||
|
||||
def become_exe(self):
|
||||
# In Ansible 2.8, PlayContext.become_exe always has a default value due
|
||||
# to the new options mechanism. Previously it was only set if a value
|
||||
# ("somewhere") had been specified for the task.
|
||||
# For consistency in the tests, here we make older Ansibles behave like
|
||||
# newer Ansibles.
|
||||
exe = self._play_context.become_exe
|
||||
if exe is None and self._play_context.become_method == 'sudo':
|
||||
exe = 'sudo'
|
||||
return exe
|
||||
|
||||
def sudo_args(self):
|
||||
return [
|
||||
mitogen.core.to_text(term)
|
||||
for term in ansible.utils.shlex.shlex_split(
|
||||
first_true((
|
||||
self._play_context.become_flags,
|
||||
# Ansible <=2.7.
|
||||
getattr(self._play_context, 'sudo_flags', ''),
|
||||
# Ansible <=2.3.
|
||||
getattr(C, 'DEFAULT_BECOME_FLAGS', ''),
|
||||
getattr(C, 'DEFAULT_SUDO_FLAGS', '')
|
||||
), default='')
|
||||
)
|
||||
]
|
||||
|
||||
def mitogen_via(self):
|
||||
return self._connection.get_task_var('mitogen_via')
|
||||
|
||||
def mitogen_kind(self):
|
||||
return self._connection.get_task_var('mitogen_kind')
|
||||
|
||||
def mitogen_mask_remote_name(self):
|
||||
return self._connection.get_task_var('mitogen_mask_remote_name')
|
||||
|
||||
def mitogen_buildah_path(self):
|
||||
return self._connection.get_task_var('mitogen_buildah_path')
|
||||
|
||||
def mitogen_docker_path(self):
|
||||
return self._connection.get_task_var('mitogen_docker_path')
|
||||
|
||||
def mitogen_kubectl_path(self):
|
||||
return self._connection.get_task_var('mitogen_kubectl_path')
|
||||
|
||||
def mitogen_lxc_path(self):
|
||||
return self._connection.get_task_var('mitogen_lxc_path')
|
||||
|
||||
def mitogen_lxc_attach_path(self):
|
||||
return self._connection.get_task_var('mitogen_lxc_attach_path')
|
||||
|
||||
def mitogen_lxc_info_path(self):
|
||||
return self._connection.get_task_var('mitogen_lxc_info_path')
|
||||
|
||||
def mitogen_ssh_keepalive_interval(self):
|
||||
return self._connection.get_task_var('mitogen_ssh_keepalive_interval')
|
||||
|
||||
def mitogen_ssh_keepalive_count(self):
|
||||
return self._connection.get_task_var('mitogen_ssh_keepalive_count')
|
||||
|
||||
def mitogen_machinectl_path(self):
|
||||
return self._connection.get_task_var('mitogen_machinectl_path')
|
||||
|
||||
def mitogen_ssh_debug_level(self):
|
||||
return self._connection.get_task_var('mitogen_ssh_debug_level')
|
||||
|
||||
def mitogen_ssh_compression(self):
|
||||
return self._connection.get_task_var('mitogen_ssh_compression')
|
||||
|
||||
def extra_args(self):
|
||||
return self._connection.get_extra_args()
|
||||
|
||||
def ansible_doas_exe(self):
|
||||
return (
|
||||
self._connection.get_task_var('ansible_doas_exe') or
|
||||
os.environ.get('ANSIBLE_DOAS_EXE')
|
||||
)
|
||||
|
||||
|
||||
class MitogenViaSpec(Spec):
|
||||
"""
|
||||
MitogenViaSpec takes most of its information from the HostVars of the
|
||||
running task. HostVars is a lightweight wrapper around VariableManager, so
|
||||
it is better to say that VariableManager.get_vars() is the ultimate source
|
||||
of MitogenViaSpec's information.
|
||||
|
||||
Due to this, mitogen_via= hosts must have all their configuration
|
||||
information represented as host and group variables. We cannot use any
|
||||
per-task configuration, as all that data belongs to the real target host.
|
||||
|
||||
Ansible uses all kinds of strange historical logic for calculating
|
||||
variables, including making their precedence configurable. MitogenViaSpec
|
||||
must ultimately reimplement all of that logic. It is likely that if you are
|
||||
having a configuration problem with connection delegation, the answer to
|
||||
your problem lies in the method implementations below!
|
||||
"""
|
||||
def __init__(self, inventory_name, host_vars, become_method, become_user,
|
||||
play_context):
|
||||
"""
|
||||
:param str inventory_name:
|
||||
The inventory name of the intermediary machine, i.e. not the target
|
||||
machine.
|
||||
:param dict host_vars:
|
||||
The HostVars magic dictionary provided by Ansible in task_vars.
|
||||
:param str become_method:
|
||||
If the mitogen_via= spec included a become method, the method it
|
||||
specifies.
|
||||
:param str become_user:
|
||||
If the mitogen_via= spec included a become user, the user it
|
||||
specifies.
|
||||
:param PlayContext play_context:
|
||||
For some global values **only**, the PlayContext used to describe
|
||||
the real target machine. Values from this object are **strictly
|
||||
restricted** to values that are Ansible-global, e.g. the passwords
|
||||
specified interactively.
|
||||
"""
|
||||
self._inventory_name = inventory_name
|
||||
self._host_vars = host_vars
|
||||
self._become_method = become_method
|
||||
self._become_user = become_user
|
||||
# Dangerous! You may find a variable you want in this object, but it's
|
||||
# almost certainly for the wrong machine!
|
||||
self._dangerous_play_context = play_context
|
||||
|
||||
def transport(self):
|
||||
return (
|
||||
self._host_vars.get('ansible_connection') or
|
||||
C.DEFAULT_TRANSPORT
|
||||
)
|
||||
|
||||
def inventory_name(self):
|
||||
return self._inventory_name
|
||||
|
||||
def remote_addr(self):
|
||||
# play_context.py::MAGIC_VARIABLE_MAPPING
|
||||
return (
|
||||
self._host_vars.get('ansible_ssh_host') or
|
||||
self._host_vars.get('ansible_host') or
|
||||
self._inventory_name
|
||||
)
|
||||
|
||||
def remote_user(self):
|
||||
return (
|
||||
self._host_vars.get('ansible_ssh_user') or
|
||||
self._host_vars.get('ansible_user') or
|
||||
C.DEFAULT_REMOTE_USER
|
||||
)
|
||||
|
||||
def become(self):
|
||||
return bool(self._become_user)
|
||||
|
||||
def become_method(self):
|
||||
return (
|
||||
self._become_method or
|
||||
self._host_vars.get('ansible_become_method') or
|
||||
C.DEFAULT_BECOME_METHOD
|
||||
)
|
||||
|
||||
def become_user(self):
|
||||
return self._become_user
|
||||
|
||||
def become_pass(self):
|
||||
return optional_secret(
|
||||
self._host_vars.get('ansible_become_password') or
|
||||
self._host_vars.get('ansible_become_pass')
|
||||
)
|
||||
|
||||
def password(self):
|
||||
return optional_secret(
|
||||
self._host_vars.get('ansible_ssh_pass') or
|
||||
self._host_vars.get('ansible_password')
|
||||
)
|
||||
|
||||
def port(self):
|
||||
return (
|
||||
self._host_vars.get('ansible_ssh_port') or
|
||||
self._host_vars.get('ansible_port') or
|
||||
C.DEFAULT_REMOTE_PORT
|
||||
)
|
||||
|
||||
def python_path(self):
|
||||
s = self._host_vars.get('ansible_python_interpreter')
|
||||
# #511, #536: executor/module_common.py::_get_shebang() hard-wires
|
||||
# "/usr/bin/python" as the default interpreter path if no other
|
||||
# interpreter is specified.
|
||||
return parse_python_path(s or '/usr/bin/python')
|
||||
|
||||
def private_key_file(self):
|
||||
# TODO: must come from PlayContext too.
|
||||
return (
|
||||
self._host_vars.get('ansible_ssh_private_key_file') or
|
||||
self._host_vars.get('ansible_private_key_file') or
|
||||
C.DEFAULT_PRIVATE_KEY_FILE
|
||||
)
|
||||
|
||||
def ssh_executable(self):
|
||||
return (
|
||||
self._host_vars.get('ansible_ssh_executable') or
|
||||
C.ANSIBLE_SSH_EXECUTABLE
|
||||
)
|
||||
|
||||
def timeout(self):
|
||||
# TODO: must come from PlayContext too.
|
||||
return C.DEFAULT_TIMEOUT
|
||||
|
||||
def ansible_ssh_timeout(self):
|
||||
return (
|
||||
self._host_vars.get('ansible_timeout') or
|
||||
self._host_vars.get('ansible_ssh_timeout') or
|
||||
self.timeout()
|
||||
)
|
||||
|
||||
def ssh_args(self):
|
||||
return [
|
||||
mitogen.core.to_text(term)
|
||||
for s in (
|
||||
(
|
||||
self._host_vars.get('ansible_ssh_args') or
|
||||
getattr(C, 'ANSIBLE_SSH_ARGS', None) or
|
||||
os.environ.get('ANSIBLE_SSH_ARGS')
|
||||
# TODO: ini entry. older versions.
|
||||
),
|
||||
(
|
||||
self._host_vars.get('ansible_ssh_common_args') or
|
||||
os.environ.get('ANSIBLE_SSH_COMMON_ARGS')
|
||||
# TODO: ini entry.
|
||||
),
|
||||
(
|
||||
self._host_vars.get('ansible_ssh_extra_args') or
|
||||
os.environ.get('ANSIBLE_SSH_EXTRA_ARGS')
|
||||
# TODO: ini entry.
|
||||
),
|
||||
)
|
||||
for term in ansible.utils.shlex.shlex_split(s)
|
||||
if s
|
||||
]
|
||||
|
||||
def become_exe(self):
|
||||
return (
|
||||
self._host_vars.get('ansible_become_exe') or
|
||||
C.DEFAULT_BECOME_EXE
|
||||
)
|
||||
|
||||
def sudo_args(self):
|
||||
return [
|
||||
mitogen.core.to_text(term)
|
||||
for s in (
|
||||
self._host_vars.get('ansible_sudo_flags') or '',
|
||||
self._host_vars.get('ansible_become_flags') or '',
|
||||
)
|
||||
for term in ansible.utils.shlex.shlex_split(s)
|
||||
]
|
||||
|
||||
def mitogen_via(self):
|
||||
return self._host_vars.get('mitogen_via')
|
||||
|
||||
def mitogen_kind(self):
|
||||
return self._host_vars.get('mitogen_kind')
|
||||
|
||||
def mitogen_mask_remote_name(self):
|
||||
return self._host_vars.get('mitogen_mask_remote_name')
|
||||
|
||||
def mitogen_buildah_path(self):
|
||||
return self._host_vars.get('mitogen_buildah_path')
|
||||
|
||||
def mitogen_docker_path(self):
|
||||
return self._host_vars.get('mitogen_docker_path')
|
||||
|
||||
def mitogen_kubectl_path(self):
|
||||
return self._host_vars.get('mitogen_kubectl_path')
|
||||
|
||||
def mitogen_lxc_path(self):
|
||||
return self._host_vars.get('mitogen_lxc_path')
|
||||
|
||||
def mitogen_lxc_attach_path(self):
|
||||
return self._host_vars.get('mitogen_lxc_attach_path')
|
||||
|
||||
def mitogen_lxc_info_path(self):
|
||||
return self._host_vars.get('mitogen_lxc_info_path')
|
||||
|
||||
def mitogen_ssh_keepalive_interval(self):
|
||||
return self._host_vars.get('mitogen_ssh_keepalive_interval')
|
||||
|
||||
def mitogen_ssh_keepalive_count(self):
|
||||
return self._host_vars.get('mitogen_ssh_keepalive_count')
|
||||
|
||||
def mitogen_machinectl_path(self):
|
||||
return self._host_vars.get('mitogen_machinectl_path')
|
||||
|
||||
def mitogen_ssh_debug_level(self):
|
||||
return self._host_vars.get('mitogen_ssh_debug_level')
|
||||
|
||||
def mitogen_ssh_compression(self):
|
||||
return self._host_vars.get('mitogen_ssh_compression')
|
||||
|
||||
def extra_args(self):
|
||||
return [] # TODO
|
||||
|
||||
def ansible_doas_exe(self):
|
||||
return (
|
||||
self._host_vars.get('ansible_doas_exe') or
|
||||
os.environ.get('ANSIBLE_DOAS_EXE')
|
||||
)
|