Commit 985463fd authored by nanahira's avatar nanahira

mitogen

parent 3fda26b0
[defaults] [defaults]
inventory = inventory.yaml inventory = inventory.yaml
host_key_checking = False host_key_checking = False
strategy_plugins = mitogen-0.2.9/ansible_mitogen/plugins/strategy strategy_plugins = mitogen-0.3.7/ansible_mitogen/plugins/strategy
strategy = mitogen_linear strategy = mitogen_linear
mycard: mycard:
hosts: hosts:
example.com: 153.34.0.137:
children: children:
router_nextgen: router_nextgen:
rcloud: rcloud:
vars: # 一般可以什么都不改,想改的话改什么抄什么 vars: # 一般可以什么都不改,想改的话改什么抄什么
ansible_ssh_user: root ansible_ssh_user: frog
ansible_python_interpreter: python3 ansible_python_interpreter: python3
authorized_keys: | authorized_keys: |
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCf7d3z1zJ3+AekdFlF+7BTL8k1X5YrC/Mu23aBBETYxl7N37XM/Ts1u15Xu9l0rgKOe2YphAzexsTmvnBW+3ZJUaJ4gIelw2FWYSHmPkcTtLRrqmSqqCVqyxLrlkvDV1mJmRTQh9rFT3OM2jge4HkN2PSnxuT4AuRwblHcGj2LWD/4XijmHf/BK6GaKownt702ARX18D7d2hYZwO7TgVtJsZclIg3g9SjLgtx6sx+Khlx2/p3HdMo7lYmbucnk1Px11Z0Uq6H7gdUyZhRMWgHnqpnzTy02onhsDhPotUbJdEHJKMZW+eg3X1KiPH143jDHPW/EQjhnPfMBBqaPof1l nanahira@nanahira-another ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCq9xZpQkZ7MAfRRI2pVHzpCgjWFsGjBDW4ue4d83FtCYYRvZ8MCmRq3kIASpnQUKHWkHR8OZdYVjlCZyVxbVQgF4/YpPzSo64wtWR2gdKEtYLzEgsu8Y6u5hcqPAEFK1bZA01oxj8/sn+LsQOFfhaMfBEx2wplKz8CzgJImzMHX4Zf1VkeNdKvfnSV1oX8DpSMm7eNdg0W84LXi7dIACNi9FU2b/I5Z+OZmjK6eHVQjT4aaRs9fY7Tx9QknDq0i7yhPQQGNTHCA8EQmnYN1pj9jqudNguqZ/PnqGMp2OMSdlBe5XWmERK0mjQPq80sN2bQjIXUKC8XnTtNrxf+GgJVWTCogVdLvRn4C4j32yMDFrtfK1kxLNhZANZGFHDMiv5aRYmeIM3t02DoClt/IvHNB1gOXd0ljJZy9/TExYuI/y7XkC9PWxvkNtl8jEWN6/uyWCQ2kuos92b51BXHynijaKp7qFqj6ae7T4dnKhSVwCJOFR+d8RrMEwDPAMfBveE= frogeater@LAPTOP-5570BFO6
ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAtRix6NrCSXMNpL9WuD6DA198aGirvb8cYIcx5fS98/EWqA8n8yjBEjfLkWZviSh8J6hDw5x4rlZWa777eP+qFfwZO5MjQp/n3cgpZgnbJFRUROuNEyaGQvv09uO05cgRKemVDysqte6xjH6YOts/+oX6dC/JK+Cwi7K0kUETQ2WLLTghyQfLkwKoXkP30v/j18yfyswyWsM1E70stmezMRYswsAeOP6j5/dZiSY9vPCPHJ0w3cGhV+YZcWVE3687cQyf++Iv4AGBzRWlGStGHfb3UB8fkeIClChkQDjjzrxfbrmeS3kC5w6hkbZFsreM8ZvWhDvB1eBxjU9KKbV0iQ== zh99998@gmail.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAtRix6NrCSXMNpL9WuD6DA198aGirvb8cYIcx5fS98/EWqA8n8yjBEjfLkWZviSh8J6hDw5x4rlZWa777eP+qFfwZO5MjQp/n3cgpZgnbJFRUROuNEyaGQvv09uO05cgRKemVDysqte6xjH6YOts/+oX6dC/JK+Cwi7K0kUETQ2WLLTghyQfLkwKoXkP30v/j18yfyswyWsM1E70stmezMRYswsAeOP6j5/dZiSY9vPCPHJ0w3cGhV+YZcWVE3687cQyf++Iv4AGBzRWlGStGHfb3UB8fkeIClChkQDjjzrxfbrmeS3kC5w6hkbZFsreM8ZvWhDvB1eBxjU9KKbV0iQ==
allow_password: false allow_password: false
hypervisor: false hypervisor: false
mirror_debian: http://deb.debian.org mirror_debian: http://deb.debian.org
...@@ -17,12 +17,12 @@ mycard: ...@@ -17,12 +17,12 @@ mycard:
mirror_ubuntu: http://archive.ubuntu.com mirror_ubuntu: http://archive.ubuntu.com
mirror_ubuntu_security: http://security.ubuntu.com mirror_ubuntu_security: http://security.ubuntu.com
mirror_docker: https://download.docker.com mirror_docker: https://download.docker.com
china_mirror: false china_mirror: true
install_docker: true install_docker: true
upgrade: true upgrade: true
reboot: true reboot: true
pip_executable_install: pip pip_executable_install: pip
registry: https://registry.mycard.moe registry: null
china_mirror_docker: false china_mirror_docker: false
china_mirror_pip: false china_mirror_pip: false
china_mirror_omf: false china_mirror_omf: false
......
# Mitogen
<!-- [![Build Status](https://travis-ci.org/dw/mitogen.png?branch=master)](https://travis-ci.org/dw/mitogen) -->
<a href="https://mitogen.networkgenomics.com/">Please see the documentation</a>.
![](https://i.imgur.com/eBM6LhJ.gif)
[![Total alerts](https://img.shields.io/lgtm/alerts/g/dw/mitogen.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/dw/mitogen/alerts/)
[![Build Status](https://travis-ci.org/dw/mitogen.svg?branch=master)](https://travis-ci.org/dw/mitogen)
[![Pipelines Status](https://dev.azure.com/dw-mitogen/Mitogen/_apis/build/status/dw.mitogen?branchName=master)](https://dev.azure.com/dw-mitogen/Mitogen/_build/latest?definitionId=1?branchName=master)
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> import decimal
>>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
...     raise TypeError(repr(obj) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
__version__ = '2.0.9'
__all__ = [
    'dump', 'dumps', 'load', 'loads',
    'JSONDecoder', 'JSONEncoder',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'

# Python 2 implicit relative imports (this code base targets Python 2.4+).
from decoder import JSONDecoder
from encoder import JSONEncoder

# Shared encoder instance reused by dump()/dumps() on their fast path when
# the caller passes only default options (see the cached-encoder checks there).
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
         allow_nan=True, cls=None, indent=None, separators=None,
         encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).

    The keyword arguments have the same meaning as in ``dumps()``:
    ``skipkeys`` true skips non-basic ``dict`` keys instead of raising
    ``TypeError``; ``ensure_ascii`` false allows ``unicode`` chunks to be
    written to ``fp``; ``check_circular`` false disables the circular
    reference check; ``allow_nan`` false makes out-of-range floats raise
    ``ValueError``; ``indent`` enables pretty-printing (0 inserts only
    newlines, ``None`` is most compact); ``separators`` overrides the
    default ``(', ', ': ')`` pair; ``encoding`` is the assumed encoding of
    ``str`` inputs (UTF-8 by default); ``default(obj)`` converts otherwise
    unserializable objects; ``cls`` selects a custom ``JSONEncoder``
    subclass.
    """
    all_defaults = (not skipkeys and ensure_ascii and check_circular
                    and allow_nan and cls is None and indent is None
                    and separators is None and encoding == 'utf-8'
                    and default is None and not kw)
    if all_defaults:
        # All-default call: reuse the shared module-level encoder.
        encoder = _default_encoder
    else:
        if cls is None:
            cls = JSONEncoder
        encoder = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
                      check_circular=check_circular, allow_nan=allow_nan,
                      indent=indent, separators=separators,
                      encoding=encoding, default=default, **kw)
    # Stream chunk by chunk so large documents never need to be fully
    # materialized in memory (could use writelines at a debuggability cost).
    write = fp.write
    for piece in encoder.iterencode(obj):
        write(piece)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
          allow_nan=True, cls=None, indent=None, separators=None,
          encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    If ``skipkeys`` is true then ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    will be skipped instead of raising a ``TypeError``.

    If ``ensure_ascii`` is false, then the return value will be a
    ``unicode`` instance subject to normal Python ``str`` to ``unicode``
    coercion rules instead of being escaped to an ASCII ``str``.

    If ``check_circular`` is false, then the circular reference check
    for container types will be skipped and a circular reference will
    result in an ``OverflowError`` (or worse).

    If ``allow_nan`` is false, then it will be a ``ValueError`` to
    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
    strict compliance of the JSON specification, instead of using the
    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).

    If ``indent`` is a non-negative integer, then JSON array elements and
    object members will be pretty-printed with that indent level. An indent
    level of 0 will only insert newlines. ``None`` is the most compact
    representation.

    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
    then it will be used instead of the default ``(', ', ': ')`` separators.
    ``(',', ':')`` is the most compact JSON representation.

    ``encoding`` is the character encoding for str instances, default is UTF-8.

    ``default(obj)`` is a function that should return a serializable version
    of obj or raise TypeError. The default simply raises TypeError.

    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), specify it with
    the ``cls`` kwarg.
    """
    # Fast path: with every option at its default, reuse the shared
    # module-level encoder instead of constructing a new JSONEncoder.
    if (not skipkeys and ensure_ascii and
            check_circular and allow_nan and
            cls is None and indent is None and separators is None and
            encoding == 'utf-8' and default is None and not kw):
        return _default_encoder.encode(obj)
    if cls is None:
        cls = JSONEncoder
    return cls(
        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, encoding=encoding, default=default,
        **kw).encode(obj)
# Shared decoder instance reused by loads() on its all-defaults fast path.
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
         parse_int=None, parse_constant=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object
    containing a JSON document) to a Python object.

    If the contents of ``fp`` use an ASCII-based encoding other than UTF-8
    (e.g. latin-1), pass its name as ``encoding``. Non-ASCII-based
    encodings (such as UCS-2) are not allowed: wrap the stream with
    ``codecs.getreader(fp)(encoding)`` or decode to ``unicode`` and call
    ``loads()`` directly.

    ``object_hook``, if given, is called with each decoded object literal
    (a ``dict``) and its return value replaces the ``dict`` — useful for
    custom decoders such as JSON-RPC class hinting.

    A custom ``JSONDecoder`` subclass may be supplied via the ``cls`` kwarg.
    """
    # Slurp the stream, then delegate every option unchanged to loads().
    document = fp.read()
    return loads(document,
                 encoding=encoding, cls=cls, object_hook=object_hook,
                 parse_float=parse_float, parse_int=parse_int,
                 parse_constant=parse_constant, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
          parse_int=None, parse_constant=None, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a
    JSON document) to a Python object.

    If ``s`` is a ``str`` in an ASCII-based encoding other than UTF-8
    (e.g. latin-1), pass its name as ``encoding``. Non-ASCII-based
    encodings (such as UCS-2) are not allowed; decode to ``unicode`` first.

    ``object_hook``, if given, is called with each decoded object literal
    (a ``dict``) and its return value replaces the ``dict``.

    ``parse_float`` / ``parse_int``, if given, are called with the string
    of every JSON float/int to be decoded (defaults are equivalent to
    ``float(num_str)`` / ``int(num_str)``), e.g. to use ``decimal.Decimal``.

    ``parse_constant``, if given, is called with one of the strings
    -Infinity, Infinity, NaN, null, true, false, e.g. to reject
    non-standard JSON numbers.

    A custom ``JSONDecoder`` subclass may be supplied via the ``cls`` kwarg.
    """
    all_defaults = (cls is None and encoding is None and object_hook is None
                    and parse_int is None and parse_float is None
                    and parse_constant is None and not kw)
    if all_defaults:
        # Fast path: reuse the shared module-level decoder.
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Forward only the hooks the caller actually supplied so the decoder's
    # own defaults remain in effect for the rest.
    optional_hooks = (('object_hook', object_hook),
                      ('parse_float', parse_float),
                      ('parse_int', parse_int),
                      ('parse_constant', parse_constant))
    for name, value in optional_hooks:
        if value is not None:
            kw[name] = value
    return cls(encoding=encoding, **kw).decode(s)
"""Implementation of JSONDecoder
"""
import re
import sys
import struct
from simplejson.scanner import make_scanner
try:
from simplejson._speedups import scanstring as c_scanstring
except ImportError:
c_scanstring = None
__all__ = ['JSONDecoder']

# VERBOSE lets the patterns below be written readably; MULTILINE and DOTALL
# make '.' and anchors behave sensibly across embedded newlines.
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL


def _floatconstants():
    """Return (NaN, +Infinity, -Infinity) built from IEEE-754 bit patterns.

    float('nan') / float('inf') are not reliable on every platform this
    code base supports (Python 2.4/2.5), hence the raw-bytes construction.
    """
    # Two big-endian doubles: quiet NaN followed by +Infinity.
    _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
    if sys.byteorder != 'big':
        # Byte-swap each 8-byte double for little-endian machines.
        _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
    nan, inf = struct.unpack('dd', _BYTES)
    return nan, inf, -inf


NaN, PosInf, NegInf = _floatconstants()
def linecol(doc, pos):
    """Return the (line, column) position of offset ``pos`` within ``doc``.

    Line numbers are 1-based.  Columns are 0-based on the first line and
    1-based on subsequent lines — this asymmetry matches the original
    behaviour that errmsg() (and the C speedups) rely on.
    """
    line = 1 + doc.count('\n', 0, pos)
    if line > 1:
        # Offset from the newline that precedes the current line.
        col = pos - doc.rindex('\n', 0, pos)
    else:
        col = pos
    return line, col
def errmsg(msg, doc, pos, end=None):
    """Format a decoder error message with line/column context.

    NOTE: this function is also invoked from the _speedups C extension,
    so its name and signature must stay stable.
    """
    lineno, colno = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    # Two-position form: report the full span of the offending region.
    endlineno, endcolno = linecol(doc, end)
    return ('%s: line %d column %d - line %d column %d (char %d - %d)'
            % (msg, lineno, colno, endlineno, endcolno, pos, end))
# Literal tokens accepted as numbers beyond the strict JSON spec.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}

# Matches a run of ordinary characters plus the terminator that stopped it:
# a closing quote, a backslash, or a raw control character.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)

# Single-character escape sequences (\uXXXX escapes are handled separately).
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}

# Encoding assumed for str input when the caller does not specify one.
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append  # local alias: micro-optimization for the hot loop
    begin = end - 1  # index of the opening quote, for error reporting
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            # Ran off the end of input without seeing a closing quote.
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                raise ValueError(errmsg(msg, s, end))
            else:
                # Non-strict mode: keep the raw control character verbatim.
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            # Input ends right after a backslash.
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise ValueError(errmsg(msg, s, end))
            end += 1
        else:
            # Unicode escape sequence: exactly four hex digits expected.
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise ValueError(errmsg(msg, s, end))
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise ValueError(errmsg(msg, s, end))
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise ValueError(errmsg(msg, s, end))
                uni2 = int(esc2, 16)
                # Combine high and low surrogates into a single code point.
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
# Use the C speedup if the extension compiled; fall back to pure Python.
scanstring = c_scanstring or py_scanstring

# JSON-legal whitespace between tokens.
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse a JSON object whose opening '{' is at index end-1; returns
    # (dict, end) with end one past the closing '}'.  Uses Python 2
    # tuple-parameter syntax; called via JSONDecoder.parse_object.
    pairs = {}
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            return pairs, end + 1
        elif nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end))
    end += 1
    while True:
        key, end = scanstring(s, end, encoding, strict)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise ValueError(errmsg("Expecting : delimiter", s, end))
        end += 1
        # Skip whitespace after ':' cheaply (one char) before falling back
        # to the regex for longer runs.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        pairs[key] = value
        # Find the ',' or '}' that follows the value.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar == '}':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
        # After ',' the next token must be the quoted name of the next pair.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end += 1
                nextchar = s[end]
                if nextchar in _ws:
                    end = _w(s, end + 1).end()
                    nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end - 1))
    if object_hook is not None:
        # Let the caller replace the dict (e.g. JSON-RPC class hinting).
        pairs = object_hook(pairs)
    return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse a JSON array whose opening '[' is at index end-1; returns
    # (list, end) with end one past the closing ']'.  Python 2
    # tuple-parameter syntax; called via JSONDecoder.parse_array.
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append  # hot-loop alias
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        _append(value)
        # Find the ',' or ']' that follows the element.
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end))
        # Skip whitespace after ',' cheaply (one char) before the regex.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """

    def __init__(self, encoding=None, object_hook=None, parse_float=None,
                 parse_int=None, parse_constant=None, strict=True):
        """``encoding`` determines the encoding used to interpret any ``str``
        objects decoded by this instance (utf-8 by default). It has no
        effect when decoding ``unicode`` objects.

        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as ``unicode``.

        ``object_hook``, if specified, will be called with the result
        of every JSON object decoded and its return value will be used in
        place of the given ``dict``. This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        ``parse_float``, if specified, will be called with the string
        of every JSON float to be decoded. By default this is equivalent to
        float(num_str). This can be used to use another datatype or parser
        for JSON floats (e.g. decimal.Decimal).

        ``parse_int``, if specified, will be called with the string
        of every JSON int to be decoded. By default this is equivalent to
        int(num_str). This can be used to use another datatype or parser
        for JSON integers (e.g. float).

        ``parse_constant``, if specified, will be called with one of the
        following strings: -Infinity, Infinity, NaN.
        This can be used to raise an exception if invalid JSON numbers
        are encountered.

        ``strict`` controls whether literal control characters are allowed
        inside strings (True rejects them).
        """
        self.encoding = encoding
        self.object_hook = object_hook
        # Fall back to the builtin converters when no hook is supplied.
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        # Parser entry points consumed by make_scanner().
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        self.scan_once = make_scanner(self)

    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        # Anything left after the document plus trailing whitespace is an error.
        if end != len(s):
            raise ValueError(errmsg("Extra data", s, end, len(s)))
        return obj

    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
        with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            raise ValueError("No JSON object could be decoded")
        return obj, end
"""Implementation of JSONEncoder
"""
import re
try:
from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from simplejson._speedups import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
# Characters that must always be escaped inside a JSON string.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# In ASCII-only mode, additionally escape everything outside printable ASCII.
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Detects bytes with the high bit set, i.e. str data needing UTF-8 decoding.
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Short escape forms; remaining control chars are filled in below as \uXXXX.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
for i in range(0x20):
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))

# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
FLOAT_REPR = repr
def encode_basestring(s):
    """Return a JSON representation of a Python string
    (quoted, with required characters escaped).
    """
    def _escape(match):
        # Look up the precomputed escape for the single matched character.
        return ESCAPE_DCT[match.group(0)]
    return '"%s"' % ESCAPE.sub(_escape, s)
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string
    """
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        # Byte strings with high bits set are assumed to be UTF-8 encoded.
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            return ESCAPE_DCT[s]
        except KeyError:
            # Not a short escape: emit \uXXXX (or a surrogate pair).
            n = ord(s)
            if n < 0x10000:
                return '\\u%04x' % (n,)
            else:
                # surrogate pair
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
# Prefer the C speedup when the extension module is available.
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    # Class-level defaults; overridden per instance when ``separators`` is given.
    item_separator = ', '
    key_separator = ': '

    def __init__(self, skipkeys=False, ensure_ascii=True,
                 check_circular=True, allow_nan=True, sort_keys=False,
                 indent=None, separators=None, encoding='utf-8', default=None):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None. If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped. If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such. This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that
        indent level. An indent level of 0 will only insert newlines.
        None is the most compact representation.

        If specified, separators should be a (item_separator, key_separator)
        tuple. The default is (', ', ': '). To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized. It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        if separators is not None:
            # Shadow the class-level separator attributes for this instance.
            self.item_separator, self.key_separator = separators
        if default is not None:
            # Shadow the default() method with the supplied callable.
            self.default = default
        self.encoding = encoding

    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        raise TypeError(repr(o) + " is not JSON serializable")

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    # Normalize non-UTF-8 byte strings to unicode first.
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed. The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        return ''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        # markers tracks container ids already on the encoding stack so
        # circular references can be detected (None disables the check).
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder so byte strings are decoded with the
            # configured encoding before escaping.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)

        def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
            # Check for specials. Note that this type of test is processor- and/or
            # platform-specific, so do tests which don't depend on the internals.
            if o != o:
                # Only NaN compares unequal to itself.
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))
            return text

        # The C encoder handles only the non-pretty, non-sorted one-shot case.
        if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
# NOTE: Python 2 only.  The parameter list deliberately shadows the
# ``False``/``True`` builtins and relies on ``basestring``/``long`` and
# ``dict.iteritems()``, none of which exist in Python 3.
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        ## HACK: hand-optimized bytecode; turn globals into locals
        False=False,
        True=True,
        ValueError=ValueError,
        basestring=basestring,
        dict=dict,
        float=float,
        id=id,
        int=int,
        isinstance=isinstance,
        list=list,
        long=long,
        str=str,
        tuple=tuple,
    ):
    """Build and return the pure-Python recursive encoder closure.

    ``markers`` is the (possibly None) circular-reference tracking dict;
    the remaining positional arguments mirror the JSONEncoder settings.
    The returned ``_iterencode(o, indent_level)`` yields JSON text chunks.
    """
    def _iterencode_list(lst, _current_indent_level):
        # Yield the JSON chunks for a non-string sequence (list/tuple).
        if not lst:
            yield '[]'
            return
        if markers is not None:
            # Register this container so a nested reference back to it
            # raises instead of recursing forever.
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                # After the first item, ``buf`` carries just the separator.
                buf = separator
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            else:
                # Container or unknown type: emit the separator first, then
                # delegate to the appropriate sub-generator.
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield ']'
        if markers is not None:
            # Fully emitted; the same container may now legally reappear
            # elsewhere in the document.
            del markers[markerid]
    def _iterencode_dict(dct, _current_indent_level):
        # Yield the JSON chunks for a mapping.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            # Coerce non-string keys to their JSON string form.
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them. Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, (int, long)):
                key = str(key)
            elif _skipkeys:
                continue
            else:
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield '}'
        if markers is not None:
            del markers[markerid]
    def _iterencode(o, _current_indent_level):
        # Top-level dispatcher: scalars are emitted inline, containers are
        # delegated, and anything else goes through the ``default`` hook.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        elif isinstance(o, dict):
            for chunk in _iterencode_dict(o, _current_indent_level):
                yield chunk
        else:
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            # Ask the user-supplied fallback for a serializable stand-in,
            # then encode whatever it returned.
            o = _default(o)
            for chunk in _iterencode(o, _current_indent_level):
                yield chunk
            if markers is not None:
                del markers[markerid]
    return _iterencode
"""JSON token scanner
"""
import re
try:
from simplejson._speedups import make_scanner as c_make_scanner
except ImportError:
c_make_scanner = None
__all__ = ['make_scanner']
NUMBER_RE = re.compile(
r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
(re.VERBOSE | re.MULTILINE | re.DOTALL))
def py_make_scanner(context):
parse_object = context.parse_object
parse_array = context.parse_array
parse_string = context.parse_string
match_number = NUMBER_RE.match
encoding = context.encoding
strict = context.strict
parse_float = context.parse_float
parse_int = context.parse_int
parse_constant = context.parse_constant
object_hook = context.object_hook
def _scan_once(string, idx):
try:
nextchar = string[idx]
except IndexError:
raise StopIteration
if nextchar == '"':
return parse_string(string, idx + 1, encoding, strict)
elif nextchar == '{':
return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
elif nextchar == '[':
return parse_array((string, idx + 1), _scan_once)
elif nextchar == 'n' and string[idx:idx + 4] == 'null':
return None, idx + 4
elif nextchar == 't' and string[idx:idx + 4] == 'true':
return True, idx + 4
elif nextchar == 'f' and string[idx:idx + 5] == 'false':
return False, idx + 5
m = match_number(string, idx)
if m is not None:
integer, frac, exp = m.groups()
if frac or exp:
res = parse_float(integer + (frac or '') + (exp or ''))
else:
res = parse_int(integer)
return res, m.end()
elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
return parse_constant('NaN'), idx + 3
elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
return parse_constant('Infinity'), idx + 8
elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
return parse_constant('-Infinity'), idx + 9
else:
raise StopIteration
return _scan_once
make_scanner = c_make_scanner or py_make_scanner
Copyright 2019, David Wilson Copyright 2021, the Mitogen authors
Redistribution and use in source and binary forms, with or without Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met: modification, are permitted provided that the following conditions are met:
......
Metadata-Version: 1.1 Metadata-Version: 2.1
Name: mitogen Name: mitogen
Version: 0.2.9 Version: 0.3.7
Summary: Library for writing distributed self-replicating programs. Summary: Library for writing distributed self-replicating programs.
Home-page: https://github.com/dw/mitogen/ Home-page: https://github.com/mitogen-hq/mitogen/
Author: David Wilson Author: David Wilson
Author-email: UNKNOWN
License: New BSD License: New BSD
Description: UNKNOWN
Platform: UNKNOWN
Classifier: Environment :: Console Classifier: Environment :: Console
Classifier: Framework :: Ansible
Classifier: Intended Audience :: System Administrators Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: BSD License Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: POSIX Classifier: Operating System :: POSIX
Classifier: Programming Language :: Python Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2.4
Classifier: Programming Language :: Python :: 2.5
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Topic :: System :: Distributed Computing Classifier: Topic :: System :: Distributed Computing
Classifier: Topic :: System :: Systems Administration Classifier: Topic :: System :: Systems Administration
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*
Description-Content-Type: text/markdown
License-File: LICENSE
# Mitogen
<a href="https://mitogen.networkgenomics.com/">Please see the documentation</a>.
![](https://i.imgur.com/eBM6LhJ.gif)
[![Total alerts](https://img.shields.io/lgtm/alerts/g/mitogen-hq/mitogen.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mitogen-hq/mitogen/alerts/)
[![Build Status](https://dev.azure.com/mitogen-hq/mitogen/_apis/build/status/mitogen-hq.mitogen?branchName=master)](https://dev.azure.com/mitogen-hq/mitogen/_build/latest?definitionId=1&branchName=master)
# Mitogen
<a href="https://mitogen.networkgenomics.com/">Please see the documentation</a>.
![](https://i.imgur.com/eBM6LhJ.gif)
[![Total alerts](https://img.shields.io/lgtm/alerts/g/mitogen-hq/mitogen.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mitogen-hq/mitogen/alerts/)
[![Build Status](https://dev.azure.com/mitogen-hq/mitogen/_apis/build/status/mitogen-hq.mitogen?branchName=master)](https://dev.azure.com/mitogen-hq/mitogen/_build/latest?definitionId=1&branchName=master)
...@@ -73,7 +73,9 @@ necessarily involves preventing the scheduler from making load balancing ...@@ -73,7 +73,9 @@ necessarily involves preventing the scheduler from making load balancing
decisions. decisions.
""" """
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import ctypes import ctypes
import logging import logging
import mmap import mmap
......
...@@ -26,8 +26,9 @@ ...@@ -26,8 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals from __future__ import unicode_literals
__metaclass__ = type
import errno import errno
import logging import logging
...@@ -40,11 +41,8 @@ import time ...@@ -40,11 +41,8 @@ import time
import ansible.constants as C import ansible.constants as C
import ansible.errors import ansible.errors
import ansible.plugins.connection import ansible.plugins.connection
import ansible.utils.shlex
import mitogen.core import mitogen.core
import mitogen.fork
import mitogen.utils
import ansible_mitogen.mixins import ansible_mitogen.mixins
import ansible_mitogen.parsing import ansible_mitogen.parsing
...@@ -52,6 +50,7 @@ import ansible_mitogen.process ...@@ -52,6 +50,7 @@ import ansible_mitogen.process
import ansible_mitogen.services import ansible_mitogen.services
import ansible_mitogen.target import ansible_mitogen.target
import ansible_mitogen.transport_config import ansible_mitogen.transport_config
import ansible_mitogen.utils.unsafe
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
...@@ -183,7 +182,7 @@ def _connect_docker(spec): ...@@ -183,7 +182,7 @@ def _connect_docker(spec):
'kwargs': { 'kwargs': {
'username': spec.remote_user(), 'username': spec.remote_user(),
'container': spec.remote_addr(), 'container': spec.remote_addr(),
'python_path': spec.python_path(), 'python_path': spec.python_path(rediscover_python=True),
'connect_timeout': spec.ansible_ssh_timeout() or spec.timeout(), 'connect_timeout': spec.ansible_ssh_timeout() or spec.timeout(),
'remote_name': get_remote_name(spec), 'remote_name': get_remote_name(spec),
} }
...@@ -262,6 +261,21 @@ def _connect_machinectl(spec): ...@@ -262,6 +261,21 @@ def _connect_machinectl(spec):
return _connect_setns(spec, kind='machinectl') return _connect_setns(spec, kind='machinectl')
def _connect_podman(spec):
"""
    Return ContextService arguments for a podman connection.
"""
return {
'method': 'podman',
'kwargs': {
'username': spec.remote_user(),
'container': spec.remote_addr(),
'python_path': spec.python_path(rediscover_python=True),
'connect_timeout': spec.ansible_ssh_timeout() or spec.timeout(),
'remote_name': get_remote_name(spec),
}
}
def _connect_setns(spec, kind=None): def _connect_setns(spec, kind=None):
""" """
Return ContextService arguments for a mitogen_setns connection. Return ContextService arguments for a mitogen_setns connection.
...@@ -400,6 +414,7 @@ CONNECTION_METHOD = { ...@@ -400,6 +414,7 @@ CONNECTION_METHOD = {
'lxc': _connect_lxc, 'lxc': _connect_lxc,
'lxd': _connect_lxd, 'lxd': _connect_lxd,
'machinectl': _connect_machinectl, 'machinectl': _connect_machinectl,
'podman': _connect_podman,
'setns': _connect_setns, 'setns': _connect_setns,
'ssh': _connect_ssh, 'ssh': _connect_ssh,
'smart': _connect_ssh, # issue #548. 'smart': _connect_ssh, # issue #548.
...@@ -469,6 +484,7 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -469,6 +484,7 @@ class Connection(ansible.plugins.connection.ConnectionBase):
login_context = None login_context = None
#: Only sudo, su, and doas are supported for now. #: Only sudo, su, and doas are supported for now.
# Ansible ConnectionBase attribute, removed in Ansible >= 2.8
become_methods = ['sudo', 'su', 'doas'] become_methods = ['sudo', 'su', 'doas']
#: Dict containing init_child() return value as recorded at startup by #: Dict containing init_child() return value as recorded at startup by
...@@ -503,14 +519,8 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -503,14 +519,8 @@ class Connection(ansible.plugins.connection.ConnectionBase):
#: matching vanilla Ansible behaviour. #: matching vanilla Ansible behaviour.
loader_basedir = None loader_basedir = None
def __del__(self): # set by `_get_task_vars()` for interpreter discovery
""" _action = None
Ansible cannot be trusted to always call close() e.g. the synchronize
action constructs a local connection like this. So provide a destructor
in the hopes of catching these cases.
"""
# https://github.com/dw/mitogen/issues/140
self.close()
def on_action_run(self, task_vars, delegate_to_hostname, loader_basedir): def on_action_run(self, task_vars, delegate_to_hostname, loader_basedir):
""" """
...@@ -551,6 +561,23 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -551,6 +561,23 @@ class Connection(ansible.plugins.connection.ConnectionBase):
connection passed into any running action. connection passed into any running action.
""" """
if self._task_vars is not None: if self._task_vars is not None:
# check for if self._action has already been set or not
# there are some cases where the ansible executor passes in task_vars
# so we don't walk the stack to find them
# TODO: is there a better way to get the ActionModuleMixin object?
# ansible python discovery needs it to run discover_interpreter()
if not isinstance(self._action, ansible_mitogen.mixins.ActionModuleMixin):
f = sys._getframe()
while f:
if f.f_code.co_name == 'run':
f_self = f.f_locals.get('self')
if isinstance(f_self, ansible_mitogen.mixins.ActionModuleMixin):
self._action = f_self
break
elif f.f_code.co_name == '_execute_meta':
break
f = f.f_back
return self._task_vars return self._task_vars
f = sys._getframe() f = sys._getframe()
...@@ -559,6 +586,9 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -559,6 +586,9 @@ class Connection(ansible.plugins.connection.ConnectionBase):
f_locals = f.f_locals f_locals = f.f_locals
f_self = f_locals.get('self') f_self = f_locals.get('self')
if isinstance(f_self, ansible_mitogen.mixins.ActionModuleMixin): if isinstance(f_self, ansible_mitogen.mixins.ActionModuleMixin):
# backref for python interpreter discovery, should be safe because _get_task_vars
# is always called before running interpreter discovery
self._action = f_self
task_vars = f_locals.get('task_vars') task_vars = f_locals.get('task_vars')
if task_vars: if task_vars:
LOG.debug('recovered task_vars from Action') LOG.debug('recovered task_vars from Action')
...@@ -600,16 +630,33 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -600,16 +630,33 @@ class Connection(ansible.plugins.connection.ConnectionBase):
does not make sense to extract connection-related configuration for the does not make sense to extract connection-related configuration for the
delegated-to machine from them. delegated-to machine from them.
""" """
def _fetch_task_var(task_vars, key):
"""
Special helper func in case vars can be templated
"""
SPECIAL_TASK_VARS = [
'ansible_python_interpreter'
]
if key in task_vars:
val = task_vars[key]
if '{' in str(val) and key in SPECIAL_TASK_VARS:
# template every time rather than storing in a cache
# in case a different template value is used in a different task
val = self.templar.template(
val,
preserve_trailing_newlines=True,
escape_backslashes=False
)
return val
task_vars = self._get_task_vars() task_vars = self._get_task_vars()
if self.delegate_to_hostname is None: if self.delegate_to_hostname is None:
if key in task_vars: return _fetch_task_var(task_vars, key)
return task_vars[key]
else: else:
delegated_vars = task_vars['ansible_delegated_vars'] delegated_vars = task_vars['ansible_delegated_vars']
if self.delegate_to_hostname in delegated_vars: if self.delegate_to_hostname in delegated_vars:
task_vars = delegated_vars[self.delegate_to_hostname] task_vars = delegated_vars[self.delegate_to_hostname]
if key in task_vars: return _fetch_task_var(task_vars, key)
return task_vars[key]
return default return default
...@@ -629,6 +676,9 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -629,6 +676,9 @@ class Connection(ansible.plugins.connection.ConnectionBase):
@property @property
def connected(self): def connected(self):
"""
Ansible connection plugin property. Used by ansible-connection command.
"""
return self.context is not None return self.context is not None
def _spec_from_via(self, proxied_inventory_name, via_spec): def _spec_from_via(self, proxied_inventory_name, via_spec):
...@@ -654,6 +704,8 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -654,6 +704,8 @@ class Connection(ansible.plugins.connection.ConnectionBase):
inventory_name=inventory_name, inventory_name=inventory_name,
play_context=self._play_context, play_context=self._play_context,
host_vars=dict(via_vars), # TODO: make it lazy host_vars=dict(via_vars), # TODO: make it lazy
task_vars=self._get_task_vars(), # needed for interpreter discovery in parse_python_path
action=self._action,
become_method=become_method or None, become_method=become_method or None,
become_user=become_user or None, become_user=become_user or None,
) )
...@@ -745,7 +797,7 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -745,7 +797,7 @@ class Connection(ansible.plugins.connection.ConnectionBase):
call_context=self.binding.get_service_context(), call_context=self.binding.get_service_context(),
service_name='ansible_mitogen.services.ContextService', service_name='ansible_mitogen.services.ContextService',
method_name='get', method_name='get',
stack=mitogen.utils.cast(list(stack)), stack=ansible_mitogen.utils.unsafe.cast(list(stack)),
) )
except mitogen.core.CallError: except mitogen.core.CallError:
LOG.warning('Connection failed; stack configuration was:\n%s', LOG.warning('Connection failed; stack configuration was:\n%s',
...@@ -785,14 +837,18 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -785,14 +837,18 @@ class Connection(ansible.plugins.connection.ConnectionBase):
the _connect_*() service calls defined above to cause the master the _connect_*() service calls defined above to cause the master
process to establish the real connection on our behalf, or return a process to establish the real connection on our behalf, or return a
reference to the existing one. reference to the existing one.
Ansible connection plugin method.
""" """
# In some Ansible connection plugins this method returns self.
# However nothing I've found uses it, it's not even assigned.
if self.connected: if self.connected:
return return
inventory_name, stack = self._build_stack() inventory_name, stack = self._build_stack()
worker_model = ansible_mitogen.process.get_worker_model() worker_model = ansible_mitogen.process.get_worker_model()
self.binding = worker_model.get_binding( self.binding = worker_model.get_binding(
mitogen.utils.cast(inventory_name) ansible_mitogen.utils.unsafe.cast(inventory_name)
) )
self._connect_stack(stack) self._connect_stack(stack)
...@@ -823,6 +879,8 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -823,6 +879,8 @@ class Connection(ansible.plugins.connection.ConnectionBase):
Arrange for the mitogen.master.Router running in the worker to Arrange for the mitogen.master.Router running in the worker to
gracefully shut down, and wait for shutdown to complete. Safe to call gracefully shut down, and wait for shutdown to complete. Safe to call
multiple times. multiple times.
Ansible connection plugin method.
""" """
self._put_connection() self._put_connection()
if self.binding: if self.binding:
...@@ -839,6 +897,8 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -839,6 +897,8 @@ class Connection(ansible.plugins.connection.ConnectionBase):
any local state we hold for the connection, returns the Connection to any local state we hold for the connection, returns the Connection to
the 'disconnected' state, and informs ContextService the connection is the 'disconnected' state, and informs ContextService the connection is
bad somehow, and should be shut down and discarded. bad somehow, and should be shut down and discarded.
Ansible connection plugin method.
""" """
if self._play_context.remote_addr is None: if self._play_context.remote_addr is None:
# <2.5.6 incorrectly populate PlayContext for reset_connection # <2.5.6 incorrectly populate PlayContext for reset_connection
...@@ -847,6 +907,18 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -847,6 +907,18 @@ class Connection(ansible.plugins.connection.ConnectionBase):
self.reset_compat_msg self.reset_compat_msg
) )
# Strategy's _execute_meta doesn't have an action obj but we'll need one for
# running interpreter_discovery
# will create a new temporary action obj for this purpose
self._action = ansible_mitogen.mixins.ActionModuleMixin(
task=0,
connection=self,
play_context=self._play_context,
loader=0,
templar=0,
shared_loader_obj=0
)
# Clear out state in case we were ever connected. # Clear out state in case we were ever connected.
self.close() self.close()
...@@ -861,7 +933,7 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -861,7 +933,7 @@ class Connection(ansible.plugins.connection.ConnectionBase):
call_context=binding.get_service_context(), call_context=binding.get_service_context(),
service_name='ansible_mitogen.services.ContextService', service_name='ansible_mitogen.services.ContextService',
method_name='reset', method_name='reset',
stack=mitogen.utils.cast(list(stack)), stack=ansible_mitogen.utils.unsafe.cast(list(stack)),
) )
finally: finally:
binding.close() binding.close()
...@@ -933,12 +1005,14 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -933,12 +1005,14 @@ class Connection(ansible.plugins.connection.ConnectionBase):
Data to supply on ``stdin`` of the process. Data to supply on ``stdin`` of the process.
:returns: :returns:
(return code, stdout bytes, stderr bytes) (return code, stdout bytes, stderr bytes)
Ansible connection plugin method.
""" """
emulate_tty = (not in_data and sudoable) emulate_tty = (not in_data and sudoable)
rc, stdout, stderr = self.get_chain().call( rc, stdout, stderr = self.get_chain().call(
ansible_mitogen.target.exec_command, ansible_mitogen.target.exec_command,
cmd=mitogen.utils.cast(cmd), cmd=ansible_mitogen.utils.unsafe.cast(cmd),
in_data=mitogen.utils.cast(in_data), in_data=ansible_mitogen.utils.unsafe.cast(in_data),
chdir=mitogen_chdir or self.get_default_cwd(), chdir=mitogen_chdir or self.get_default_cwd(),
emulate_tty=emulate_tty, emulate_tty=emulate_tty,
) )
...@@ -958,12 +1032,14 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -958,12 +1032,14 @@ class Connection(ansible.plugins.connection.ConnectionBase):
Remote filesystem path to read. Remote filesystem path to read.
:param str out_path: :param str out_path:
Local filesystem path to write. Local filesystem path to write.
Ansible connection plugin method.
""" """
self._connect() self._connect()
ansible_mitogen.target.transfer_file( ansible_mitogen.target.transfer_file(
context=self.context, context=self.context,
# in_path may be AnsibleUnicode # in_path may be AnsibleUnicode
in_path=mitogen.utils.cast(in_path), in_path=ansible_mitogen.utils.unsafe.cast(in_path),
out_path=out_path out_path=out_path
) )
...@@ -981,7 +1057,7 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -981,7 +1057,7 @@ class Connection(ansible.plugins.connection.ConnectionBase):
""" """
self.get_chain().call_no_reply( self.get_chain().call_no_reply(
ansible_mitogen.target.write_path, ansible_mitogen.target.write_path,
mitogen.utils.cast(out_path), ansible_mitogen.utils.unsafe.cast(out_path),
mitogen.core.Blob(data), mitogen.core.Blob(data),
mode=mode, mode=mode,
utimes=utimes, utimes=utimes,
...@@ -1007,6 +1083,8 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -1007,6 +1083,8 @@ class Connection(ansible.plugins.connection.ConnectionBase):
Local filesystem path to read. Local filesystem path to read.
:param str out_path: :param str out_path:
Remote filesystem path to write. Remote filesystem path to write.
Ansible connection plugin method.
""" """
try: try:
st = os.stat(in_path) st = os.stat(in_path)
...@@ -1027,7 +1105,7 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -1027,7 +1105,7 @@ class Connection(ansible.plugins.connection.ConnectionBase):
s = fp.read(self.SMALL_FILE_LIMIT + 1) s = fp.read(self.SMALL_FILE_LIMIT + 1)
finally: finally:
fp.close() fp.close()
except OSError: except OSError as e:
self._throw_io_error(e, in_path) self._throw_io_error(e, in_path)
raise raise
...@@ -1041,7 +1119,7 @@ class Connection(ansible.plugins.connection.ConnectionBase): ...@@ -1041,7 +1119,7 @@ class Connection(ansible.plugins.connection.ConnectionBase):
call_context=self.binding.get_service_context(), call_context=self.binding.get_service_context(),
service_name='mitogen.service.FileService', service_name='mitogen.service.FileService',
method_name='register', method_name='register',
path=mitogen.utils.cast(in_path) path=ansible_mitogen.utils.unsafe.cast(in_path)
) )
# For now this must remain synchronous, as the action plug-in may have # For now this must remain synchronous, as the action plug-in may have
......
...@@ -30,10 +30,16 @@ ...@@ -30,10 +30,16 @@
Stable names for PluginLoader instances across Ansible versions. Stable names for PluginLoader instances across Ansible versions.
""" """
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import ansible.errors
import ansible_mitogen.utils
__all__ = [ __all__ = [
'action_loader', 'action_loader',
'become_loader',
'connection_loader', 'connection_loader',
'module_loader', 'module_loader',
'module_utils_loader', 'module_utils_loader',
...@@ -41,22 +47,57 @@ __all__ = [ ...@@ -41,22 +47,57 @@ __all__ = [
'strategy_loader', 'strategy_loader',
] ]
try:
from ansible.plugins.loader import action_loader
from ansible.plugins.loader import connection_loader
from ansible.plugins.loader import module_loader
from ansible.plugins.loader import module_utils_loader
from ansible.plugins.loader import shell_loader
from ansible.plugins.loader import strategy_loader
except ImportError: # Ansible <2.4
from ansible.plugins import action_loader
from ansible.plugins import connection_loader
from ansible.plugins import module_loader
from ansible.plugins import module_utils_loader
from ansible.plugins import shell_loader
from ansible.plugins import strategy_loader
ANSIBLE_VERSION_MIN = (2, 10)
ANSIBLE_VERSION_MAX = (2, 16)
NEW_VERSION_MSG = (
"Your Ansible version (%s) is too recent. The most recent version\n"
"supported by Mitogen for Ansible is %s.x. Please check the Mitogen\n"
"release notes to see if a new version is available, otherwise\n"
"subscribe to the corresponding GitHub issue to be notified when\n"
"support becomes available.\n"
"\n"
" https://mitogen.rtfd.io/en/latest/changelog.html\n"
" https://github.com/mitogen-hq/mitogen/issues/\n"
)
OLD_VERSION_MSG = (
"Your version of Ansible (%s) is too old. The oldest version supported by "
"Mitogen for Ansible is %s."
)
def assert_supported_release():
"""
Throw AnsibleError with a descriptive message in case of being loaded into
an unsupported Ansible release.
"""
v = ansible_mitogen.utils.ansible_version
if v[:2] < ANSIBLE_VERSION_MIN:
raise ansible.errors.AnsibleError(
OLD_VERSION_MSG % (v, ANSIBLE_VERSION_MIN)
)
if v[:2] > ANSIBLE_VERSION_MAX:
raise ansible.errors.AnsibleError(
NEW_VERSION_MSG % (v, ANSIBLE_VERSION_MAX)
)
# this is the first file our strategy plugins import, so we need to check this here
# in prior Ansible versions, connection_loader.get_with_context didn't exist, so if a user
# is trying to load an old Ansible version, we'll fail and error gracefully
assert_supported_release()
from ansible.plugins.loader import action_loader
from ansible.plugins.loader import become_loader
from ansible.plugins.loader import connection_loader
from ansible.plugins.loader import module_loader
from ansible.plugins.loader import module_utils_loader
from ansible.plugins.loader import shell_loader
from ansible.plugins.loader import strategy_loader
# These are original, unwrapped implementations # These are original, unwrapped implementations
action_loader__get = action_loader.get action_loader__get = action_loader.get
connection_loader__get = connection_loader.get connection_loader__get = connection_loader.get_with_context
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import logging import logging
import os import os
...@@ -36,8 +38,8 @@ import mitogen.utils ...@@ -36,8 +38,8 @@ import mitogen.utils
try: try:
from __main__ import display from __main__ import display
except ImportError: except ImportError:
from ansible.utils.display import Display import ansible.utils.display
display = Display() display = ansible.utils.display.Display()
#: The process name set via :func:`set_process_name`. #: The process name set via :func:`set_process_name`.
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import logging import logging
import os import os
import pwd import pwd
...@@ -48,11 +50,13 @@ import ansible.plugins.action ...@@ -48,11 +50,13 @@ import ansible.plugins.action
import mitogen.core import mitogen.core
import mitogen.select import mitogen.select
import mitogen.utils
import ansible_mitogen.connection import ansible_mitogen.connection
import ansible_mitogen.planner import ansible_mitogen.planner
import ansible_mitogen.target import ansible_mitogen.target
import ansible_mitogen.utils
import ansible_mitogen.utils.unsafe
from ansible.module_utils._text import to_text from ansible.module_utils._text import to_text
try: try:
...@@ -60,6 +64,17 @@ try: ...@@ -60,6 +64,17 @@ try:
except ImportError: except ImportError:
from ansible.vars.unsafe_proxy import wrap_var from ansible.vars.unsafe_proxy import wrap_var
try:
# ansible 2.8 moved remove_internal_keys to the clean module
from ansible.vars.clean import remove_internal_keys
except ImportError:
try:
from ansible.vars.manager import remove_internal_keys
except ImportError:
# ansible 2.3.3 has remove_internal_keys as a protected func on the action class
# we'll fallback to calling self._remove_internal_keys in this case
remove_internal_keys = lambda a: "Not found"
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
...@@ -108,6 +123,16 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase): ...@@ -108,6 +123,16 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
if not isinstance(connection, ansible_mitogen.connection.Connection): if not isinstance(connection, ansible_mitogen.connection.Connection):
_, self.__class__ = type(self).__bases__ _, self.__class__ = type(self).__bases__
# required for python interpreter discovery
connection.templar = self._templar
self._finding_python_interpreter = False
self._rediscovered_python = False
# redeclaring interpreter discovery vars here in case running ansible < 2.8.0
self._discovered_interpreter_key = None
self._discovered_interpreter = False
self._discovery_deprecation_warnings = []
self._discovery_warnings = []
def run(self, tmp=None, task_vars=None): def run(self, tmp=None, task_vars=None):
""" """
Override run() to notify Connection of task-specific data, so it has a Override run() to notify Connection of task-specific data, so it has a
...@@ -162,7 +187,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase): ...@@ -162,7 +187,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
LOG.debug('_remote_file_exists(%r)', path) LOG.debug('_remote_file_exists(%r)', path)
return self._connection.get_chain().call( return self._connection.get_chain().call(
ansible_mitogen.target.file_exists, ansible_mitogen.target.file_exists,
mitogen.utils.cast(path) ansible_mitogen.utils.unsafe.cast(path)
) )
def _configure_module(self, module_name, module_args, task_vars=None): def _configure_module(self, module_name, module_args, task_vars=None):
...@@ -205,7 +230,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase): ...@@ -205,7 +230,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
with a pipelined call to :func:`ansible_mitogen.target.prune_tree`. with a pipelined call to :func:`ansible_mitogen.target.prune_tree`.
""" """
LOG.debug('_remove_tmp_path(%r)', tmp_path) LOG.debug('_remove_tmp_path(%r)', tmp_path)
if tmp_path is None and ansible.__version__ > '2.6': if tmp_path is None and ansible_mitogen.utils.ansible_version[:2] >= (2, 6):
tmp_path = self._connection._shell.tmpdir # 06f73ad578d tmp_path = self._connection._shell.tmpdir # 06f73ad578d
if tmp_path is not None: if tmp_path is not None:
self._connection.get_chain().call_no_reply( self._connection.get_chain().call_no_reply(
...@@ -299,7 +324,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase): ...@@ -299,7 +324,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
# ~root/.ansible -> /root/.ansible # ~root/.ansible -> /root/.ansible
return self._connection.get_chain(use_login=(not sudoable)).call( return self._connection.get_chain(use_login=(not sudoable)).call(
os.path.expanduser, os.path.expanduser,
mitogen.utils.cast(path), ansible_mitogen.utils.unsafe.cast(path),
) )
def get_task_timeout_secs(self): def get_task_timeout_secs(self):
...@@ -314,7 +339,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase): ...@@ -314,7 +339,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
def _set_temp_file_args(self, module_args, wrap_async): def _set_temp_file_args(self, module_args, wrap_async):
# Ansible>2.5 module_utils reuses the action's temporary directory if # Ansible>2.5 module_utils reuses the action's temporary directory if
# one exists. Older versions error if this key is present. # one exists. Older versions error if this key is present.
if ansible.__version__ > '2.5': if ansible_mitogen.utils.ansible_version[:2] >= (2, 5):
if wrap_async: if wrap_async:
# Sharing is not possible with async tasks, as in that case, # Sharing is not possible with async tasks, as in that case,
# the directory must outlive the action plug-in. # the directory must outlive the action plug-in.
...@@ -325,7 +350,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase): ...@@ -325,7 +350,7 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
# If _ansible_tmpdir is unset, Ansible>2.6 module_utils will use # If _ansible_tmpdir is unset, Ansible>2.6 module_utils will use
# _ansible_remote_tmp as the location to create the module's temporary # _ansible_remote_tmp as the location to create the module's temporary
# directory. Older versions error if this key is present. # directory. Older versions error if this key is present.
if ansible.__version__ > '2.6': if ansible_mitogen.utils.ansible_version[:2] >= (2, 6):
module_args['_ansible_remote_tmp'] = ( module_args['_ansible_remote_tmp'] = (
self._connection.get_good_temp_dir() self._connection.get_good_temp_dir()
) )
...@@ -350,26 +375,61 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase): ...@@ -350,26 +375,61 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
self._compute_environment_string(env) self._compute_environment_string(env)
self._set_temp_file_args(module_args, wrap_async) self._set_temp_file_args(module_args, wrap_async)
# there's a case where if a task shuts down the node and then immediately calls
# wait_for_connection, the `ping` test from Ansible won't pass because we lost connection
# clearing out context forces a reconnect
# see https://github.com/dw/mitogen/issues/655 and Ansible's `wait_for_connection` module for more info
if module_name == 'ansible.legacy.ping' and type(self).__name__ == 'wait_for_connection':
self._connection.context = None
self._connection._connect() self._connection._connect()
result = ansible_mitogen.planner.invoke( result = ansible_mitogen.planner.invoke(
ansible_mitogen.planner.Invocation( ansible_mitogen.planner.Invocation(
action=self, action=self,
connection=self._connection, connection=self._connection,
module_name=mitogen.core.to_text(module_name), module_name=ansible_mitogen.utils.unsafe.cast(mitogen.core.to_text(module_name)),
module_args=mitogen.utils.cast(module_args), module_args=ansible_mitogen.utils.unsafe.cast(module_args),
task_vars=task_vars, task_vars=task_vars,
templar=self._templar, templar=self._templar,
env=mitogen.utils.cast(env), env=ansible_mitogen.utils.unsafe.cast(env),
wrap_async=wrap_async, wrap_async=wrap_async,
timeout_secs=self.get_task_timeout_secs(), timeout_secs=self.get_task_timeout_secs(),
) )
) )
if tmp and ansible.__version__ < '2.5' and delete_remote_tmp: if tmp and delete_remote_tmp and ansible_mitogen.utils.ansible_version[:2] < (2, 5):
# Built-in actions expected tmpdir to be cleaned up automatically # Built-in actions expected tmpdir to be cleaned up automatically
# on _execute_module(). # on _execute_module().
self._remove_tmp_path(tmp) self._remove_tmp_path(tmp)
# prevents things like discovered_interpreter_* or ansible_discovered_interpreter_* from being set
# handle ansible 2.3.3 that has remove_internal_keys in a different place
check = remove_internal_keys(result)
if check == 'Not found':
self._remove_internal_keys(result)
# taken from _execute_module of ansible 2.8.6
# propagate interpreter discovery results back to the controller
if self._discovered_interpreter_key:
if result.get('ansible_facts') is None:
result['ansible_facts'] = {}
# only cache discovered_interpreter if we're not running a rediscovery
# rediscovery happens in places like docker connections that could have different
# python interpreters than the main host
if not self._rediscovered_python:
result['ansible_facts'][self._discovered_interpreter_key] = self._discovered_interpreter
if self._discovery_warnings:
if result.get('warnings') is None:
result['warnings'] = []
result['warnings'].extend(self._discovery_warnings)
if self._discovery_deprecation_warnings:
if result.get('deprecations') is None:
result['deprecations'] = []
result['deprecations'].extend(self._discovery_deprecation_warnings)
return wrap_var(result) return wrap_var(result)
def _postprocess_response(self, result): def _postprocess_response(self, result):
...@@ -407,17 +467,54 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase): ...@@ -407,17 +467,54 @@ class ActionModuleMixin(ansible.plugins.action.ActionBase):
""" """
LOG.debug('_low_level_execute_command(%r, in_data=%r, exe=%r, dir=%r)', LOG.debug('_low_level_execute_command(%r, in_data=%r, exe=%r, dir=%r)',
cmd, type(in_data), executable, chdir) cmd, type(in_data), executable, chdir)
if executable is None: # executable defaults to False if executable is None: # executable defaults to False
executable = self._play_context.executable executable = self._play_context.executable
if executable: if executable:
cmd = executable + ' -c ' + shlex_quote(cmd) cmd = executable + ' -c ' + shlex_quote(cmd)
rc, stdout, stderr = self._connection.exec_command( # TODO: HACK: if finding python interpreter then we need to keep
# calling exec_command until we run into the right python we'll use
# chicken-and-egg issue, mitogen needs a python to run low_level_execute_command
# which is required by Ansible's discover_interpreter function
if self._finding_python_interpreter:
possible_pythons = [
'/usr/bin/python',
'python3',
'python3.7',
'python3.6',
'python3.5',
'python2.7',
'python2.6',
'/usr/libexec/platform-python',
'/usr/bin/python3',
'python'
]
else:
# not used, just adding a filler value
possible_pythons = ['python']
def _run_cmd():
return self._connection.exec_command(
cmd=cmd, cmd=cmd,
in_data=in_data, in_data=in_data,
sudoable=sudoable, sudoable=sudoable,
mitogen_chdir=chdir, mitogen_chdir=chdir,
) )
for possible_python in possible_pythons:
try:
self._possible_python_interpreter = possible_python
rc, stdout, stderr = _run_cmd()
# TODO: what exception is thrown?
except:
# we've reached the last python attempted and failed
# TODO: could use enumerate(), need to check which version of python first had it though
if possible_python == 'python':
raise
else:
continue
stdout_text = to_text(stdout, errors=encoding_errors) stdout_text = to_text(stdout, errors=encoding_errors)
return { return {
......
...@@ -26,19 +26,36 @@ ...@@ -26,19 +26,36 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals from __future__ import unicode_literals
__metaclass__ = type
import collections import collections
import imp import logging
import os import os
import re
import sys
try:
# Python >= 3.4, PEP 451 ModuleSpec API
import importlib.machinery
import importlib.util
except ImportError:
# Python < 3.4, PEP 302 Import Hooks
import imp
import mitogen.master import mitogen.master
LOG = logging.getLogger(__name__)
PREFIX = 'ansible.module_utils.' PREFIX = 'ansible.module_utils.'
# Analog of `importlib.machinery.ModuleSpec` or `pkgutil.ModuleInfo`.
# name Unqualified name of the module.
# path Filesystem path of the module.
# kind One of the constants in `imp`, as returned in `imp.find_module()`
# parent `ansible_mitogen.module_finder.Module` of parent package (if any).
Module = collections.namedtuple('Module', 'name path kind parent') Module = collections.namedtuple('Module', 'name path kind parent')
...@@ -118,14 +135,121 @@ def find_relative(parent, name, path=()): ...@@ -118,14 +135,121 @@ def find_relative(parent, name, path=()):
def scan_fromlist(code): def scan_fromlist(code):
"""Return an iterator of (level, name) for explicit imports in a code
object.
Not all names identify a module. `from os import name, path` generates
`(0, 'os.name'), (0, 'os.path')`, but `os.name` is usually a string.
>>> src = 'import a; import b.c; from d.e import f; from g import h, i\\n'
>>> code = compile(src, '<str>', 'exec')
>>> list(scan_fromlist(code))
[(0, 'a'), (0, 'b.c'), (0, 'd.e.f'), (0, 'g.h'), (0, 'g.i')]
"""
for level, modname_s, fromlist in mitogen.master.scan_code_imports(code): for level, modname_s, fromlist in mitogen.master.scan_code_imports(code):
for name in fromlist: for name in fromlist:
yield level, '%s.%s' % (modname_s, name) yield level, str('%s.%s' % (modname_s, name))
if not fromlist: if not fromlist:
yield level, modname_s yield level, modname_s
def walk_imports(code, prefix=None):
"""Return an iterator of names for implicit parent imports & explicit
imports in a code object.
If a prefix is provided, then only children of that prefix are included.
Not all names identify a module. `from os import name, path` generates
`'os', 'os.name', 'os.path'`, but `os.name` is usually a string.
>>> source = 'import a; import b; import b.c; from b.d import e, f\\n'
>>> code = compile(source, '<str>', 'exec')
>>> list(walk_imports(code))
['a', 'b', 'b', 'b.c', 'b', 'b.d', 'b.d.e', 'b.d.f']
>>> list(walk_imports(code, prefix='b'))
['b.c', 'b.d', 'b.d.e', 'b.d.f']
"""
if prefix is None:
prefix = ''
pattern = re.compile(r'(^|\.)(\w+)')
start = len(prefix)
for _, name, fromlist in mitogen.master.scan_code_imports(code):
if not name.startswith(prefix):
continue
for match in pattern.finditer(name, start):
yield name[:match.end()]
for leaf in fromlist:
yield str('%s.%s' % (name, leaf))
def scan(module_name, module_path, search_path): def scan(module_name, module_path, search_path):
# type: (str, str, list[str]) -> list[(str, str, bool)]
"""Return a list of (name, path, is_package) for ansible.module_utils
imports used by an Ansible module.
"""
log = LOG.getChild('scan')
log.debug('%r, %r, %r', module_name, module_path, search_path)
if sys.version_info >= (3, 4):
result = _scan_importlib_find_spec(
module_name, module_path, search_path,
)
log.debug('_scan_importlib_find_spec %r', result)
else:
result = _scan_imp_find_module(module_name, module_path, search_path)
log.debug('_scan_imp_find_module %r', result)
return result
def _scan_importlib_find_spec(module_name, module_path, search_path):
# type: (str, str, list[str]) -> list[(str, str, bool)]
module = importlib.machinery.ModuleSpec(
module_name, loader=None, origin=module_path,
)
prefix = importlib.machinery.ModuleSpec(
PREFIX.rstrip('.'), loader=None,
)
prefix.submodule_search_locations = search_path
queue = collections.deque([module])
specs = {prefix.name: prefix}
while queue:
spec = queue.popleft()
if spec.origin is None:
continue
try:
with open(spec.origin, 'rb') as f:
code = compile(f.read(), spec.name, 'exec')
except Exception as exc:
raise ValueError((exc, module, spec, specs))
for name in walk_imports(code, prefix.name):
if name in specs:
continue
parent_name = name.rpartition('.')[0]
parent = specs[parent_name]
if parent is None or not parent.submodule_search_locations:
specs[name] = None
continue
child = importlib.util._find_spec(
name, parent.submodule_search_locations,
)
if child is None or child.origin is None:
specs[name] = None
continue
specs[name] = child
queue.append(child)
del specs[prefix.name]
return sorted(
(spec.name, spec.origin, spec.submodule_search_locations is not None)
for spec in specs.values() if spec is not None
)
def _scan_imp_find_module(module_name, module_path, search_path):
# type: (str, str, list[str]) -> list[(str, str, bool)]
module = Module(module_name, module_path, imp.PY_SOURCE, None) module = Module(module_name, module_path, imp.PY_SOURCE, None)
stack = [module] stack = [module]
seen = set() seen = set()
......
...@@ -26,8 +26,9 @@ ...@@ -26,8 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals from __future__ import unicode_literals
__metaclass__ = type
import mitogen.core import mitogen.core
......
...@@ -34,18 +34,20 @@ files/modules known missing. ...@@ -34,18 +34,20 @@ files/modules known missing.
[0] "Ansible Module Architecture", developing_program_flow_modules.html [0] "Ansible Module Architecture", developing_program_flow_modules.html
""" """
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals from __future__ import unicode_literals
__metaclass__ = type
import json import json
import logging import logging
import os import os
import random import random
import re
from ansible.executor import module_common import ansible.collections.list
import ansible.errors import ansible.errors
import ansible.module_utils import ansible.executor.module_common
import ansible.release
import mitogen.core import mitogen.core
import mitogen.select import mitogen.select
...@@ -57,7 +59,8 @@ import ansible_mitogen.target ...@@ -57,7 +59,8 @@ import ansible_mitogen.target
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
NO_METHOD_MSG = 'Mitogen: no invocation method found for: ' NO_METHOD_MSG = 'Mitogen: no invocation method found for: '
NO_INTERPRETER_MSG = 'module (%s) is missing interpreter line' NO_INTERPRETER_MSG = 'module (%s) is missing interpreter line'
NO_MODULE_MSG = 'The module %s was not found in configured module paths.' # NOTE: Ansible 2.10 no longer has a `.` at the end of NO_MODULE_MSG error
NO_MODULE_MSG = 'The module %s was not found in configured module paths'
_planner_by_path = {} _planner_by_path = {}
...@@ -96,6 +99,13 @@ class Invocation(object): ...@@ -96,6 +99,13 @@ class Invocation(object):
#: Initially ``None``, but set by :func:`invoke`. The raw source or #: Initially ``None``, but set by :func:`invoke`. The raw source or
#: binary contents of the module. #: binary contents of the module.
self._module_source = None self._module_source = None
#: Initially ``{}``, but set by :func:`invoke`. Optional source to send
#: to :func:`propagate_paths_and_modules` to fix Python3.5 relative import errors
self._overridden_sources = {}
#: Initially ``set()``, but set by :func:`invoke`. Optional source paths to send
#: to :func:`propagate_paths_and_modules` to handle loading source dependencies from
#: places outside of the main source path, such as collections
self._extra_sys_paths = set()
def get_module_source(self): def get_module_source(self):
if self._module_source is None: if self._module_source is None:
...@@ -182,7 +192,7 @@ class BinaryPlanner(Planner): ...@@ -182,7 +192,7 @@ class BinaryPlanner(Planner):
@classmethod @classmethod
def detect(cls, path, source): def detect(cls, path, source):
return module_common._is_binary(source) return ansible.executor.module_common._is_binary(source)
def get_push_files(self): def get_push_files(self):
return [mitogen.core.to_text(self._inv.module_path)] return [mitogen.core.to_text(self._inv.module_path)]
...@@ -259,7 +269,7 @@ class JsonArgsPlanner(ScriptPlanner): ...@@ -259,7 +269,7 @@ class JsonArgsPlanner(ScriptPlanner):
@classmethod @classmethod
def detect(cls, path, source): def detect(cls, path, source):
return module_common.REPLACER_JSONARGS in source return ansible.executor.module_common.REPLACER_JSONARGS in source
class WantJsonPlanner(ScriptPlanner): class WantJsonPlanner(ScriptPlanner):
...@@ -288,11 +298,11 @@ class NewStylePlanner(ScriptPlanner): ...@@ -288,11 +298,11 @@ class NewStylePlanner(ScriptPlanner):
preprocessing the module. preprocessing the module.
""" """
runner_name = 'NewStyleRunner' runner_name = 'NewStyleRunner'
marker = b'from ansible.module_utils.' MARKER = re.compile(br'from ansible(?:_collections|\.module_utils)\.')
@classmethod @classmethod
def detect(cls, path, source): def detect(cls, path, source):
return cls.marker in source return cls.MARKER.search(source) is not None
def _get_interpreter(self): def _get_interpreter(self):
return None, None return None, None
...@@ -312,6 +322,8 @@ class NewStylePlanner(ScriptPlanner): ...@@ -312,6 +322,8 @@ class NewStylePlanner(ScriptPlanner):
ALWAYS_FORK_MODULES = frozenset([ ALWAYS_FORK_MODULES = frozenset([
'dnf', # issue #280; py-dnf/hawkey need therapy 'dnf', # issue #280; py-dnf/hawkey need therapy
'firewalld', # issue #570: ansible module_utils caches dbus conn 'firewalld', # issue #570: ansible module_utils caches dbus conn
'ansible.legacy.dnf', # issue #776
'ansible.builtin.dnf', # issue #832
]) ])
def should_fork(self): def should_fork(self):
...@@ -351,7 +363,7 @@ class NewStylePlanner(ScriptPlanner): ...@@ -351,7 +363,7 @@ class NewStylePlanner(ScriptPlanner):
module_name='ansible_module_%s' % (self._inv.module_name,), module_name='ansible_module_%s' % (self._inv.module_name,),
module_path=self._inv.module_path, module_path=self._inv.module_path,
search_path=self.get_search_path(), search_path=self.get_search_path(),
builtin_path=module_common._MODULE_UTILS_PATH, builtin_path=ansible.executor.module_common._MODULE_UTILS_PATH,
context=self._inv.connection.context, context=self._inv.connection.context,
) )
return self._module_map return self._module_map
...@@ -394,7 +406,7 @@ class ReplacerPlanner(NewStylePlanner): ...@@ -394,7 +406,7 @@ class ReplacerPlanner(NewStylePlanner):
@classmethod @classmethod
def detect(cls, path, source): def detect(cls, path, source):
return module_common.REPLACER in source return ansible.executor.module_common.REPLACER in source
class OldStylePlanner(ScriptPlanner): class OldStylePlanner(ScriptPlanner):
...@@ -416,37 +428,22 @@ _planners = [ ...@@ -416,37 +428,22 @@ _planners = [
] ]
try:
_get_ansible_module_fqn = module_common._get_ansible_module_fqn
except AttributeError:
_get_ansible_module_fqn = None
def py_modname_from_path(name, path): def py_modname_from_path(name, path):
""" """
Fetch the logical name of a new-style module as it might appear in Fetch the logical name of a new-style module as it might appear in
:data:`sys.modules` of the target's Python interpreter. :data:`sys.modules` of the target's Python interpreter.
* For Ansible <2.7, this is an unpackaged module named like
"ansible_module_%s".
* For Ansible <2.9, this is an unpackaged module named like
"ansible.modules.%s"
* Since Ansible 2.9, modules appearing within a package have the original * Since Ansible 2.9, modules appearing within a package have the original
package hierarchy approximated on the target, enabling relative imports package hierarchy approximated on the target, enabling relative imports
to function correctly. For example, "ansible.modules.system.setup". to function correctly. For example, "ansible.modules.system.setup".
""" """
# 2.9+
if _get_ansible_module_fqn:
try: try:
return _get_ansible_module_fqn(path) return ansible.executor.module_common._get_ansible_module_fqn(path)
except AttributeError:
pass
except ValueError: except ValueError:
pass pass
if ansible.__version__ < '2.7':
return 'ansible_module_' + name
return 'ansible.modules.' + name return 'ansible.modules.' + name
...@@ -475,7 +472,10 @@ def _propagate_deps(invocation, planner, context): ...@@ -475,7 +472,10 @@ def _propagate_deps(invocation, planner, context):
context=context, context=context,
paths=planner.get_push_files(), paths=planner.get_push_files(),
modules=planner.get_module_deps(), # modules=planner.get_module_deps(), TODO
overridden_sources=invocation._overridden_sources,
# needs to be a list because can't unpickle() a set()
extra_sys_paths=list(invocation._extra_sys_paths),
) )
...@@ -524,18 +524,52 @@ def _invoke_isolated_task(invocation, planner): ...@@ -524,18 +524,52 @@ def _invoke_isolated_task(invocation, planner):
context.shutdown() context.shutdown()
def _get_planner(name, path, source): def _get_planner(invocation, source):
for klass in _planners: for klass in _planners:
if klass.detect(path, source): if klass.detect(invocation.module_path, source):
LOG.debug('%r accepted %r (filename %r)', klass, name, path) LOG.debug(
'%r accepted %r (filename %r)',
klass, invocation.module_name, invocation.module_path,
)
return klass return klass
LOG.debug('%r rejected %r', klass, name) LOG.debug('%r rejected %r', klass, invocation.module_name)
raise ansible.errors.AnsibleError(NO_METHOD_MSG + repr(invocation)) raise ansible.errors.AnsibleError(NO_METHOD_MSG + repr(invocation))
def _fix_py35(invocation, module_source):
"""
super edge case with a relative import error in Python 3.5.1-3.5.3
in Ansible's setup module when using Mitogen
https://github.com/dw/mitogen/issues/672#issuecomment-636408833
We replace a relative import in the setup module with the actual full file path
This works in vanilla Ansible but not in Mitogen otherwise
"""
if invocation.module_name in {'ansible.builtin.setup', 'ansible.legacy.setup', 'setup'} and \
invocation.module_path not in invocation._overridden_sources:
# in-memory replacement of setup module's relative import
# would check for just python3.5 and run this then but we don't know the
# target python at this time yet
# NOTE: another ansible 2.10-specific fix: `from ..module_utils` used to be `from ...module_utils`
module_source = module_source.replace(
b"from ..module_utils.basic import AnsibleModule",
b"from ansible.module_utils.basic import AnsibleModule"
)
invocation._overridden_sources[invocation.module_path] = module_source
def _load_collections(invocation):
"""
Special loader that ensures that `ansible_collections` exist as a module path for import
Goes through all collection path possibilities and stores paths to installed collections
Stores them on the current invocation to later be passed to the master service
"""
for collection_path in ansible.collections.list.list_collection_dirs():
invocation._extra_sys_paths.add(collection_path.decode('utf-8'))
def invoke(invocation): def invoke(invocation):
""" """
Find a Planner subclass corresnding to `invocation` and use it to invoke Find a Planner subclass corresponding to `invocation` and use it to invoke
the module. the module.
:param Invocation invocation: :param Invocation invocation:
...@@ -555,10 +589,14 @@ def invoke(invocation): ...@@ -555,10 +589,14 @@ def invoke(invocation):
invocation.module_path = mitogen.core.to_text(path) invocation.module_path = mitogen.core.to_text(path)
if invocation.module_path not in _planner_by_path: if invocation.module_path not in _planner_by_path:
if 'ansible_collections' in invocation.module_path:
_load_collections(invocation)
module_source = invocation.get_module_source()
_fix_py35(invocation, module_source)
_planner_by_path[invocation.module_path] = _get_planner( _planner_by_path[invocation.module_path] = _get_planner(
invocation.module_name, invocation,
invocation.module_path, module_source
invocation.get_module_source()
) )
planner = _planner_by_path[invocation.module_path](invocation) planner = _planner_by_path[invocation.module_path](invocation)
......
...@@ -18,23 +18,17 @@ from __future__ import (absolute_import, division, print_function) ...@@ -18,23 +18,17 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
import os import os
import base64
from ansible.module_utils._text import to_bytes from ansible.errors import AnsibleError, AnsibleActionFail, AnsibleActionSkip
from ansible.module_utils.common.text.converters import to_bytes, to_text
from ansible.module_utils.six import string_types from ansible.module_utils.six import string_types
from ansible.module_utils.parsing.convert_bool import boolean from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase from ansible.plugins.action import ActionBase
from ansible.utils.hashing import checksum, md5, secure_hash from ansible.utils.display import Display
from ansible.utils.path import makedirs_safe from ansible.utils.hashing import checksum, checksum_s, md5, secure_hash
from ansible.utils.path import makedirs_safe, is_subpath
REMOTE_CHECKSUM_ERRORS = { display = Display()
'0': "unable to calculate the checksum of the remote file",
'1': "the remote file does not exist",
'2': "no read permission on remote file",
'3': "remote file is a directory, fetch cannot work on directories",
'4': "python isn't present on the system. Unable to compute checksum",
'5': "stdlib json was not found on the remote machine. Only the raw module can work without those installed",
}
class ActionModule(ActionBase): class ActionModule(ActionBase):
...@@ -45,36 +39,94 @@ class ActionModule(ActionBase): ...@@ -45,36 +39,94 @@ class ActionModule(ActionBase):
task_vars = dict() task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars) result = super(ActionModule, self).run(tmp, task_vars)
del tmp # tmp no longer has any effect
try: try:
if self._play_context.check_mode: if self._play_context.check_mode:
result['skipped'] = True raise AnsibleActionSkip('check mode not (yet) supported for this module')
result['msg'] = 'check mode not (yet) supported for this module'
return result
source = self._task.args.get('src', None)
original_dest = dest = self._task.args.get('dest', None)
flat = boolean(self._task.args.get('flat'), strict=False) flat = boolean(self._task.args.get('flat'), strict=False)
fail_on_missing = boolean(self._task.args.get('fail_on_missing', True), strict=False) fail_on_missing = boolean(self._task.args.get('fail_on_missing', True), strict=False)
validate_checksum = boolean(self._task.args.get('validate_checksum', True), strict=False) validate_checksum = boolean(self._task.args.get('validate_checksum', True), strict=False)
msg = ''
# validate source and dest are strings FIXME: use basic.py and module specs # validate source and dest are strings FIXME: use basic.py and module specs
source = self._task.args.get('src')
if not isinstance(source, string_types): if not isinstance(source, string_types):
result['msg'] = "Invalid type supplied for source option, it must be a string" msg = "Invalid type supplied for source option, it must be a string"
dest = self._task.args.get('dest')
if not isinstance(dest, string_types): if not isinstance(dest, string_types):
result['msg'] = "Invalid type supplied for dest option, it must be a string" msg = "Invalid type supplied for dest option, it must be a string"
if result.get('msg'): if source is None or dest is None:
result['failed'] = True msg = "src and dest are required"
return result
if msg:
raise AnsibleActionFail(msg)
source = self._connection._shell.join_path(source) source = self._connection._shell.join_path(source)
source = self._remote_expand_user(source) source = self._remote_expand_user(source)
# calculate checksum for the remote file, don't bother if using remote_stat = {}
# become as slurp will be used Force remote_checksum to follow remote_checksum = None
# symlinks because fetch always follows symlinks if True:
remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True) # Get checksum for the remote file even using become. Mitogen doesn't need slurp.
# Follow symlinks because fetch always follows symlinks
try:
remote_stat = self._execute_remote_stat(source, all_vars=task_vars, follow=True)
except AnsibleError as ae:
result['changed'] = False
result['file'] = source
if fail_on_missing:
result['failed'] = True
result['msg'] = to_text(ae)
else:
result['msg'] = "%s, ignored" % to_text(ae, errors='surrogate_or_replace')
return result
remote_checksum = remote_stat.get('checksum')
if remote_stat.get('exists'):
if remote_stat.get('isdir'):
result['failed'] = True
result['changed'] = False
result['msg'] = "remote file is a directory, fetch cannot work on directories"
# Historically, these don't fail because you may want to transfer
# a log file that possibly MAY exist but keep going to fetch other
# log files. Today, this is better achieved by adding
# ignore_errors or failed_when to the task. Control the behaviour
# via fail_when_missing
if not fail_on_missing:
result['msg'] += ", not transferring, ignored"
del result['changed']
del result['failed']
return result
# use slurp if permissions are lacking or privilege escalation is needed
remote_data = None
if remote_checksum in (None, '1', ''):
slurpres = self._execute_module(module_name='ansible.legacy.slurp', module_args=dict(src=source), task_vars=task_vars)
if slurpres.get('failed'):
if not fail_on_missing:
result['file'] = source
result['changed'] = False
else:
result.update(slurpres)
if 'not found' in slurpres.get('msg', ''):
result['msg'] = "the remote file does not exist, not transferring, ignored"
elif slurpres.get('msg', '').startswith('source is a directory'):
result['msg'] = "remote file is a directory, fetch cannot work on directories"
return result
else:
if slurpres['encoding'] == 'base64':
remote_data = base64.b64decode(slurpres['content'])
if remote_data is not None:
remote_checksum = checksum_s(remote_data)
# calculate the destination name # calculate the destination name
if os.path.sep not in self._connection._shell.join_path('a', ''): if os.path.sep not in self._connection._shell.join_path('a', ''):
...@@ -83,13 +135,14 @@ class ActionModule(ActionBase): ...@@ -83,13 +135,14 @@ class ActionModule(ActionBase):
else: else:
source_local = source source_local = source
dest = os.path.expanduser(dest) # ensure we only use file name, avoid relative paths
if not is_subpath(dest, original_dest):
# TODO: ? dest = os.path.expanduser(dest.replace(('../','')))
raise AnsibleActionFail("Detected directory traversal, expected to be contained in '%s' but got '%s'" % (original_dest, dest))
if flat: if flat:
if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep): if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory" raise AnsibleActionFail("dest is an existing directory, use a trailing slash if you want to fetch src into that directory")
result['file'] = dest
result['failed'] = True
return result
if dest.endswith(os.sep): if dest.endswith(os.sep):
# if the path ends with "/", we'll use the source filename as the # if the path ends with "/", we'll use the source filename as the
# destination filename # destination filename
...@@ -106,23 +159,7 @@ class ActionModule(ActionBase): ...@@ -106,23 +159,7 @@ class ActionModule(ActionBase):
target_name = self._play_context.remote_addr target_name = self._play_context.remote_addr
dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local) dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)
dest = dest.replace("//", "/") dest = os.path.normpath(dest)
if remote_checksum in REMOTE_CHECKSUM_ERRORS:
result['changed'] = False
result['file'] = source
result['msg'] = REMOTE_CHECKSUM_ERRORS[remote_checksum]
# Historically, these don't fail because you may want to transfer
# a log file that possibly MAY exist but keep going to fetch other
# log files. Today, this is better achieved by adding
# ignore_errors or failed_when to the task. Control the behaviour
# via fail_when_missing
if fail_on_missing:
result['failed'] = True
del result['changed']
else:
result['msg'] += ", not transferring, ignored"
return result
# calculate checksum for the local file # calculate checksum for the local file
local_checksum = checksum(dest) local_checksum = checksum(dest)
...@@ -132,7 +169,15 @@ class ActionModule(ActionBase): ...@@ -132,7 +169,15 @@ class ActionModule(ActionBase):
makedirs_safe(os.path.dirname(dest)) makedirs_safe(os.path.dirname(dest))
# fetch the file and check for changes # fetch the file and check for changes
if remote_data is None:
self._connection.fetch_file(source, dest) self._connection.fetch_file(source, dest)
else:
try:
f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb')
f.write(remote_data)
f.close()
except (IOError, OSError) as e:
raise AnsibleActionFail("Failed to fetch the file: %s" % e)
new_checksum = secure_hash(dest) new_checksum = secure_hash(dest)
# For backwards compatibility. We'll return None on FIPS enabled systems # For backwards compatibility. We'll return None on FIPS enabled systems
try: try:
......
...@@ -26,14 +26,15 @@ ...@@ -26,14 +26,15 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import unicode_literals
""" """
Fetch the connection configuration stack that would be used to connect to a Fetch the connection configuration stack that would be used to connect to a
target, without actually connecting to it. target, without actually connecting to it.
""" """
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__metaclass__ = type
import ansible_mitogen.connection import ansible_mitogen.connection
from ansible.plugins.action import ActionBase from ansible.plugins.action import ActionBase
...@@ -52,4 +53,6 @@ class ActionModule(ActionBase): ...@@ -52,4 +53,6 @@ class ActionModule(ActionBase):
'changed': True, 'changed': True,
'result': stack, 'result': stack,
'_ansible_verbose_always': True, '_ansible_verbose_always': True,
# for ansible < 2.8, we'll default to /usr/bin/python like before
'discovered_interpreter': self._connection._action._discovered_interpreter
} }
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -27,12 +27,13 @@ ...@@ -27,12 +27,13 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
from ansible.errors import AnsibleConnectionFailure import ansible.errors
from ansible.module_utils.six import iteritems
try: try:
import ansible_mitogen import ansible_mitogen
...@@ -45,17 +46,11 @@ import ansible_mitogen.connection ...@@ -45,17 +46,11 @@ import ansible_mitogen.connection
import ansible_mitogen.loaders import ansible_mitogen.loaders
_class = ansible_mitogen.loaders.connection_loader__get( _get_result = ansible_mitogen.loaders.connection_loader__get(
'kubectl', 'kubectl',
class_only=True, class_only=True,
) )
if _class:
kubectl = sys.modules[_class.__module__]
del _class
else:
kubectl = None
class Connection(ansible_mitogen.connection.Connection): class Connection(ansible_mitogen.connection.Connection):
transport = 'kubectl' transport = 'kubectl'
...@@ -66,14 +61,22 @@ class Connection(ansible_mitogen.connection.Connection): ...@@ -66,14 +61,22 @@ class Connection(ansible_mitogen.connection.Connection):
) )
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
if kubectl is None: if not _get_result:
raise AnsibleConnectionFailure(self.not_supported_msg) raise ansible.errors.AnsibleConnectionFailure(self.not_supported_msg)
super(Connection, self).__init__(*args, **kwargs) super(Connection, self).__init__(*args, **kwargs)
def get_extra_args(self): def get_extra_args(self):
try:
# Ansible < 2.10, _get_result is the connection class
connection_options = _get_result.connection_options
except AttributeError:
# Ansible >= 2.10, _get_result is a get_with_context_result
connection_options = _get_result.object.connection_options
parameters = [] parameters = []
for key, option in iteritems(kubectl.CONNECTION_OPTIONS): for key in connection_options:
if self.get_task_var('ansible_' + key) is not None: task_var_name = 'ansible_%s' % key
parameters += [ option, self.get_task_var('ansible_' + key) ] task_var = self.get_task_var(task_var_name)
if task_var is not None:
parameters += [connection_options[key], task_var]
return parameters return parameters
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
# Copyright 2022, Mitogen contributers
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path
import sys
try:
import ansible_mitogen
except ImportError:
base_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
del base_dir
import ansible_mitogen.connection
class Connection(ansible_mitogen.connection.Connection):
transport = 'podman'
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
...@@ -40,6 +42,24 @@ DOCUMENTATION = """ ...@@ -40,6 +42,24 @@ DOCUMENTATION = """
accepts. accepts.
version_added: "2.5" version_added: "2.5"
options: options:
ssh_args:
type: str
vars:
- name: ssh_args
- name: ansible_ssh_args
- name: ansible_mitogen_ssh_args
ssh_common_args:
type: str
vars:
- name: ssh_args
- name: ansible_ssh_common_args
- name: ansible_mitogen_ssh_common_args
ssh_extra_args:
type: str
vars:
- name: ssh_args
- name: ansible_ssh_extra_args
- name: ansible_mitogen_ssh_extra_args
""" """
try: try:
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os.path import os.path
import sys import sys
......
...@@ -26,7 +26,9 @@ ...@@ -26,7 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
__metaclass__ = type
import atexit import atexit
import logging import logging
import multiprocessing import multiprocessing
...@@ -178,42 +180,6 @@ def setup_pool(pool): ...@@ -178,42 +180,6 @@ def setup_pool(pool):
LOG.debug('Service pool configured: size=%d', pool.size) LOG.debug('Service pool configured: size=%d', pool.size)
def _setup_simplejson(responder):
"""
We support serving simplejson for Python 2.4 targets on Ansible 2.3, at
least so the package's own CI Docker scripts can run without external
help, however newer versions of simplejson no longer support Python
2.4. Therefore override any installed/loaded version with a
2.4-compatible version we ship in the compat/ directory.
"""
responder.whitelist_prefix('simplejson')
# issue #536: must be at end of sys.path, in case existing newer
# version is already loaded.
compat_path = os.path.join(os.path.dirname(__file__), 'compat')
sys.path.append(compat_path)
for fullname, is_pkg, suffix in (
(u'simplejson', True, '__init__.py'),
(u'simplejson.decoder', False, 'decoder.py'),
(u'simplejson.encoder', False, 'encoder.py'),
(u'simplejson.scanner', False, 'scanner.py'),
):
path = os.path.join(compat_path, 'simplejson', suffix)
fp = open(path, 'rb')
try:
source = fp.read()
finally:
fp.close()
responder.add_source_override(
fullname=fullname,
path=path,
source=source,
is_pkg=is_pkg,
)
def _setup_responder(responder): def _setup_responder(responder):
""" """
Configure :class:`mitogen.master.ModuleResponder` to only permit Configure :class:`mitogen.master.ModuleResponder` to only permit
...@@ -221,7 +187,6 @@ def _setup_responder(responder): ...@@ -221,7 +187,6 @@ def _setup_responder(responder):
""" """
responder.whitelist_prefix('ansible') responder.whitelist_prefix('ansible')
responder.whitelist_prefix('ansible_mitogen') responder.whitelist_prefix('ansible_mitogen')
_setup_simplejson(responder)
# Ansible 2.3 is compatible with Python 2.4 targets, however # Ansible 2.3 is compatible with Python 2.4 targets, however
# ansible/__init__.py is not. Instead, executor/module_common.py writes # ansible/__init__.py is not. Instead, executor/module_common.py writes
......
...@@ -36,8 +36,11 @@ Each class in here has a corresponding Planner class in planners.py that knows ...@@ -36,8 +36,11 @@ Each class in here has a corresponding Planner class in planners.py that knows
how to build arguments for it, preseed related data, etc. how to build arguments for it, preseed related data, etc.
""" """
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import atexit import atexit
import imp import json
import os import os
import re import re
import shlex import shlex
...@@ -61,10 +64,12 @@ except ImportError: ...@@ -61,10 +64,12 @@ except ImportError:
ctypes = None ctypes = None
try: try:
import json # Python >= 3.4, PEP 451 ModuleSpec API
import importlib.machinery
import importlib.util
except ImportError: except ImportError:
# Python 2.4 # Python < 3.4, PEP 302 Import Hooks
import simplejson as json import imp
try: try:
# Cannot use cStringIO as it does not support Unicode. # Cannot use cStringIO as it does not support Unicode.
...@@ -516,10 +521,71 @@ class ModuleUtilsImporter(object): ...@@ -516,10 +521,71 @@ class ModuleUtilsImporter(object):
sys.modules.pop(fullname, None) sys.modules.pop(fullname, None)
def find_module(self, fullname, path=None): def find_module(self, fullname, path=None):
"""
Return a loader for the module with fullname, if we will load it.
Implements importlib.abc.MetaPathFinder.find_module().
Deprecrated in Python 3.4+, replaced by find_spec().
Raises ImportWarning in Python 3.10+. Removed in Python 3.12.
"""
if fullname in self._by_fullname: if fullname in self._by_fullname:
return self return self
def find_spec(self, fullname, path, target=None):
"""
Return a `ModuleSpec` for module with `fullname` if we will load it.
Otherwise return `None`.
Implements importlib.abc.MetaPathFinder.find_spec(). Python 3.4+.
"""
if fullname.endswith('.'):
return None
try:
module_path, is_package = self._by_fullname[fullname]
except KeyError:
LOG.debug('Skipping %s: not present', fullname)
return None
LOG.debug('Handling %s', fullname)
origin = 'master:%s' % (module_path,)
return importlib.machinery.ModuleSpec(
fullname, loader=self, origin=origin, is_package=is_package,
)
def create_module(self, spec):
"""
Return a module object for the given ModuleSpec.
Implements PEP-451 importlib.abc.Loader API introduced in Python 3.4.
Unlike Loader.load_module() this shouldn't populate sys.modules or
set module attributes. Both are done by Python.
"""
module = types.ModuleType(spec.name)
# FIXME create_module() shouldn't initialise module attributes
module.__file__ = spec.origin
return module
def exec_module(self, module):
"""
Execute the module to initialise it. Don't return anything.
Implements PEP-451 importlib.abc.Loader API, introduced in Python 3.4.
"""
spec = module.__spec__
path, _ = self._by_fullname[spec.name]
source = ansible_mitogen.target.get_small_file(self._context, path)
code = compile(source, path, 'exec', 0, 1)
exec(code, module.__dict__)
self._loaded.add(spec.name)
def load_module(self, fullname): def load_module(self, fullname):
"""
Return the loaded module specified by fullname.
Implements PEP 302 importlib.abc.Loader.load_module().
Deprecated in Python 3.4+, replaced by create_module() & exec_module().
"""
path, is_pkg = self._by_fullname[fullname] path, is_pkg = self._by_fullname[fullname]
source = ansible_mitogen.target.get_small_file(self._context, path) source = ansible_mitogen.target.get_small_file(self._context, path)
code = compile(source, path, 'exec', 0, 1) code = compile(source, path, 'exec', 0, 1)
...@@ -820,12 +886,17 @@ class NewStyleRunner(ScriptRunner): ...@@ -820,12 +886,17 @@ class NewStyleRunner(ScriptRunner):
synchronization mechanism by importing everything the module will need synchronization mechanism by importing everything the module will need
prior to detaching. prior to detaching.
""" """
# I think "custom" means "found in custom module_utils search path",
# e.g. playbook relative dir, ~/.ansible/..., Ansible collection.
for fullname, _, _ in self.module_map['custom']: for fullname, _, _ in self.module_map['custom']:
mitogen.core.import_module(fullname) mitogen.core.import_module(fullname)
# I think "builtin" means "part of ansible/ansible-base/ansible-core",
# as opposed to Python builtin modules such as sys.
for fullname in self.module_map['builtin']: for fullname in self.module_map['builtin']:
try: try:
mitogen.core.import_module(fullname) mitogen.core.import_module(fullname)
except ImportError: except ImportError as exc:
# #590: Ansible 2.8 module_utils.distro is a package that # #590: Ansible 2.8 module_utils.distro is a package that
# replaces itself in sys.modules with a non-package during # replaces itself in sys.modules with a non-package during
# import. Prior to replacement, it is a real package containing # import. Prior to replacement, it is a real package containing
...@@ -836,7 +907,17 @@ class NewStyleRunner(ScriptRunner): ...@@ -836,7 +907,17 @@ class NewStyleRunner(ScriptRunner):
# loop progresses to the next entry and attempts to preload # loop progresses to the next entry and attempts to preload
# 'distro._distro', the import mechanism will fail. So here we # 'distro._distro', the import mechanism will fail. So here we
# silently ignore any failure for it. # silently ignore any failure for it.
if fullname != 'ansible.module_utils.distro._distro': if fullname == 'ansible.module_utils.distro._distro':
continue
# ansible.module_utils.compat.selinux raises ImportError if it
# can't load libselinux.so. The importer would usually catch
# this & skip selinux operations. We don't care about selinux,
# we're using import to get a copy of the module.
if (fullname == 'ansible.module_utils.compat.selinux'
and exc.msg == 'unable to load libselinux.so'):
continue
raise raise
def _setup_excepthook(self): def _setup_excepthook(self):
......
...@@ -39,23 +39,23 @@ connections, grant access to files by children, and register for notification ...@@ -39,23 +39,23 @@ connections, grant access to files by children, and register for notification
when a child has completed a job. when a child has completed a job.
""" """
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals from __future__ import unicode_literals
__metaclass__ = type
import logging import logging
import os import os
import os.path
import sys import sys
import threading import threading
import ansible.constants import ansible.constants
import mitogen import mitogen.core
import mitogen.service import mitogen.service
import mitogen.utils
import ansible_mitogen.loaders import ansible_mitogen.loaders
import ansible_mitogen.module_finder import ansible_mitogen.module_finder
import ansible_mitogen.target import ansible_mitogen.target
import ansible_mitogen.utils.unsafe
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
...@@ -91,7 +91,7 @@ def _get_candidate_temp_dirs(): ...@@ -91,7 +91,7 @@ def _get_candidate_temp_dirs():
remote_tmp = ansible.constants.DEFAULT_REMOTE_TMP remote_tmp = ansible.constants.DEFAULT_REMOTE_TMP
system_tmpdirs = ('/var/tmp', '/tmp') system_tmpdirs = ('/var/tmp', '/tmp')
return mitogen.utils.cast([remote_tmp] + list(system_tmpdirs)) return ansible_mitogen.utils.unsafe.cast([remote_tmp] + list(system_tmpdirs))
def key_from_dict(**kwargs): def key_from_dict(**kwargs):
...@@ -170,6 +170,12 @@ class ContextService(mitogen.service.Service): ...@@ -170,6 +170,12 @@ class ContextService(mitogen.service.Service):
""" """
LOG.debug('%r.reset(%r)', self, stack) LOG.debug('%r.reset(%r)', self, stack)
# this could happen if we have a `shutdown -r` shell command
# and then a `wait_for_connection` right afterwards
# in this case, we have no stack to disconnect from
if not stack:
return False
l = mitogen.core.Latch() l = mitogen.core.Latch()
context = None context = None
with self._lock: with self._lock:
......
...@@ -26,8 +26,9 @@ ...@@ -26,8 +26,9 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import from __future__ import absolute_import, division, print_function
import distutils.version __metaclass__ = type
import os import os
import signal import signal
import threading import threading
...@@ -43,52 +44,8 @@ import ansible_mitogen.loaders ...@@ -43,52 +44,8 @@ import ansible_mitogen.loaders
import ansible_mitogen.mixins import ansible_mitogen.mixins
import ansible_mitogen.process import ansible_mitogen.process
import ansible
import ansible.executor.process.worker import ansible.executor.process.worker
import ansible.utils.sentinel
try:
# 2.8+ has a standardized "unset" object.
from ansible.utils.sentinel import Sentinel
except ImportError:
Sentinel = None
ANSIBLE_VERSION_MIN = (2, 3)
ANSIBLE_VERSION_MAX = (2, 9)
NEW_VERSION_MSG = (
"Your Ansible version (%s) is too recent. The most recent version\n"
"supported by Mitogen for Ansible is %s.x. Please check the Mitogen\n"
"release notes to see if a new version is available, otherwise\n"
"subscribe to the corresponding GitHub issue to be notified when\n"
"support becomes available.\n"
"\n"
" https://mitogen.rtfd.io/en/latest/changelog.html\n"
" https://github.com/dw/mitogen/issues/\n"
)
OLD_VERSION_MSG = (
"Your version of Ansible (%s) is too old. The oldest version supported by "
"Mitogen for Ansible is %s."
)
def _assert_supported_release():
"""
Throw AnsibleError with a descriptive message in case of being loaded into
an unsupported Ansible release.
"""
v = ansible.__version__
if not isinstance(v, tuple):
v = tuple(distutils.version.LooseVersion(v).version)
if v[:2] < ANSIBLE_VERSION_MIN:
raise ansible.errors.AnsibleError(
OLD_VERSION_MSG % (v, ANSIBLE_VERSION_MIN)
)
if v[:2] > ANSIBLE_VERSION_MAX:
raise ansible.errors.AnsibleError(
NEW_VERSION_MSG % (ansible.__version__, ANSIBLE_VERSION_MAX)
)
def _patch_awx_callback(): def _patch_awx_callback():
...@@ -99,12 +56,11 @@ def _patch_awx_callback(): ...@@ -99,12 +56,11 @@ def _patch_awx_callback():
# AWX uses sitecustomize.py to force-load this package. If it exists, we're # AWX uses sitecustomize.py to force-load this package. If it exists, we're
# running under AWX. # running under AWX.
try: try:
from awx_display_callback.events import EventContext import awx_display_callback.events
from awx_display_callback.events import event_context
except ImportError: except ImportError:
return return
if hasattr(EventContext(), '_local'): if hasattr(awx_display_callback.events.EventContext(), '_local'):
# Patched version. # Patched version.
return return
...@@ -113,8 +69,8 @@ def _patch_awx_callback(): ...@@ -113,8 +69,8 @@ def _patch_awx_callback():
ctx = tls.setdefault('_ctx', {}) ctx = tls.setdefault('_ctx', {})
ctx.update(kwargs) ctx.update(kwargs)
EventContext._local = threading.local() awx_display_callback.events.EventContext._local = threading.local()
EventContext.add_local = patch_add_local awx_display_callback.events.EventContext.add_local = patch_add_local
_patch_awx_callback() _patch_awx_callback()
...@@ -132,7 +88,6 @@ def wrap_action_loader__get(name, *args, **kwargs): ...@@ -132,7 +88,6 @@ def wrap_action_loader__get(name, *args, **kwargs):
get_kwargs = {'class_only': True} get_kwargs = {'class_only': True}
if name in ('fetch',): if name in ('fetch',):
name = 'mitogen_' + name name = 'mitogen_' + name
if ansible.__version__ >= '2.8':
get_kwargs['collection_list'] = kwargs.pop('collection_list', None) get_kwargs['collection_list'] = kwargs.pop('collection_list', None)
klass = ansible_mitogen.loaders.action_loader__get(name, **get_kwargs) klass = ansible_mitogen.loaders.action_loader__get(name, **get_kwargs)
...@@ -153,6 +108,7 @@ REDIRECTED_CONNECTION_PLUGINS = ( ...@@ -153,6 +108,7 @@ REDIRECTED_CONNECTION_PLUGINS = (
'lxc', 'lxc',
'lxd', 'lxd',
'machinectl', 'machinectl',
'podman',
'setns', 'setns',
'ssh', 'ssh',
) )
...@@ -217,7 +173,7 @@ class AnsibleWrappers(object): ...@@ -217,7 +173,7 @@ class AnsibleWrappers(object):
with references to the real functions. with references to the real functions.
""" """
ansible_mitogen.loaders.action_loader.get = wrap_action_loader__get ansible_mitogen.loaders.action_loader.get = wrap_action_loader__get
ansible_mitogen.loaders.connection_loader.get = wrap_connection_loader__get ansible_mitogen.loaders.connection_loader.get_with_context = wrap_connection_loader__get
global worker__run global worker__run
worker__run = ansible.executor.process.worker.WorkerProcess.run worker__run = ansible.executor.process.worker.WorkerProcess.run
...@@ -230,7 +186,7 @@ class AnsibleWrappers(object): ...@@ -230,7 +186,7 @@ class AnsibleWrappers(object):
ansible_mitogen.loaders.action_loader.get = ( ansible_mitogen.loaders.action_loader.get = (
ansible_mitogen.loaders.action_loader__get ansible_mitogen.loaders.action_loader__get
) )
ansible_mitogen.loaders.connection_loader.get = ( ansible_mitogen.loaders.connection_loader.get_with_context = (
ansible_mitogen.loaders.connection_loader__get ansible_mitogen.loaders.connection_loader__get
) )
ansible.executor.process.worker.WorkerProcess.run = worker__run ansible.executor.process.worker.WorkerProcess.run = worker__run
...@@ -324,7 +280,7 @@ class StrategyMixin(object): ...@@ -324,7 +280,7 @@ class StrategyMixin(object):
name=task.action, name=task.action,
class_only=True, class_only=True,
) )
if play_context.connection is not Sentinel: if play_context.connection is not ansible.utils.sentinel.Sentinel:
# 2.8 appears to defer computing this until inside the worker. # 2.8 appears to defer computing this until inside the worker.
# TODO: figure out where it has moved. # TODO: figure out where it has moved.
ansible_mitogen.loaders.connection_loader.get( ansible_mitogen.loaders.connection_loader.get(
...@@ -352,7 +308,6 @@ class StrategyMixin(object): ...@@ -352,7 +308,6 @@ class StrategyMixin(object):
Wrap :meth:`run` to ensure requisite infrastructure and modifications Wrap :meth:`run` to ensure requisite infrastructure and modifications
are configured for the duration of the call. are configured for the duration of the call.
""" """
_assert_supported_release()
wrappers = AnsibleWrappers() wrappers = AnsibleWrappers()
self._worker_model = self._get_worker_model() self._worker_model = self._get_worker_model()
ansible_mitogen.process.set_worker_model(self._worker_model) ansible_mitogen.process.set_worker_model(self._worker_model)
......
...@@ -33,8 +33,12 @@ Helper functions intended to be executed on the target. These are entrypoints ...@@ -33,8 +33,12 @@ Helper functions intended to be executed on the target. These are entrypoints
for file transfer, module execution and sundry bits like changing file modes. for file transfer, module execution and sundry bits like changing file modes.
""" """
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import errno import errno
import grp import grp
import json
import operator import operator
import os import os
import pwd import pwd
...@@ -51,16 +55,10 @@ import types ...@@ -51,16 +55,10 @@ import types
logging = __import__('logging') logging = __import__('logging')
import mitogen.core import mitogen.core
import mitogen.fork
import mitogen.parent import mitogen.parent
import mitogen.service import mitogen.service
from mitogen.core import b from mitogen.core import b
try:
import json
except ImportError:
import simplejson as json
try: try:
reduce reduce
except NameError: except NameError:
...@@ -144,7 +142,7 @@ def subprocess__Popen__close_fds(self, but): ...@@ -144,7 +142,7 @@ def subprocess__Popen__close_fds(self, but):
if ( if (
sys.platform.startswith(u'linux') and sys.platform.startswith(u'linux') and
sys.version < u'3.0' and sys.version_info < (3,) and
hasattr(subprocess.Popen, u'_close_fds') and hasattr(subprocess.Popen, u'_close_fds') and
not mitogen.is_master not mitogen.is_master
): ):
...@@ -369,11 +367,6 @@ def init_child(econtext, log_level, candidate_temp_dirs): ...@@ -369,11 +367,6 @@ def init_child(econtext, log_level, candidate_temp_dirs):
LOG.setLevel(log_level) LOG.setLevel(log_level)
logging.getLogger('ansible_mitogen').setLevel(log_level) logging.getLogger('ansible_mitogen').setLevel(log_level)
# issue #536: if the json module is available, remove simplejson from the
# importer whitelist to avoid confusing certain Ansible modules.
if json.__name__ == 'json':
econtext.importer.whitelist.remove('simplejson')
global _fork_parent global _fork_parent
if FORK_SUPPORTED: if FORK_SUPPORTED:
mitogen.parent.upgrade_router(econtext) mitogen.parent.upgrade_router(econtext)
...@@ -652,7 +645,8 @@ def read_path(path): ...@@ -652,7 +645,8 @@ def read_path(path):
""" """
Fetch the contents of a filesystem `path` as bytes. Fetch the contents of a filesystem `path` as bytes.
""" """
return open(path, 'rb').read() with open(path, 'rb') as f:
return f.read()
def set_file_owner(path, owner, group=None, fd=None): def set_file_owner(path, owner, group=None, fd=None):
......
...@@ -26,9 +26,6 @@ ...@@ -26,9 +26,6 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE. # POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
from __future__ import unicode_literals
""" """
Mitogen extends Ansible's target configuration mechanism in several ways that Mitogen extends Ansible's target configuration mechanism in several ways that
require some care: require some care:
...@@ -60,6 +57,10 @@ information from PlayContext, and another that takes (almost) all information ...@@ -60,6 +57,10 @@ information from PlayContext, and another that takes (almost) all information
from HostVars. from HostVars.
""" """
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__metaclass__ = type
import abc import abc
import os import os
import ansible.utils.shlex import ansible.utils.shlex
...@@ -67,16 +68,89 @@ import ansible.constants as C ...@@ -67,16 +68,89 @@ import ansible.constants as C
from ansible.module_utils.six import with_metaclass from ansible.module_utils.six import with_metaclass
# this was added in Ansible >= 2.8.0; fallback to the default interpreter if necessary
try:
from ansible.executor.interpreter_discovery import discover_interpreter
except ImportError:
discover_interpreter = lambda action,interpreter_name,discovery_mode,task_vars: '/usr/bin/python'
try:
from ansible.utils.unsafe_proxy import AnsibleUnsafeText
except ImportError:
from ansible.vars.unsafe_proxy import AnsibleUnsafeText
import ansible_mitogen.loaders
import mitogen.core import mitogen.core
def parse_python_path(s): def run_interpreter_discovery_if_necessary(s, task_vars, action, rediscover_python):
"""
Triggers ansible python interpreter discovery if requested.
Caches this value the same way Ansible does it.
For connections like `docker`, we want to rediscover the python interpreter because
it could be different than what's ran on the host
"""
# keep trying different interpreters until we don't error
if action._finding_python_interpreter:
return action._possible_python_interpreter
if s in ['auto', 'auto_legacy', 'auto_silent', 'auto_legacy_silent']:
# python is the only supported interpreter_name as of Ansible 2.8.8
interpreter_name = 'python'
discovered_interpreter_config = u'discovered_interpreter_%s' % interpreter_name
if task_vars.get('ansible_facts') is None:
task_vars['ansible_facts'] = {}
if rediscover_python and task_vars.get('ansible_facts', {}).get(discovered_interpreter_config):
# if we're rediscovering python then chances are we're running something like a docker connection
# this will handle scenarios like running a playbook that does stuff + then dynamically creates a docker container,
# then runs the rest of the playbook inside that container, and then rerunning the playbook again
action._rediscovered_python = True
# blow away the discovered_interpreter_config cache and rediscover
del task_vars['ansible_facts'][discovered_interpreter_config]
if discovered_interpreter_config not in task_vars['ansible_facts']:
action._finding_python_interpreter = True
# fake pipelining so discover_interpreter can be happy
action._connection.has_pipelining = True
s = AnsibleUnsafeText(discover_interpreter(
action=action,
interpreter_name=interpreter_name,
discovery_mode=s,
task_vars=task_vars))
# cache discovered interpreter
task_vars['ansible_facts'][discovered_interpreter_config] = s
action._connection.has_pipelining = False
else:
s = task_vars['ansible_facts'][discovered_interpreter_config]
# propagate discovered interpreter as fact
action._discovered_interpreter_key = discovered_interpreter_config
action._discovered_interpreter = s
action._finding_python_interpreter = False
return s
def parse_python_path(s, task_vars, action, rediscover_python):
""" """
Given the string set for ansible_python_interpeter, parse it using shell Given the string set for ansible_python_interpeter, parse it using shell
syntax and return an appropriate argument vector. syntax and return an appropriate argument vector. If the value detected is
one of interpreter discovery then run that first. Caches python interpreter
discovery value in `facts_from_task_vars` like how Ansible handles this.
""" """
if s: if not s:
# if python_path doesn't exist, default to `auto` and attempt to discover it
s = 'auto'
s = run_interpreter_discovery_if_necessary(s, task_vars, action, rediscover_python)
# if unable to determine python_path, fallback to '/usr/bin/python'
if not s:
s = '/usr/bin/python'
return ansible.utils.shlex.shlex_split(s) return ansible.utils.shlex.shlex_split(s)
...@@ -282,6 +356,12 @@ class Spec(with_metaclass(abc.ABCMeta, object)): ...@@ -282,6 +356,12 @@ class Spec(with_metaclass(abc.ABCMeta, object)):
The path to the "machinectl" program for the 'setns' transport. The path to the "machinectl" program for the 'setns' transport.
""" """
@abc.abstractmethod
def mitogen_podman_path(self):
"""
The path to the "podman" program for the 'podman' transport.
"""
@abc.abstractmethod @abc.abstractmethod
def mitogen_ssh_keepalive_interval(self): def mitogen_ssh_keepalive_interval(self):
""" """
...@@ -330,6 +410,9 @@ class PlayContextSpec(Spec): ...@@ -330,6 +410,9 @@ class PlayContextSpec(Spec):
self._play_context = play_context self._play_context = play_context
self._transport = transport self._transport = transport
self._inventory_name = inventory_name self._inventory_name = inventory_name
self._task_vars = self._connection._get_task_vars()
# used to run interpreter discovery
self._action = connection._action
def transport(self): def transport(self):
return self._transport return self._transport
...@@ -353,7 +436,10 @@ class PlayContextSpec(Spec): ...@@ -353,7 +436,10 @@ class PlayContextSpec(Spec):
return self._play_context.become_user return self._play_context.become_user
def become_pass(self): def become_pass(self):
return optional_secret(self._play_context.become_pass) become_method = self.become_method()
become_plugin = ansible_mitogen.loaders.become_loader.get(become_method)
become_pass = become_plugin.get_option('become_pass', hostvars=self._task_vars)
return optional_secret(become_pass)
def password(self): def password(self):
return optional_secret(self._play_context.password) return optional_secret(self._play_context.password)
...@@ -361,18 +447,22 @@ class PlayContextSpec(Spec): ...@@ -361,18 +447,22 @@ class PlayContextSpec(Spec):
def port(self): def port(self):
return self._play_context.port return self._play_context.port
def python_path(self): def python_path(self, rediscover_python=False):
s = self._connection.get_task_var('ansible_python_interpreter') s = self._connection.get_task_var('ansible_python_interpreter')
# #511, #536: executor/module_common.py::_get_shebang() hard-wires # #511, #536: executor/module_common.py::_get_shebang() hard-wires
# "/usr/bin/python" as the default interpreter path if no other # "/usr/bin/python" as the default interpreter path if no other
# interpreter is specified. # interpreter is specified.
return parse_python_path(s or '/usr/bin/python') return parse_python_path(
s,
task_vars=self._task_vars,
action=self._action,
rediscover_python=rediscover_python)
def private_key_file(self): def private_key_file(self):
return self._play_context.private_key_file return self._play_context.private_key_file
def ssh_executable(self): def ssh_executable(self):
return self._play_context.ssh_executable return C.config.get_config_value("ssh_executable", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {}))
def timeout(self): def timeout(self):
return self._play_context.timeout return self._play_context.timeout
...@@ -388,9 +478,9 @@ class PlayContextSpec(Spec): ...@@ -388,9 +478,9 @@ class PlayContextSpec(Spec):
return [ return [
mitogen.core.to_text(term) mitogen.core.to_text(term)
for s in ( for s in (
getattr(self._play_context, 'ssh_args', ''), C.config.get_config_value("ssh_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {})),
getattr(self._play_context, 'ssh_common_args', ''), C.config.get_config_value("ssh_common_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {})),
getattr(self._play_context, 'ssh_extra_args', '') C.config.get_config_value("ssh_extra_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {}))
) )
for term in ansible.utils.shlex.shlex_split(s or '') for term in ansible.utils.shlex.shlex_split(s or '')
] ]
...@@ -448,6 +538,9 @@ class PlayContextSpec(Spec): ...@@ -448,6 +538,9 @@ class PlayContextSpec(Spec):
def mitogen_lxc_info_path(self): def mitogen_lxc_info_path(self):
return self._connection.get_task_var('mitogen_lxc_info_path') return self._connection.get_task_var('mitogen_lxc_info_path')
def mitogen_podman_path(self):
return self._connection.get_task_var('mitogen_podman_path')
def mitogen_ssh_keepalive_interval(self): def mitogen_ssh_keepalive_interval(self):
return self._connection.get_task_var('mitogen_ssh_keepalive_interval') return self._connection.get_task_var('mitogen_ssh_keepalive_interval')
...@@ -490,14 +583,16 @@ class MitogenViaSpec(Spec): ...@@ -490,14 +583,16 @@ class MitogenViaSpec(Spec):
having a configruation problem with connection delegation, the answer to having a configruation problem with connection delegation, the answer to
your problem lies in the method implementations below! your problem lies in the method implementations below!
""" """
def __init__(self, inventory_name, host_vars, become_method, become_user, def __init__(self, inventory_name, host_vars, task_vars, become_method, become_user,
play_context): play_context, action):
""" """
:param str inventory_name: :param str inventory_name:
The inventory name of the intermediary machine, i.e. not the target The inventory name of the intermediary machine, i.e. not the target
machine. machine.
:param dict host_vars: :param dict host_vars:
The HostVars magic dictionary provided by Ansible in task_vars. The HostVars magic dictionary provided by Ansible in task_vars.
:param dict task_vars:
Task vars provided by Ansible.
:param str become_method: :param str become_method:
If the mitogen_via= spec included a become method, the method it If the mitogen_via= spec included a become method, the method it
specifies. specifies.
...@@ -509,14 +604,18 @@ class MitogenViaSpec(Spec): ...@@ -509,14 +604,18 @@ class MitogenViaSpec(Spec):
the real target machine. Values from this object are **strictly the real target machine. Values from this object are **strictly
restricted** to values that are Ansible-global, e.g. the passwords restricted** to values that are Ansible-global, e.g. the passwords
specified interactively. specified interactively.
:param ActionModuleMixin action:
Backref to the ActionModuleMixin required for ansible interpreter discovery
""" """
self._inventory_name = inventory_name self._inventory_name = inventory_name
self._host_vars = host_vars self._host_vars = host_vars
self._task_vars = task_vars
self._become_method = become_method self._become_method = become_method
self._become_user = become_user self._become_user = become_user
# Dangerous! You may find a variable you want in this object, but it's # Dangerous! You may find a variable you want in this object, but it's
# almost certainly for the wrong machine! # almost certainly for the wrong machine!
self._dangerous_play_context = play_context self._dangerous_play_context = play_context
self._action = action
def transport(self): def transport(self):
return ( return (
...@@ -557,8 +656,8 @@ class MitogenViaSpec(Spec): ...@@ -557,8 +656,8 @@ class MitogenViaSpec(Spec):
def become_pass(self): def become_pass(self):
return optional_secret( return optional_secret(
self._host_vars.get('ansible_become_password') or self._host_vars.get('ansible_become_pass') or
self._host_vars.get('ansible_become_pass') self._host_vars.get('ansible_become_password')
) )
def password(self): def password(self):
...@@ -574,12 +673,16 @@ class MitogenViaSpec(Spec): ...@@ -574,12 +673,16 @@ class MitogenViaSpec(Spec):
C.DEFAULT_REMOTE_PORT C.DEFAULT_REMOTE_PORT
) )
def python_path(self): def python_path(self, rediscover_python=False):
s = self._host_vars.get('ansible_python_interpreter') s = self._host_vars.get('ansible_python_interpreter')
# #511, #536: executor/module_common.py::_get_shebang() hard-wires # #511, #536: executor/module_common.py::_get_shebang() hard-wires
# "/usr/bin/python" as the default interpreter path if no other # "/usr/bin/python" as the default interpreter path if no other
# interpreter is specified. # interpreter is specified.
return parse_python_path(s or '/usr/bin/python') return parse_python_path(
s,
task_vars=self._task_vars,
action=self._action,
rediscover_python=rediscover_python)
def private_key_file(self): def private_key_file(self):
# TODO: must come from PlayContext too. # TODO: must come from PlayContext too.
...@@ -590,10 +693,7 @@ class MitogenViaSpec(Spec): ...@@ -590,10 +693,7 @@ class MitogenViaSpec(Spec):
) )
def ssh_executable(self): def ssh_executable(self):
return ( return C.config.get_config_value("ssh_executable", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {}))
self._host_vars.get('ansible_ssh_executable') or
C.ANSIBLE_SSH_EXECUTABLE
)
def timeout(self): def timeout(self):
# TODO: must come from PlayContext too. # TODO: must come from PlayContext too.
...@@ -610,22 +710,9 @@ class MitogenViaSpec(Spec): ...@@ -610,22 +710,9 @@ class MitogenViaSpec(Spec):
return [ return [
mitogen.core.to_text(term) mitogen.core.to_text(term)
for s in ( for s in (
( C.config.get_config_value("ssh_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {})),
self._host_vars.get('ansible_ssh_args') or C.config.get_config_value("ssh_common_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {})),
getattr(C, 'ANSIBLE_SSH_ARGS', None) or C.config.get_config_value("ssh_extra_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {}))
os.environ.get('ANSIBLE_SSH_ARGS')
# TODO: ini entry. older versions.
),
(
self._host_vars.get('ansible_ssh_common_args') or
os.environ.get('ANSIBLE_SSH_COMMON_ARGS')
# TODO: ini entry.
),
(
self._host_vars.get('ansible_ssh_extra_args') or
os.environ.get('ANSIBLE_SSH_EXTRA_ARGS')
# TODO: ini entry.
),
) )
for term in ansible.utils.shlex.shlex_split(s) for term in ansible.utils.shlex.shlex_split(s)
if s if s
...@@ -666,7 +753,7 @@ class MitogenViaSpec(Spec): ...@@ -666,7 +753,7 @@ class MitogenViaSpec(Spec):
return self._host_vars.get('mitogen_kubectl_path') return self._host_vars.get('mitogen_kubectl_path')
def mitogen_lxc_path(self): def mitogen_lxc_path(self):
return self.host_vars.get('mitogen_lxc_path') return self._host_vars.get('mitogen_lxc_path')
def mitogen_lxc_attach_path(self): def mitogen_lxc_attach_path(self):
return self._host_vars.get('mitogen_lxc_attach_path') return self._host_vars.get('mitogen_lxc_attach_path')
...@@ -674,6 +761,9 @@ class MitogenViaSpec(Spec): ...@@ -674,6 +761,9 @@ class MitogenViaSpec(Spec):
def mitogen_lxc_info_path(self): def mitogen_lxc_info_path(self):
return self._host_vars.get('mitogen_lxc_info_path') return self._host_vars.get('mitogen_lxc_info_path')
def mitogen_podman_path(self):
return self._host_vars.get('mitogen_podman_path')
def mitogen_ssh_keepalive_interval(self): def mitogen_ssh_keepalive_interval(self):
return self._host_vars.get('mitogen_ssh_keepalive_interval') return self._host_vars.get('mitogen_ssh_keepalive_interval')
......
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import re
import ansible
__all__ = [
'ansible_version',
]
def _parse(v_string):
# Adapted from distutils.version.LooseVersion.parse()
component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
for component in component_re.split(v_string):
if not component or component == '.':
continue
try:
yield int(component)
except ValueError:
yield component
ansible_version = tuple(_parse(ansible.__version__))
del _parse
del re
del ansible
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import ansible
import ansible.utils.unsafe_proxy
import ansible_mitogen.utils
import mitogen
import mitogen.core
import mitogen.utils
__all__ = [
'cast',
]
def _cast_to_dict(obj): return {cast(k): cast(v) for k, v in obj.items()}
def _cast_to_list(obj): return [cast(v) for v in obj]
def _cast_unsafe(obj): return obj._strip_unsafe()
def _passthrough(obj): return obj
# A dispatch table to cast objects based on their exact type.
# This is an optimisation, reliable fallbacks are required (e.g. isinstance())
_CAST_DISPATCH = {
bytes: bytes,
dict: _cast_to_dict,
list: _cast_to_list,
tuple: _cast_to_list,
mitogen.core.UnicodeType: mitogen.core.UnicodeType,
}
_CAST_DISPATCH.update({t: _passthrough for t in mitogen.utils.PASSTHROUGH})
if hasattr(ansible.utils.unsafe_proxy.AnsibleUnsafeText, '_strip_unsafe'):
_CAST_DISPATCH.update({
ansible.utils.unsafe_proxy.AnsibleUnsafeBytes: _cast_unsafe,
ansible.utils.unsafe_proxy.AnsibleUnsafeText: _cast_unsafe,
ansible.utils.unsafe_proxy.NativeJinjaUnsafeText: _cast_unsafe,
})
elif ansible_mitogen.utils.ansible_version[:2] <= (2, 16):
_CAST_DISPATCH.update({
ansible.utils.unsafe_proxy.AnsibleUnsafeBytes: bytes,
ansible.utils.unsafe_proxy.AnsibleUnsafeText: mitogen.core.UnicodeType,
})
else:
mitogen_ver = '.'.join(str(v) for v in mitogen.__version__)
raise ImportError("Mitogen %s can't unwrap Ansible %s AnsibleUnsafe objects"
% (mitogen_ver, ansible.__version__))
def cast(obj):
"""
Return obj (or a copy) with subtypes of builtins cast to their supertype.
This is an enhanced version of :func:`mitogen.utils.cast`. In addition it
handles ``ansible.utils.unsafe_proxy.AnsibleUnsafeText`` and variants.
There are types handled by :func:`ansible.utils.unsafe_proxy.wrap_var()`
that this function currently does not handle (e.g. `set()`), or preserve
preserve (e.g. `tuple()`). Future enhancements may change this.
:param obj:
Object to undecorate.
:returns:
Undecorated object.
"""
# Fast path: obj is a known type, dispatch directly
try:
unwrapper = _CAST_DISPATCH[type(obj)]
except KeyError:
pass
else:
return unwrapper(obj)
# Slow path: obj is some unknown subclass
if isinstance(obj, dict): return _cast_to_dict(obj)
if isinstance(obj, (list, tuple)): return _cast_to_list(obj)
return mitogen.utils.cast(obj)
Metadata-Version: 1.1 Metadata-Version: 2.1
Name: mitogen Name: mitogen
Version: 0.2.9 Version: 0.3.7
Summary: Library for writing distributed self-replicating programs. Summary: Library for writing distributed self-replicating programs.
Home-page: https://github.com/dw/mitogen/ Home-page: https://github.com/mitogen-hq/mitogen/
Author: David Wilson Author: David Wilson
Author-email: UNKNOWN
License: New BSD License: New BSD
Description: UNKNOWN
Platform: UNKNOWN
Classifier: Environment :: Console Classifier: Environment :: Console
Classifier: Framework :: Ansible
Classifier: Intended Audience :: System Administrators Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: BSD License Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: POSIX Classifier: Operating System :: POSIX
Classifier: Programming Language :: Python Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2.4
Classifier: Programming Language :: Python :: 2.5
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Topic :: System :: Distributed Computing Classifier: Topic :: System :: Distributed Computing
Classifier: Topic :: System :: Systems Administration Classifier: Topic :: System :: Systems Administration
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*
Description-Content-Type: text/markdown
License-File: LICENSE
# Mitogen
<a href="https://mitogen.networkgenomics.com/">Please see the documentation</a>.
![](https://i.imgur.com/eBM6LhJ.gif)
[![Total alerts](https://img.shields.io/lgtm/alerts/g/mitogen-hq/mitogen.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mitogen-hq/mitogen/alerts/)
[![Build Status](https://dev.azure.com/mitogen-hq/mitogen/_apis/build/status/mitogen-hq.mitogen?branchName=master)](https://dev.azure.com/mitogen-hq/mitogen/_build/latest?definitionId=1&branchName=master)
...@@ -19,10 +19,6 @@ ansible_mitogen/strategy.py ...@@ -19,10 +19,6 @@ ansible_mitogen/strategy.py
ansible_mitogen/target.py ansible_mitogen/target.py
ansible_mitogen/transport_config.py ansible_mitogen/transport_config.py
ansible_mitogen/compat/__init__.py ansible_mitogen/compat/__init__.py
ansible_mitogen/compat/simplejson/__init__.py
ansible_mitogen/compat/simplejson/decoder.py
ansible_mitogen/compat/simplejson/encoder.py
ansible_mitogen/compat/simplejson/scanner.py
ansible_mitogen/plugins/__init__.py ansible_mitogen/plugins/__init__.py
ansible_mitogen/plugins/action/__init__.py ansible_mitogen/plugins/action/__init__.py
ansible_mitogen/plugins/action/mitogen_fetch.py ansible_mitogen/plugins/action/mitogen_fetch.py
...@@ -37,6 +33,7 @@ ansible_mitogen/plugins/connection/mitogen_local.py ...@@ -37,6 +33,7 @@ ansible_mitogen/plugins/connection/mitogen_local.py
ansible_mitogen/plugins/connection/mitogen_lxc.py ansible_mitogen/plugins/connection/mitogen_lxc.py
ansible_mitogen/plugins/connection/mitogen_lxd.py ansible_mitogen/plugins/connection/mitogen_lxd.py
ansible_mitogen/plugins/connection/mitogen_machinectl.py ansible_mitogen/plugins/connection/mitogen_machinectl.py
ansible_mitogen/plugins/connection/mitogen_podman.py
ansible_mitogen/plugins/connection/mitogen_setns.py ansible_mitogen/plugins/connection/mitogen_setns.py
ansible_mitogen/plugins/connection/mitogen_ssh.py ansible_mitogen/plugins/connection/mitogen_ssh.py
ansible_mitogen/plugins/connection/mitogen_su.py ansible_mitogen/plugins/connection/mitogen_su.py
...@@ -46,6 +43,8 @@ ansible_mitogen/plugins/strategy/mitogen.py ...@@ -46,6 +43,8 @@ ansible_mitogen/plugins/strategy/mitogen.py
ansible_mitogen/plugins/strategy/mitogen_free.py ansible_mitogen/plugins/strategy/mitogen_free.py
ansible_mitogen/plugins/strategy/mitogen_host_pinned.py ansible_mitogen/plugins/strategy/mitogen_host_pinned.py
ansible_mitogen/plugins/strategy/mitogen_linear.py ansible_mitogen/plugins/strategy/mitogen_linear.py
ansible_mitogen/utils/__init__.py
ansible_mitogen/utils/unsafe.py
mitogen/__init__.py mitogen/__init__.py
mitogen/buildah.py mitogen/buildah.py
mitogen/core.py mitogen/core.py
...@@ -62,6 +61,7 @@ mitogen/master.py ...@@ -62,6 +61,7 @@ mitogen/master.py
mitogen/minify.py mitogen/minify.py
mitogen/os_fork.py mitogen/os_fork.py
mitogen/parent.py mitogen/parent.py
mitogen/podman.py
mitogen/profiler.py mitogen/profiler.py
mitogen/select.py mitogen/select.py
mitogen/service.py mitogen/service.py
...@@ -79,3 +79,4 @@ mitogen.egg-info/top_level.txt ...@@ -79,3 +79,4 @@ mitogen.egg-info/top_level.txt
mitogen/compat/__init__.py mitogen/compat/__init__.py
mitogen/compat/pkgutil.py mitogen/compat/pkgutil.py
mitogen/compat/tokenize.py mitogen/compat/tokenize.py
tests/testlib.py
\ No newline at end of file
...@@ -35,7 +35,7 @@ be expected. On the slave, it is built dynamically during startup. ...@@ -35,7 +35,7 @@ be expected. On the slave, it is built dynamically during startup.
#: Library version as a tuple. #: Library version as a tuple.
__version__ = (0, 2, 9) __version__ = (0, 3, 7)
#: This is :data:`False` in slave contexts. Previously it was used to prevent #: This is :data:`False` in slave contexts. Previously it was used to prevent
......
...@@ -30,7 +30,6 @@ ...@@ -30,7 +30,6 @@
import logging import logging
import mitogen.core
import mitogen.parent import mitogen.parent
......
...@@ -34,6 +34,34 @@ non-essential code in order to reduce its size, since it is also serves as the ...@@ -34,6 +34,34 @@ non-essential code in order to reduce its size, since it is also serves as the
bootstrap implementation sent to every new slave context. bootstrap implementation sent to every new slave context.
""" """
import sys
try:
import _frozen_importlib_external
except ImportError:
pass
else:
class MonkeyPatchedPathFinder(_frozen_importlib_external.PathFinder):
"""
Meta path finder for sys.path and package __path__ attributes.
Patched for https://github.com/python/cpython/issues/115911.
"""
@classmethod
def _path_importer_cache(cls, path):
if path == '':
try:
path = _frozen_importlib_external._os.getcwd()
except (FileNotFoundError, PermissionError):
return None
return super()._path_importer_cache(path)
if sys.version_info[:2] <= (3, 12):
for i, mpf in enumerate(sys.meta_path):
if mpf is _frozen_importlib_external.PathFinder:
sys.meta_path[i] = MonkeyPatchedPathFinder
del i, mpf
import binascii import binascii
import collections import collections
import encodings.latin_1 import encodings.latin_1
...@@ -49,18 +77,22 @@ import pstats ...@@ -49,18 +77,22 @@ import pstats
import signal import signal
import socket import socket
import struct import struct
import sys
import syslog import syslog
import threading import threading
import time import time
import traceback import traceback
import types
import warnings import warnings
import weakref import weakref
import zlib import zlib
# Python >3.7 deprecated the imp module. try:
warnings.filterwarnings('ignore', message='the imp module is deprecated') # Python >= 3.4, PEP 451 ModuleSpec API
import imp import importlib.machinery
import importlib.util
except ImportError:
# Python < 3.4, PEP 302 Import Hooks
import imp
# Absolute imports for <2.5. # Absolute imports for <2.5.
select = __import__('select') select = __import__('select')
...@@ -386,6 +418,20 @@ def _partition(s, sep, find): ...@@ -386,6 +418,20 @@ def _partition(s, sep, find):
return left, sep, s[len(left)+len(sep):] return left, sep, s[len(left)+len(sep):]
def threading__current_thread():
try:
return threading.current_thread() # Added in Python 2.6+
except AttributeError:
return threading.currentThread() # Deprecated in Python 3.10+
def threading__thread_name(thread):
try:
return thread.name # Added in Python 2.6+
except AttributeError:
return thread.getName() # Deprecated in Python 3.10+
if hasattr(UnicodeType, 'rpartition'): if hasattr(UnicodeType, 'rpartition'):
str_partition = UnicodeType.partition str_partition = UnicodeType.partition
str_rpartition = UnicodeType.rpartition str_rpartition = UnicodeType.rpartition
...@@ -1254,6 +1300,7 @@ class Importer(object): ...@@ -1254,6 +1300,7 @@ class Importer(object):
'minify', 'minify',
'os_fork', 'os_fork',
'parent', 'parent',
'podman',
'select', 'select',
'service', 'service',
'setns', 'setns',
...@@ -1269,6 +1316,13 @@ class Importer(object): ...@@ -1269,6 +1316,13 @@ class Importer(object):
# a negative round-trip. # a negative round-trip.
'builtins', 'builtins',
'__builtin__', '__builtin__',
# On some Python releases (e.g. 3.8, 3.9) the subprocess module tries
# to import of this Windows-only builtin module.
'msvcrt',
# Python 2.x module that was renamed to _thread in 3.x.
# This entry avoids a roundtrip on 2.x -> 3.x.
'thread', 'thread',
# org.python.core imported by copy, pickle, xml.sax; breaks Jython, but # org.python.core imported by copy, pickle, xml.sax; breaks Jython, but
...@@ -1331,6 +1385,19 @@ class Importer(object): ...@@ -1331,6 +1385,19 @@ class Importer(object):
def __repr__(self): def __repr__(self):
return 'Importer' return 'Importer'
@staticmethod
def _loader_from_module(module, default=None):
"""Return the loader for a module object."""
try:
return module.__spec__.loader
except AttributeError:
pass
try:
return module.__loader__
except AttributeError:
pass
return default
def builtin_find_module(self, fullname): def builtin_find_module(self, fullname):
# imp.find_module() will always succeed for __main__, because it is a # imp.find_module() will always succeed for __main__, because it is a
# built-in module. That means it exists on a special linked list deep # built-in module. That means it exists on a special linked list deep
...@@ -1338,17 +1405,35 @@ class Importer(object): ...@@ -1338,17 +1405,35 @@ class Importer(object):
if fullname == '__main__': if fullname == '__main__':
raise ModuleNotFoundError() raise ModuleNotFoundError()
# For a module inside a package (e.g. pkg_a.mod_b) use the search path
# of that package (e.g. ['/usr/lib/python3.11/site-packages/pkg_a']).
parent, _, modname = str_rpartition(fullname, '.') parent, _, modname = str_rpartition(fullname, '.')
if parent: if parent:
path = sys.modules[parent].__path__ path = sys.modules[parent].__path__
else: else:
path = None path = None
# For a top-level module search builtin modules, frozen modules,
# system specific locations (e.g. Windows registry, site-packages).
# Otherwise use search path of the parent package.
# Works for both stdlib modules & third-party modules.
# If the search is unsuccessful then raises ImportError.
fp, pathname, description = imp.find_module(modname, path) fp, pathname, description = imp.find_module(modname, path)
if fp: if fp:
fp.close() fp.close()
def find_module(self, fullname, path=None): def find_module(self, fullname, path=None):
"""
Return a loader (ourself) or None, for the module with fullname.
Implements importlib.abc.MetaPathFinder.find_module().
Deprecrated in Python 3.4+, replaced by find_spec().
Raises ImportWarning in Python 3.10+.
Removed in Python 3.12.
fullname Fully qualified module name, e.g. "os.path".
path __path__ of parent packge. None for a top level module.
"""
if hasattr(_tls, 'running'): if hasattr(_tls, 'running'):
return None return None
...@@ -1356,14 +1441,13 @@ class Importer(object): ...@@ -1356,14 +1441,13 @@ class Importer(object):
try: try:
#_v and self._log.debug('Python requested %r', fullname) #_v and self._log.debug('Python requested %r', fullname)
fullname = to_text(fullname) fullname = to_text(fullname)
pkgname, dot, _ = str_rpartition(fullname, '.') pkgname, _, suffix = str_rpartition(fullname, '.')
pkg = sys.modules.get(pkgname) pkg = sys.modules.get(pkgname)
if pkgname and getattr(pkg, '__loader__', None) is not self: if pkgname and getattr(pkg, '__loader__', None) is not self:
self._log.debug('%s is submodule of a locally loaded package', self._log.debug('%s is submodule of a locally loaded package',
fullname) fullname)
return None return None
suffix = fullname[len(pkgname+dot):]
if pkgname and suffix not in self._present.get(pkgname, ()): if pkgname and suffix not in self._present.get(pkgname, ()):
self._log.debug('%s has no submodule %s', pkgname, suffix) self._log.debug('%s has no submodule %s', pkgname, suffix)
return None return None
...@@ -1383,6 +1467,66 @@ class Importer(object): ...@@ -1383,6 +1467,66 @@ class Importer(object):
finally: finally:
del _tls.running del _tls.running
def find_spec(self, fullname, path, target=None):
"""
Return a `ModuleSpec` for module with `fullname` if we will load it.
Otherwise return `None`, allowing other finders to try.
fullname Fully qualified name of the module (e.g. foo.bar.baz)
path Path entries to search. None for a top-level module.
target Existing module to be reloaded (if any).
Implements importlib.abc.MetaPathFinder.find_spec()
Python 3.4+.
"""
# Presence of _tls.running indicates we've re-invoked importlib.
# Abort early to prevent infinite recursion. See below.
if hasattr(_tls, 'running'):
return None
log = self._log.getChild('find_spec')
if fullname.endswith('.'):
return None
pkgname, _, modname = fullname.rpartition('.')
if pkgname and modname not in self._present.get(pkgname, ()):
log.debug('Skipping %s. Parent %s has no submodule %s',
fullname, pkgname, modname)
return None
pkg = sys.modules.get(pkgname)
pkg_loader = self._loader_from_module(pkg)
if pkgname and pkg_loader is not self:
log.debug('Skipping %s. Parent %s was loaded by %r',
fullname, pkgname, pkg_loader)
return None
# #114: whitelisted prefixes override any system-installed package.
if self.whitelist != ['']:
if any(s and fullname.startswith(s) for s in self.whitelist):
log.debug('Handling %s. It is whitelisted', fullname)
return importlib.machinery.ModuleSpec(fullname, loader=self)
if fullname == '__main__':
log.debug('Handling %s. A special case', fullname)
return importlib.machinery.ModuleSpec(fullname, loader=self)
# Re-invoke the import machinery to allow other finders to try.
# Set a guard, so we don't infinitely recurse. See top of this method.
_tls.running = True
try:
spec = importlib.util._find_spec(fullname, path, target)
finally:
del _tls.running
if spec:
log.debug('Skipping %s. Available as %r', fullname, spec)
return spec
log.debug('Handling %s. Unavailable locally', fullname)
return importlib.machinery.ModuleSpec(fullname, loader=self)
blacklisted_msg = ( blacklisted_msg = (
'%r is present in the Mitogen importer blacklist, therefore this ' '%r is present in the Mitogen importer blacklist, therefore this '
'context will not attempt to request it from the master, as the ' 'context will not attempt to request it from the master, as the '
...@@ -1469,7 +1613,71 @@ class Importer(object): ...@@ -1469,7 +1613,71 @@ class Importer(object):
if present: if present:
callback() callback()
def create_module(self, spec):
"""
Return a module object for the given ModuleSpec.
Implements PEP-451 importlib.abc.Loader API introduced in Python 3.4.
Unlike Loader.load_module() this shouldn't populate sys.modules or
set module attributes. Both are done by Python.
"""
self._log.debug('Creating module for %r', spec)
# FIXME Should this be done in find_spec()? Can it?
self._refuse_imports(spec.name)
# FIXME "create_module() should properly handle the case where it is
# called more than once for the same spec/module." -- PEP-451
event = threading.Event()
self._request_module(spec.name, callback=event.set)
event.wait()
# 0:fullname 1:pkg_present 2:path 3:compressed 4:related
_, pkg_present, path, _, _ = self._cache[spec.name]
if path is None:
raise ImportError(self.absent_msg % (spec.name))
spec.origin = self.get_filename(spec.name)
if pkg_present is not None:
# TODO Namespace packages
spec.submodule_search_locations = []
self._present[spec.name] = pkg_present
module = types.ModuleType(spec.name)
# FIXME create_module() shouldn't initialise module attributes
module.__file__ = spec.origin
return module
def exec_module(self, module):
"""
Execute the module to initialise it. Don't return anything.
Implements PEP-451 importlib.abc.Loader API, introduced in Python 3.4.
"""
name = module.__spec__.name
origin = module.__spec__.origin
self._log.debug('Executing %s from %s', name, origin)
source = self.get_source(name)
try:
# Compile the source into a code object. Don't add any __future__
# flags and don't inherit any from this module.
# FIXME Should probably be exposed as get_code()
code = compile(source, origin, 'exec', flags=0, dont_inherit=True)
except SyntaxError:
# FIXME Why is this LOG, rather than self._log?
LOG.exception('while importing %r', name)
raise
exec(code, module.__dict__)
def load_module(self, fullname): def load_module(self, fullname):
"""
Return the loaded module specified by fullname.
Implements importlib.abc.Loader.load_module().
Deprecated in Python 3.4+, replaced by create_module() & exec_module().
"""
fullname = to_text(fullname) fullname = to_text(fullname)
_v and self._log.debug('requesting %s', fullname) _v and self._log.debug('requesting %s', fullname)
self._refuse_imports(fullname) self._refuse_imports(fullname)
...@@ -1478,11 +1686,11 @@ class Importer(object): ...@@ -1478,11 +1686,11 @@ class Importer(object):
self._request_module(fullname, event.set) self._request_module(fullname, event.set)
event.wait() event.wait()
ret = self._cache[fullname] # 0:fullname 1:pkg_present 2:path 3:compressed 4:related
if ret[2] is None: _, pkg_present, path, _, _ = self._cache[fullname]
if path is None:
raise ModuleNotFoundError(self.absent_msg % (fullname,)) raise ModuleNotFoundError(self.absent_msg % (fullname,))
pkg_present = ret[1]
mod = sys.modules.setdefault(fullname, imp.new_module(fullname)) mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
mod.__file__ = self.get_filename(fullname) mod.__file__ = self.get_filename(fullname)
mod.__loader__ = self mod.__loader__ = self
...@@ -2679,7 +2887,7 @@ class Latch(object): ...@@ -2679,7 +2887,7 @@ class Latch(object):
raise e raise e
assert cookie == got_cookie, ( assert cookie == got_cookie, (
"Cookie incorrect; got %r, expected %r" \ "Cookie incorrect; got %r, expected %r"
% (binascii.hexlify(got_cookie), % (binascii.hexlify(got_cookie),
binascii.hexlify(cookie)) binascii.hexlify(cookie))
) )
...@@ -2734,7 +2942,7 @@ class Latch(object): ...@@ -2734,7 +2942,7 @@ class Latch(object):
return 'Latch(%#x, size=%d, t=%r)' % ( return 'Latch(%#x, size=%d, t=%r)' % (
id(self), id(self),
len(self._queue), len(self._queue),
threading.currentThread().getName(), threading__thread_name(threading__current_thread()),
) )
...@@ -2801,7 +3009,7 @@ class Waker(Protocol): ...@@ -2801,7 +3009,7 @@ class Waker(Protocol):
self.stream.transmit_side.write(b(' ')) self.stream.transmit_side.write(b(' '))
except OSError: except OSError:
e = sys.exc_info()[1] e = sys.exc_info()[1]
if e.args[0] in (errno.EBADF, errno.EWOULDBLOCK): if e.args[0] not in (errno.EBADF, errno.EWOULDBLOCK):
raise raise
broker_shutdown_msg = ( broker_shutdown_msg = (
...@@ -3634,7 +3842,6 @@ class Dispatcher(object): ...@@ -3634,7 +3842,6 @@ class Dispatcher(object):
self._service_recv.notify = None self._service_recv.notify = None
self.recv.close() self.recv.close()
@classmethod @classmethod
@takes_econtext @takes_econtext
def forget_chain(cls, chain_id, econtext): def forget_chain(cls, chain_id, econtext):
...@@ -3860,7 +4067,7 @@ class ExternalContext(object): ...@@ -3860,7 +4067,7 @@ class ExternalContext(object):
else: else:
core_src_fd = self.config.get('core_src_fd', 101) core_src_fd = self.config.get('core_src_fd', 101)
if core_src_fd: if core_src_fd:
fp = os.fdopen(core_src_fd, 'rb', 1) fp = os.fdopen(core_src_fd, 'rb', 0)
try: try:
core_src = fp.read() core_src = fp.read()
# Strip "ExternalContext.main()" call from last line. # Strip "ExternalContext.main()" call from last line.
...@@ -3884,7 +4091,7 @@ class ExternalContext(object): ...@@ -3884,7 +4091,7 @@ class ExternalContext(object):
def _setup_package(self): def _setup_package(self):
global mitogen global mitogen
mitogen = imp.new_module('mitogen') mitogen = types.ModuleType('mitogen')
mitogen.__package__ = 'mitogen' mitogen.__package__ = 'mitogen'
mitogen.__path__ = [] mitogen.__path__ = []
mitogen.__loader__ = self.importer mitogen.__loader__ = self.importer
......
...@@ -103,7 +103,6 @@ import tempfile ...@@ -103,7 +103,6 @@ import tempfile
import threading import threading
import mitogen.core import mitogen.core
import mitogen.master
import mitogen.parent import mitogen.parent
from mitogen.core import LOG, IOLOG from mitogen.core import LOG, IOLOG
...@@ -200,7 +199,7 @@ class Process(object): ...@@ -200,7 +199,7 @@ class Process(object):
def _on_stdin(self, msg): def _on_stdin(self, msg):
if msg.is_dead: if msg.is_dead:
IOLOG.debug('%r._on_stdin() -> %r', self, data) IOLOG.debug('%r._on_stdin() -> %r', self, msg)
self.pump.protocol.close() self.pump.protocol.close()
return return
...@@ -437,7 +436,7 @@ def run(dest, router, args, deadline=None, econtext=None): ...@@ -437,7 +436,7 @@ def run(dest, router, args, deadline=None, econtext=None):
fp.write(inspect.getsource(mitogen.core)) fp.write(inspect.getsource(mitogen.core))
fp.write('\n') fp.write('\n')
fp.write('ExternalContext(%r).main()\n' % ( fp.write('ExternalContext(%r).main()\n' % (
_get_econtext_config(context, sock2), _get_econtext_config(econtext, sock2),
)) ))
finally: finally:
fp.close() fp.close()
......
...@@ -28,7 +28,6 @@ ...@@ -28,7 +28,6 @@
# !mitogen: minify_safe # !mitogen: minify_safe
import mitogen.core
import mitogen.parent import mitogen.parent
......
...@@ -28,7 +28,6 @@ ...@@ -28,7 +28,6 @@
# !mitogen: minify_safe # !mitogen: minify_safe
import mitogen.core
import mitogen.parent import mitogen.parent
......
...@@ -28,7 +28,6 @@ ...@@ -28,7 +28,6 @@
# !mitogen: minify_safe # !mitogen: minify_safe
import mitogen.core
import mitogen.parent import mitogen.parent
......
...@@ -37,7 +37,6 @@ contexts. ...@@ -37,7 +37,6 @@ contexts.
import dis import dis
import errno import errno
import imp
import inspect import inspect
import itertools import itertools
import logging import logging
...@@ -50,6 +49,16 @@ import threading ...@@ -50,6 +49,16 @@ import threading
import types import types
import zlib import zlib
try:
# Python >= 3.4, PEP 451 ModuleSpec API
import importlib.machinery
import importlib.util
from _imp import is_builtin as _is_builtin
except ImportError:
# Python < 3.4, PEP 302 Import Hooks
import imp
from imp import is_builtin as _is_builtin
try: try:
import sysconfig import sysconfig
except ImportError: except ImportError:
...@@ -89,6 +98,14 @@ except NameError: ...@@ -89,6 +98,14 @@ except NameError:
RLOG = logging.getLogger('mitogen.ctx') RLOG = logging.getLogger('mitogen.ctx')
# there are some cases where modules are loaded in memory only, such as
# ansible collections, and the module "filename" doesn't actually exist
SPECIAL_FILE_PATHS = {
"__synthetic__",
"<ansible_synthetic_collection_package>"
}
def _stdlib_paths(): def _stdlib_paths():
""" """
Return a set of paths from which Python imports the standard library. Return a set of paths from which Python imports the standard library.
...@@ -100,7 +117,7 @@ def _stdlib_paths(): ...@@ -100,7 +117,7 @@ def _stdlib_paths():
] ]
prefixes = (getattr(sys, a, None) for a in attr_candidates) prefixes = (getattr(sys, a, None) for a in attr_candidates)
version = 'python%s.%s' % sys.version_info[0:2] version = 'python%s.%s' % sys.version_info[0:2]
s = set(os.path.abspath(os.path.join(p, 'lib', version)) s = set(os.path.realpath(os.path.join(p, 'lib', version))
for p in prefixes if p is not None) for p in prefixes if p is not None)
# When running 'unit2 tests/module_finder_test.py' in a Py2 venv on Ubuntu # When running 'unit2 tests/module_finder_test.py' in a Py2 venv on Ubuntu
...@@ -114,7 +131,16 @@ def is_stdlib_name(modname): ...@@ -114,7 +131,16 @@ def is_stdlib_name(modname):
""" """
Return :data:`True` if `modname` appears to come from the standard library. Return :data:`True` if `modname` appears to come from the standard library.
""" """
if imp.is_builtin(modname) != 0: # `(_imp|imp).is_builtin()` isn't a documented part of Python's stdlib.
# Returns 1 if modname names a module that is "builtin" to the the Python
# interpreter (e.g. '_sre'). Otherwise 0 (e.g. 're', 'netifaces').
#
# """
# Main is a little special - imp.is_builtin("__main__") will return False,
# but BuiltinImporter is still the most appropriate initial setting for
# its __loader__ attribute.
# """ -- comment in CPython pylifecycle.c:add_main_module()
if _is_builtin(modname) != 0:
return True return True
module = sys.modules.get(modname) module = sys.modules.get(modname)
...@@ -138,7 +164,7 @@ def is_stdlib_path(path): ...@@ -138,7 +164,7 @@ def is_stdlib_path(path):
) )
def get_child_modules(path): def get_child_modules(path, fullname):
""" """
Return the suffixes of submodules directly neated beneath of the package Return the suffixes of submodules directly neated beneath of the package
directory at `path`. directory at `path`.
...@@ -147,12 +173,19 @@ def get_child_modules(path): ...@@ -147,12 +173,19 @@ def get_child_modules(path):
Path to the module's source code on disk, or some PEP-302-recognized Path to the module's source code on disk, or some PEP-302-recognized
equivalent. Usually this is the module's ``__file__`` attribute, but equivalent. Usually this is the module's ``__file__`` attribute, but
is specified explicitly to avoid loading the module. is specified explicitly to avoid loading the module.
:param str fullname:
Name of the package we're trying to get child modules for
:return: :return:
List of submodule name suffixes. List of submodule name suffixes.
""" """
it = pkgutil.iter_modules([os.path.dirname(path)]) mod_path = os.path.dirname(path)
return [to_text(name) for _, name, _ in it] if mod_path != '':
return [to_text(name) for _, name, _ in pkgutil.iter_modules([mod_path])]
else:
# we loaded some weird package in memory, so we'll see if it has a custom loader we can use
loader = pkgutil.find_loader(fullname)
return [to_text(name) for name, _ in loader.iter_modules(None)] if loader else []
def _looks_like_script(path): def _looks_like_script(path):
...@@ -177,17 +210,31 @@ def _looks_like_script(path): ...@@ -177,17 +210,31 @@ def _looks_like_script(path):
def _py_filename(path): def _py_filename(path):
"""
Returns a tuple of a Python path (if the file looks Pythonic) and whether or not
the Python path is special. Special file paths/modules might only exist in memory
"""
if not path: if not path:
return None return None, False
if path[-4:] in ('.pyc', '.pyo'): if path[-4:] in ('.pyc', '.pyo'):
path = path.rstrip('co') path = path.rstrip('co')
if path.endswith('.py'): if path.endswith('.py'):
return path return path, False
if os.path.exists(path) and _looks_like_script(path): if os.path.exists(path) and _looks_like_script(path):
return path return path, False
basepath = os.path.basename(path)
if basepath in SPECIAL_FILE_PATHS:
return path, True
# return None, False means that the filename passed to _py_filename does not appear
# to be python, and code later will handle when this function returns None
# see https://github.com/dw/mitogen/pull/715#discussion_r532380528 for how this
# decision was made to handle non-python files in this manner
return None, False
def _get_core_source(): def _get_core_source():
...@@ -424,6 +471,9 @@ class FinderMethod(object): ...@@ -424,6 +471,9 @@ class FinderMethod(object):
name according to the running Python interpreter. You'd think this was a name according to the running Python interpreter. You'd think this was a
simple task, right? Naive young fellow, welcome to the real world. simple task, right? Naive young fellow, welcome to the real world.
""" """
def __init__(self):
self.log = LOG.getChild(self.__class__.__name__)
def __repr__(self): def __repr__(self):
return '%s()' % (type(self).__name__,) return '%s()' % (type(self).__name__,)
...@@ -483,38 +533,57 @@ class PkgutilMethod(FinderMethod): ...@@ -483,38 +533,57 @@ class PkgutilMethod(FinderMethod):
Find `fullname` using :func:`pkgutil.find_loader`. Find `fullname` using :func:`pkgutil.find_loader`.
""" """
try: try:
# If fullname refers to a submodule that's not already imported
# then the containing package is imported.
# Pre-'import spec' this returned None, in Python3.6 it raises # Pre-'import spec' this returned None, in Python3.6 it raises
# ImportError. # ImportError.
loader = pkgutil.find_loader(fullname) loader = pkgutil.find_loader(fullname)
except ImportError: except ImportError:
e = sys.exc_info()[1] e = sys.exc_info()[1]
LOG.debug('%r._get_module_via_pkgutil(%r): %s', LOG.debug('%r: find_loader(%r) failed: %s', self, fullname, e)
self, fullname, e)
return None return None
IOLOG.debug('%r._get_module_via_pkgutil(%r) -> %r',
self, fullname, loader)
if not loader: if not loader:
LOG.debug('%r: find_loader(%r) returned %r, aborting',
self, fullname, loader)
return return
try: try:
path = _py_filename(loader.get_filename(fullname)) path = loader.get_filename(fullname)
source = loader.get_source(fullname) except (AttributeError, ImportError, ValueError):
is_pkg = loader.is_package(fullname)
except (AttributeError, ImportError):
# - Per PEP-302, get_source() and is_package() are optional,
# calling them may throw AttributeError.
# - get_filename() may throw ImportError if pkgutil.find_loader() # - get_filename() may throw ImportError if pkgutil.find_loader()
# picks a "parent" package's loader for some crap that's been # picks a "parent" package's loader for some crap that's been
# stuffed in sys.modules, for example in the case of urllib3: # stuffed in sys.modules, for example in the case of urllib3:
# "loader for urllib3.contrib.pyopenssl cannot handle # "loader for urllib3.contrib.pyopenssl cannot handle
# requests.packages.urllib3.contrib.pyopenssl" # requests.packages.urllib3.contrib.pyopenssl"
e = sys.exc_info()[1] e = sys.exc_info()[1]
LOG.debug('%r: loading %r using %r failed: %s', LOG.debug('%r: %r.get_file_name(%r) failed: %r', self, loader, fullname, e)
self, fullname, loader, e) return
path, is_special = _py_filename(path)
try:
source = loader.get_source(fullname)
except AttributeError:
# Per PEP-302, get_source() is optional,
e = sys.exc_info()[1]
LOG.debug('%r: %r.get_source() failed: %r', self, loader, fullname, e)
return return
try:
is_pkg = loader.is_package(fullname)
except AttributeError:
# Per PEP-302, is_package() is optional,
e = sys.exc_info()[1]
LOG.debug('%r: %r.is_package(%r) failed: %r', self, loader, fullname, e)
return
# workaround for special python modules that might only exist in memory
if is_special and is_pkg and not source:
source = '\n'
if path is None or source is None: if path is None or source is None:
LOG.debug('%r: path=%r, source=%r, aborting', self, path, source)
return return
if isinstance(source, mitogen.core.UnicodeType): if isinstance(source, mitogen.core.UnicodeType):
...@@ -534,23 +603,37 @@ class SysModulesMethod(FinderMethod): ...@@ -534,23 +603,37 @@ class SysModulesMethod(FinderMethod):
""" """
Find `fullname` using its :data:`__file__` attribute. Find `fullname` using its :data:`__file__` attribute.
""" """
module = sys.modules.get(fullname) try:
module = sys.modules[fullname]
except KeyError:
LOG.debug('%r: sys.modules[%r] absent, aborting', self, fullname)
return
if not isinstance(module, types.ModuleType): if not isinstance(module, types.ModuleType):
LOG.debug('%r: sys.modules[%r] absent or not a regular module', LOG.debug('%r: sys.modules[%r] is %r, aborting',
self, fullname) self, fullname, module)
return
try:
resolved_name = module.__name__
except AttributeError:
LOG.debug('%r: %r has no __name__, aborting', self, module)
return
if resolved_name != fullname:
LOG.debug('%r: %r.__name__ is %r, aborting',
self, module, resolved_name)
return return
LOG.debug('_get_module_via_sys_modules(%r) -> %r', fullname, module) try:
alleged_name = getattr(module, '__name__', None) path = module.__file__
if alleged_name != fullname: except AttributeError:
LOG.debug('sys.modules[%r].__name__ is incorrect, assuming ' LOG.debug('%r: %r has no __file__, aborting', self, module)
'this is a hacky module alias and ignoring it. '
'Got %r, module object: %r',
fullname, alleged_name, module)
return return
path = _py_filename(getattr(module, '__file__', '')) path, _ = _py_filename(path)
if not path: if not path:
LOG.debug('%r: %r.__file__ is %r, aborting', self, module, path)
return return
LOG.debug('%r: sys.modules[%r]: found %s', self, fullname, path) LOG.debug('%r: sys.modules[%r]: found %s', self, fullname, path)
...@@ -572,7 +655,7 @@ class SysModulesMethod(FinderMethod): ...@@ -572,7 +655,7 @@ class SysModulesMethod(FinderMethod):
return path, source, is_pkg return path, source, is_pkg
class ParentEnumerationMethod(FinderMethod): class ParentImpEnumerationMethod(FinderMethod):
""" """
Attempt to fetch source code by examining the module's (hopefully less Attempt to fetch source code by examining the module's (hopefully less
insane) parent package, and if no insane parents exist, simply use insane) parent package, and if no insane parents exist, simply use
...@@ -595,10 +678,24 @@ class ParentEnumerationMethod(FinderMethod): ...@@ -595,10 +678,24 @@ class ParentEnumerationMethod(FinderMethod):
module object or any parent package's :data:`__path__`, since they have all module object or any parent package's :data:`__path__`, since they have all
been overwritten. Some men just want to watch the world burn. been overwritten. Some men just want to watch the world burn.
""" """
@staticmethod
def _iter_parents(fullname):
"""
>>> list(ParentEnumerationMethod._iter_parents('a'))
[('', 'a')]
>>> list(ParentEnumerationMethod._iter_parents('a.b.c'))
[('a.b', 'c'), ('a', 'b'), ('', 'a')]
"""
while fullname:
fullname, _, modname = str_rpartition(fullname, u'.')
yield fullname, modname
def _find_sane_parent(self, fullname): def _find_sane_parent(self, fullname):
""" """
Iteratively search :data:`sys.modules` for the least indirect parent of Iteratively search :data:`sys.modules` for the least indirect parent of
`fullname` that is loaded and contains a :data:`__path__` attribute. `fullname` that's from the same package and has a :data:`__path__`
attribute.
:return: :return:
`(parent_name, path, modpath)` tuple, where: `(parent_name, path, modpath)` tuple, where:
...@@ -611,21 +708,40 @@ class ParentEnumerationMethod(FinderMethod): ...@@ -611,21 +708,40 @@ class ParentEnumerationMethod(FinderMethod):
* `modpath`: list of module name components leading from `path` * `modpath`: list of module name components leading from `path`
to the target module. to the target module.
""" """
path = None
modpath = [] modpath = []
while True: for pkgname, modname in self._iter_parents(fullname):
pkgname, _, modname = str_rpartition(to_text(fullname), u'.')
modpath.insert(0, modname) modpath.insert(0, modname)
if not pkgname: if not pkgname:
return [], None, modpath return [], None, modpath
pkg = sys.modules.get(pkgname) try:
path = getattr(pkg, '__path__', None) pkg = sys.modules[pkgname]
if pkg and path: except KeyError:
return pkgname.split('.'), path, modpath LOG.debug('%r: sys.modules[%r] absent, skipping', self, pkgname)
continue
try:
resolved_pkgname = pkg.__name__
except AttributeError:
LOG.debug('%r: %r has no __name__, skipping', self, pkg)
continue
LOG.debug('%r: %r lacks __path__ attribute', self, pkgname) if resolved_pkgname != pkgname:
fullname = pkgname LOG.debug('%r: %r.__name__ is %r, skipping',
self, pkg, resolved_pkgname)
continue
try:
path = pkg.__path__
except AttributeError:
LOG.debug('%r: %r has no __path__, skipping', self, pkg)
continue
if not path:
LOG.debug('%r: %r.__path__ is %r, skipping', self, pkg, path)
continue
return pkgname.split('.'), path, modpath
def _found_package(self, fullname, path): def _found_package(self, fullname, path):
path = os.path.join(path, '__init__.py') path = os.path.join(path, '__init__.py')
...@@ -639,7 +755,7 @@ class ParentEnumerationMethod(FinderMethod): ...@@ -639,7 +755,7 @@ class ParentEnumerationMethod(FinderMethod):
def _found_module(self, fullname, path, fp, is_pkg=False): def _found_module(self, fullname, path, fp, is_pkg=False):
try: try:
path = _py_filename(path) path, _ = _py_filename(path)
if not path: if not path:
return return
...@@ -657,6 +773,7 @@ class ParentEnumerationMethod(FinderMethod): ...@@ -657,6 +773,7 @@ class ParentEnumerationMethod(FinderMethod):
def _find_one_component(self, modname, search_path): def _find_one_component(self, modname, search_path):
try: try:
#fp, path, (suffix, _, kind) = imp.find_module(modname, search_path) #fp, path, (suffix, _, kind) = imp.find_module(modname, search_path)
# FIXME The imp module was removed in Python 3.12.
return imp.find_module(modname, search_path) return imp.find_module(modname, search_path)
except ImportError: except ImportError:
e = sys.exc_info()[1] e = sys.exc_info()[1]
...@@ -668,6 +785,9 @@ class ParentEnumerationMethod(FinderMethod): ...@@ -668,6 +785,9 @@ class ParentEnumerationMethod(FinderMethod):
""" """
See implementation for a description of how this works. See implementation for a description of how this works.
""" """
if sys.version_info >= (3, 4):
return None
#if fullname not in sys.modules: #if fullname not in sys.modules:
# Don't attempt this unless a module really exists in sys.modules, # Don't attempt this unless a module really exists in sys.modules,
# else we could return junk. # else we could return junk.
...@@ -696,6 +816,99 @@ class ParentEnumerationMethod(FinderMethod): ...@@ -696,6 +816,99 @@ class ParentEnumerationMethod(FinderMethod):
return self._found_module(fullname, path, fp) return self._found_module(fullname, path, fp)
class ParentSpecEnumerationMethod(ParentImpEnumerationMethod):
    """
    Find a module's source by walking up its dotted name until an already
    imported ancestor package with a usable ``__spec__`` is found, then
    resolving each child name back down with :mod:`importlib` machinery.

    Unlike :class:`ParentImpEnumerationMethod`, :meth:`find` returns an
    absolute path.  Only used on Python >= 3.4, where module specs exist.
    """

    def _find_parent_spec(self, fullname):
        """
        Walk ancestors of `fullname` from nearest to top-level, returning a
        ``(spec, children)`` tuple where `spec` is the spec of the closest
        usable ancestor package (or :data:`None` if the top level is reached
        first) and `children` is the list of name components below it, in
        top-down order.

        :raises ValueError:
            The parent iterator ended without yielding an empty parent name,
            which should be impossible.
        """
        #history = []
        debug = self.log.debug
        children = []
        for parent_name, child_name in self._iter_parents(fullname):
            # Prepend so `children` ends up ordered outermost-first.
            children.insert(0, child_name)
            if not parent_name:
                # Reached the top of the package hierarchy without finding a
                # usable loaded ancestor; caller must resolve from scratch.
                debug('abandoning %r, reached top-level', fullname)
                return None, children
            try:
                parent = sys.modules[parent_name]
            except KeyError:
                debug('skipping %r, not in sys.modules', parent_name)
                continue
            try:
                spec = parent.__spec__
            except AttributeError:
                debug('skipping %r: %r.__spec__ is absent',
                      parent_name, parent)
                continue
            if not spec:
                debug('skipping %r: %r.__spec__=%r',
                      parent_name, parent, spec)
                continue
            if spec.name != parent_name:
                # The module was imported under a different name; its spec
                # cannot be trusted to locate children of `parent_name`.
                debug('skipping %r: %r.__spec__.name=%r does not match',
                      parent_name, parent, spec.name)
                continue
            if not spec.submodule_search_locations:
                # Not a package (or a namespace package with no locations);
                # cannot contain the child we are looking for.
                debug('skipping %r: %r.__spec__.submodule_search_locations=%r',
                      parent_name, parent, spec.submodule_search_locations)
                continue
            return spec, children
        raise ValueError('%s._find_parent_spec(%r) unexpectedly reached bottom'
                         % (self.__class__.__name__, fullname))

    def find(self, fullname):
        """
        Attempt to find the source of module `fullname`, returning a
        ``(path, source, is_pkg)`` tuple on success or :data:`None` on
        failure.

        # Returns absolute path, ParentImpEnumerationMethod returns relative
        # >>> spec_pem.find('six_brokenpkg._six')[::2]
        # ('/Users/alex/src/mitogen/tests/data/importer/six_brokenpkg/_six.py', False)
        """
        if sys.version_info < (3, 4):
            # Module specs do not exist before 3.4; the imp-based sibling
            # method handles those interpreters.
            return None

        fullname = to_text(fullname)
        spec, children = self._find_parent_spec(fullname)
        for child_name in children:
            if spec:
                name = '%s.%s' % (spec.name, child_name)
                submodule_search_locations = spec.submodule_search_locations
            else:
                # No usable ancestor: resolve the first component against
                # the default search path (sys.path).
                name = child_name
                submodule_search_locations = None

            # NOTE(review): private importlib API — may break across
            # Python releases; confirm against supported versions.
            spec = importlib.util._find_spec(name, submodule_search_locations)
            if spec is None:
                self.log.debug('%r spec unavailable from %s', fullname, spec)
                return None

            is_package = spec.submodule_search_locations is not None
            if name != fullname:
                # Still descending; every intermediate component must be a
                # package for the remaining components to exist beneath it.
                if not is_package:
                    self.log.debug('%r appears to be child of non-package %r',
                                   fullname, spec)
                    return None
                continue

            if not spec.has_location:
                self.log.debug('%r.origin cannot be read as a file', spec)
                return None

            if os.path.splitext(spec.origin)[1] != '.py':
                self.log.debug('%r.origin does not contain Python source code',
                               spec)
                return None

            # FIXME This should use loader.get_source()
            with open(spec.origin, 'rb') as f:
                source = f.read()
            return spec.origin, source, is_package

        raise ValueError('%s.find(%r) unexpectedly reached bottom'
                         % (self.__class__.__name__, fullname))
class ModuleFinder(object): class ModuleFinder(object):
""" """
Given the name of a loaded module, make a best-effort attempt at finding Given the name of a loaded module, make a best-effort attempt at finding
...@@ -736,7 +949,8 @@ class ModuleFinder(object): ...@@ -736,7 +949,8 @@ class ModuleFinder(object):
DefectivePython3xMainMethod(), DefectivePython3xMainMethod(),
PkgutilMethod(), PkgutilMethod(),
SysModulesMethod(), SysModulesMethod(),
ParentEnumerationMethod(), ParentSpecEnumerationMethod(),
ParentImpEnumerationMethod(),
] ]
def get_module_source(self, fullname): def get_module_source(self, fullname):
...@@ -971,7 +1185,7 @@ class ModuleResponder(object): ...@@ -971,7 +1185,7 @@ class ModuleResponder(object):
self.minify_secs += mitogen.core.now() - t0 self.minify_secs += mitogen.core.now() - t0
if is_pkg: if is_pkg:
pkg_present = get_child_modules(path) pkg_present = get_child_modules(path, fullname)
self._log.debug('%s is a package at %s with submodules %r', self._log.debug('%s is a package at %s with submodules %r',
fullname, path, pkg_present) fullname, path, pkg_present)
else: else:
...@@ -1134,7 +1348,7 @@ class Broker(mitogen.core.Broker): ...@@ -1134,7 +1348,7 @@ class Broker(mitogen.core.Broker):
def __init__(self, install_watcher=True): def __init__(self, install_watcher=True):
if install_watcher: if install_watcher:
self._watcher = ThreadWatcher.watch( self._watcher = ThreadWatcher.watch(
target=threading.currentThread(), target=mitogen.core.threading__current_thread(),
on_join=self.shutdown, on_join=self.shutdown,
) )
super(Broker, self).__init__() super(Broker, self).__init__()
...@@ -1279,7 +1493,8 @@ class Router(mitogen.parent.Router): ...@@ -1279,7 +1493,8 @@ class Router(mitogen.parent.Router):
self.broker.defer(stream.on_disconnect, self.broker) self.broker.defer(stream.on_disconnect, self.broker)
def disconnect_all(self): def disconnect_all(self):
for stream in self._stream_by_id.values(): # making stream_by_id python3-safe by converting stream_by_id values iter to list
for stream in list(self._stream_by_id.values()):
self.disconnect_stream(stream) self.disconnect_stream(stream)
......
...@@ -35,7 +35,6 @@ Support for operating in a mixed threading/forking environment. ...@@ -35,7 +35,6 @@ Support for operating in a mixed threading/forking environment.
import os import os
import socket import socket
import sys import sys
import threading
import weakref import weakref
import mitogen.core import mitogen.core
...@@ -158,7 +157,7 @@ class Corker(object): ...@@ -158,7 +157,7 @@ class Corker(object):
held. This will not return until each thread acknowledges it has ceased held. This will not return until each thread acknowledges it has ceased
execution. execution.
""" """
current = threading.currentThread() current = mitogen.core.threading__current_thread()
s = mitogen.core.b('CORK') * ((128 // 4) * 1024) s = mitogen.core.b('CORK') * ((128 // 4) * 1024)
self._rsocks = [] self._rsocks = []
......
...@@ -34,7 +34,7 @@ sent to any child context that is due to become a parent, due to recursive ...@@ -34,7 +34,7 @@ sent to any child context that is due to become a parent, due to recursive
connection. connection.
""" """
import codecs import binascii
import errno import errno
import fcntl import fcntl
import getpass import getpass
...@@ -1027,7 +1027,7 @@ class KqueuePoller(mitogen.core.Poller): ...@@ -1027,7 +1027,7 @@ class KqueuePoller(mitogen.core.Poller):
class EpollPoller(mitogen.core.Poller): class EpollPoller(mitogen.core.Poller):
""" """
Poller based on the Linux :linux:man2:`epoll` interface. Poller based on the Linux :linux:man7:`epoll` interface.
""" """
SUPPORTED = hasattr(select, 'epoll') SUPPORTED = hasattr(select, 'epoll')
_repr = 'EpollPoller()' _repr = 'EpollPoller()'
...@@ -1405,13 +1405,24 @@ class Connection(object): ...@@ -1405,13 +1405,24 @@ class Connection(object):
# file descriptor 0 as 100, creates a pipe, then execs a new interpreter # file descriptor 0 as 100, creates a pipe, then execs a new interpreter
# with a custom argv. # with a custom argv.
# * Optimized for minimum byte count after minification & compression. # * Optimized for minimum byte count after minification & compression.
# The script preamble_size.py measures this.
# * 'CONTEXT_NAME' and 'PREAMBLE_COMPRESSED_LEN' are substituted with # * 'CONTEXT_NAME' and 'PREAMBLE_COMPRESSED_LEN' are substituted with
# their respective values. # their respective values.
# * CONTEXT_NAME must be prefixed with the name of the Python binary in # * CONTEXT_NAME must be prefixed with the name of the Python binary in
# order to allow virtualenvs to detect their install prefix. # order to allow virtualenvs to detect their install prefix.
# * For Darwin, OS X installs a craptacular argv0-introspecting Python #
# version switcher as /usr/bin/python. Override attempts to call it # macOS tweaks for Python 2.7 must be kept in sync with the the Ansible
# with an explicit call to python2.7 # module test_echo_module, used by the integration tests.
# * macOS <= 10.14 (Darwin <= 18) install an unreliable Python version
# switcher as /usr/bin/python, which introspects argv0. To workaround
# it we redirect attempts to call /usr/bin/python with an explicit
# call to /usr/bin/python2.7. macOS 10.15 (Darwin 19) removed it.
    # * macOS 11.x (Darwin 20, Big Sur) and macOS 12.x (Darwin 21, Monterey)
# do something slightly different. The Python executable is patched to
# perform an extra execvp(). I don't fully understand the details, but
# setting PYTHON_LAUNCHED_FROM_WRAPPER=1 avoids it.
# * macOS 12.3+ (Darwin 21.4+, Monterey) doesn't ship Python.
# https://developer.apple.com/documentation/macos-release-notes/macos-12_3-release-notes#Python
# #
# Locals: # Locals:
# R: read side of interpreter stdin. # R: read side of interpreter stdin.
...@@ -1434,12 +1445,12 @@ class Connection(object): ...@@ -1434,12 +1445,12 @@ class Connection(object):
os.close(r) os.close(r)
os.close(W) os.close(W)
os.close(w) os.close(w)
if sys.platform == 'darwin' and sys.executable == '/usr/bin/python': if os.uname()[0]=='Darwin'and os.uname()[2][:2]<'19'and sys.executable=='/usr/bin/python':sys.executable='/usr/bin/python2.7'
sys.executable += sys.version[:3] if os.uname()[0]=='Darwin'and os.uname()[2][:2]in'2021'and sys.version[:3]=='2.7':os.environ['PYTHON_LAUNCHED_FROM_WRAPPER']='1'
os.environ['ARGV0']=sys.executable os.environ['ARGV0']=sys.executable
os.execl(sys.executable,sys.executable+'(mitogen:CONTEXT_NAME)') os.execl(sys.executable,sys.executable+'(mitogen:CONTEXT_NAME)')
os.write(1,'MITO000\n'.encode()) os.write(1,'MITO000\n'.encode())
C=_(os.fdopen(0,'rb').read(PREAMBLE_COMPRESSED_LEN),'zip') C=zlib.decompress(os.fdopen(0,'rb').read(PREAMBLE_COMPRESSED_LEN))
fp=os.fdopen(W,'wb',0) fp=os.fdopen(W,'wb',0)
fp.write(C) fp.write(C)
fp.close() fp.close()
...@@ -1465,22 +1476,22 @@ class Connection(object): ...@@ -1465,22 +1476,22 @@ class Connection(object):
def get_boot_command(self): def get_boot_command(self):
source = inspect.getsource(self._first_stage) source = inspect.getsource(self._first_stage)
source = textwrap.dedent('\n'.join(source.strip().split('\n')[2:])) source = textwrap.dedent('\n'.join(source.strip().split('\n')[2:]))
source = source.replace(' ', '\t') source = source.replace(' ', ' ')
source = source.replace('CONTEXT_NAME', self.options.remote_name) source = source.replace('CONTEXT_NAME', self.options.remote_name)
preamble_compressed = self.get_preamble() preamble_compressed = self.get_preamble()
source = source.replace('PREAMBLE_COMPRESSED_LEN', source = source.replace('PREAMBLE_COMPRESSED_LEN',
str(len(preamble_compressed))) str(len(preamble_compressed)))
compressed = zlib.compress(source.encode(), 9) compressed = zlib.compress(source.encode(), 9)
encoded = codecs.encode(compressed, 'base64').replace(b('\n'), b('')) encoded = binascii.b2a_base64(compressed).replace(b('\n'), b(''))
# We can't use bytes.decode() in 3.x since it was restricted to always
# return unicode, so codecs.decode() is used instead. In 3.x # Just enough to decode, decompress, and exec the first stage.
# codecs.decode() requires a bytes object. Since we must be compatible # Priorities: wider compatibility, faster startup, shorter length.
# with 2.4 (no bytes literal), an extra .encode() either returns the # `import os` here, instead of stage 1, to save a few bytes.
# same str (2.x) or an equivalent bytes (3.x). # `sys.path=...` for https://github.com/python/cpython/issues/115911.
return self.get_python_argv() + [ return self.get_python_argv() + [
'-c', '-c',
'import codecs,os,sys;_=codecs.decode;' 'import sys;sys.path=[p for p in sys.path if p];import binascii,os,zlib;'
'exec(_(_("%s".encode(),"base64"),"zip"))' % (encoded.decode(),) 'exec(zlib.decompress(binascii.a2b_base64("%s")))' % (encoded.decode(),),
] ]
def get_econtext_config(self): def get_econtext_config(self):
...@@ -1502,7 +1513,7 @@ class Connection(object): ...@@ -1502,7 +1513,7 @@ class Connection(object):
def get_preamble(self): def get_preamble(self):
suffix = ( suffix = (
'\nExternalContext(%r).main()\n' %\ '\nExternalContext(%r).main()\n' %
(self.get_econtext_config(),) (self.get_econtext_config(),)
) )
partial = get_core_source_partial() partial = get_core_source_partial()
...@@ -2501,6 +2512,9 @@ class Router(mitogen.core.Router): ...@@ -2501,6 +2512,9 @@ class Router(mitogen.core.Router):
def ssh(self, **kwargs): def ssh(self, **kwargs):
return self.connect(u'ssh', **kwargs) return self.connect(u'ssh', **kwargs)
def podman(self, **kwargs):
return self.connect(u'podman', **kwargs)
class Reaper(object): class Reaper(object):
""" """
......
# Copyright 2019, David Wilson
# Copyright 2021, Mitogen contributors
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# !mitogen: minify_safe
import logging
import mitogen.parent
LOG = logging.getLogger(__name__)
class Options(mitogen.parent.Options):
    """
    Options for a podman connection.

    :param str container:
        Name or ID of the target container. Required.
    :param str podman_path:
        Path to the ``podman`` executable. Defaults to ``'podman'``.
    :param str username:
        User to execute as inside the container, if any.
    """
    # Class-level defaults; overridden per instance in __init__.
    container = None
    username = None
    podman_path = 'podman'

    def __init__(self, container=None, podman_path=None, username=None,
                 **kwargs):
        super(Options, self).__init__(**kwargs)
        assert container is not None
        self.container = container
        # Fall back to the class-level defaults when not supplied.
        self.podman_path = podman_path or self.podman_path
        self.username = username or self.username
class Connection(mitogen.parent.Connection):
    """
    Connection to a running container via ``podman exec``.
    """
    options_class = Options
    child_is_immediate_subprocess = False

    # TODO: better way of capturing errors such as "No such container."
    create_child_args = {
        'merge_stdio': True
    }

    def _get_name(self):
        # Stream name shown in logs, e.g. u'podman.mycontainer'.
        return u'podman.' + self.options.container

    def get_boot_command(self):
        """
        Return the argv used to spawn the remote interpreter, wrapping the
        parent class's command in a ``podman exec`` invocation.
        """
        opts = self.options
        bits = [opts.podman_path, 'exec']
        if opts.username:
            bits.append('--user=' + opts.username)
        bits.extend(["--interactive", "--", opts.container])
        bits.extend(super(Connection, self).get_boot_command())
        return bits
...@@ -90,7 +90,7 @@ def merge_stats(outpath, inpaths): ...@@ -90,7 +90,7 @@ def merge_stats(outpath, inpaths):
break break
time.sleep(0.2) time.sleep(0.2)
stats.dump_stats(outpath) pstats.dump_stats(outpath)
def generate_stats(outpath, tmpdir): def generate_stats(outpath, tmpdir):
......
...@@ -31,7 +31,6 @@ ...@@ -31,7 +31,6 @@
import grp import grp
import logging import logging
import os import os
import os.path
import pprint import pprint
import pwd import pwd
import stat import stat
...@@ -74,7 +73,7 @@ else: ...@@ -74,7 +73,7 @@ else:
@mitogen.core.takes_router @mitogen.core.takes_router
def get_or_create_pool(size=None, router=None): def get_or_create_pool(size=None, router=None, context=None):
global _pool global _pool
global _pool_pid global _pool_pid
...@@ -84,6 +83,12 @@ def get_or_create_pool(size=None, router=None): ...@@ -84,6 +83,12 @@ def get_or_create_pool(size=None, router=None):
_pool_lock.acquire() _pool_lock.acquire()
try: try:
if _pool_pid != my_pid: if _pool_pid != my_pid:
if router is None:
# fallback to trying to get router from context if that exists
if context is not None:
router = context.router
else:
raise ValueError("Unable to create Pool! Missing router.")
_pool = Pool( _pool = Pool(
router, router,
services=[], services=[],
...@@ -103,7 +108,8 @@ def get_or_create_pool(size=None, router=None): ...@@ -103,7 +108,8 @@ def get_or_create_pool(size=None, router=None):
def get_thread_name(): def get_thread_name():
return threading.currentThread().getName() thread = mitogen.core.threading__current_thread()
return mitogen.core.threading__thread_name(thread)
def call(service_name, method_name, call_context=None, **kwargs): def call(service_name, method_name, call_context=None, **kwargs):
...@@ -119,7 +125,7 @@ def call(service_name, method_name, call_context=None, **kwargs): ...@@ -119,7 +125,7 @@ def call(service_name, method_name, call_context=None, **kwargs):
if call_context: if call_context:
return call_context.call_service(service_name, method_name, **kwargs) return call_context.call_service(service_name, method_name, **kwargs)
else: else:
pool = get_or_create_pool() pool = get_or_create_pool(context=kwargs.get('context'))
invoker = pool.get_invoker(service_name, msg=None) invoker = pool.get_invoker(service_name, msg=None)
return getattr(invoker.service, method_name)(**kwargs) return getattr(invoker.service, method_name)(**kwargs)
...@@ -685,6 +691,7 @@ class PushFileService(Service): ...@@ -685,6 +691,7 @@ class PushFileService(Service):
super(PushFileService, self).__init__(**kwargs) super(PushFileService, self).__init__(**kwargs)
self._lock = threading.Lock() self._lock = threading.Lock()
self._cache = {} self._cache = {}
self._extra_sys_paths = set()
self._waiters = {} self._waiters = {}
self._sent_by_stream = {} self._sent_by_stream = {}
...@@ -738,30 +745,59 @@ class PushFileService(Service): ...@@ -738,30 +745,59 @@ class PushFileService(Service):
@arg_spec({ @arg_spec({
'context': mitogen.core.Context, 'context': mitogen.core.Context,
'paths': list, 'paths': list,
'modules': list, # 'modules': list, TODO, modules was passed into this func but it's not used yet
}) })
def propagate_paths_and_modules(self, context, paths, modules): def propagate_paths_and_modules(self, context, paths, overridden_sources=None, extra_sys_paths=None):
""" """
One size fits all method to ensure a target context has been preloaded One size fits all method to ensure a target context has been preloaded
with a set of small files and Python modules. with a set of small files and Python modules.
:param dict overridden_sources:
Optional dict containing source code to override path's source code
:param extra_sys_paths:
Loads additional sys paths for use in finding modules; beneficial
in situations like loading Ansible Collections because source code
dependencies come from different file paths than where the source lives
""" """
for path in paths: for path in paths:
self.propagate_to(context, mitogen.core.to_text(path)) overridden_source = None
#self.router.responder.forward_modules(context, modules) TODO if overridden_sources is not None and path in overridden_sources:
overridden_source = overridden_sources[path]
self.propagate_to(context, mitogen.core.to_text(path), overridden_source)
# self.router.responder.forward_modules(context, modules) TODO
# NOTE: could possibly be handled by the above TODO, but not sure how forward_modules works enough
# to know for sure, so for now going to pass the sys paths themselves and have `propagate_to`
# load them up in sys.path for later import
# ensure we don't add to sys.path the same path we've already seen
for extra_path in extra_sys_paths:
# store extra paths in cached set for O(1) lookup
if extra_path not in self._extra_sys_paths:
# not sure if it matters but we could prepend to sys.path instead if we need to
sys.path.append(extra_path)
self._extra_sys_paths.add(extra_path)
@expose(policy=AllowParents()) @expose(policy=AllowParents())
@arg_spec({ @arg_spec({
'context': mitogen.core.Context, 'context': mitogen.core.Context,
'path': mitogen.core.FsPathTypes, 'path': mitogen.core.FsPathTypes,
}) })
def propagate_to(self, context, path): def propagate_to(self, context, path, overridden_source=None):
"""
If the optional parameter 'overridden_source' is passed, use
that instead of the path's code as source code. This works around some bugs
of source modules such as relative imports on unsupported Python versions
"""
if path not in self._cache: if path not in self._cache:
LOG.debug('caching small file %s', path) LOG.debug('caching small file %s', path)
if overridden_source is None:
fp = open(path, 'rb') fp = open(path, 'rb')
try: try:
self._cache[path] = mitogen.core.Blob(fp.read()) self._cache[path] = mitogen.core.Blob(fp.read())
finally: finally:
fp.close() fp.close()
else:
self._cache[path] = mitogen.core.Blob(overridden_source)
self._forward(context, path) self._forward(context, path)
@expose(policy=AllowParents()) @expose(policy=AllowParents())
......
...@@ -72,7 +72,10 @@ PASSWORD_PROMPT_PATTERN = re.compile( ...@@ -72,7 +72,10 @@ PASSWORD_PROMPT_PATTERN = re.compile(
) )
HOSTKEY_REQ_PATTERN = re.compile( HOSTKEY_REQ_PATTERN = re.compile(
b(r'are you sure you want to continue connecting \(yes/no\)\?'), b(
r'are you sure you want to continue connecting '
r'\(yes/no(?:/\[fingerprint\])?\)\?'
),
re.I re.I
) )
...@@ -221,6 +224,14 @@ class Connection(mitogen.parent.Connection): ...@@ -221,6 +224,14 @@ class Connection(mitogen.parent.Connection):
child_is_immediate_subprocess = False child_is_immediate_subprocess = False
# strings that, if escaped, cause problems creating connections
# example: `source /opt/rh/rh-python36/enable && python`
# is an acceptable ansible_python_version but shlex would quote the &&
# and prevent python from executing
SHLEX_IGNORE = [
"&&"
]
def _get_name(self): def _get_name(self):
s = u'ssh.' + mitogen.core.to_text(self.options.hostname) s = u'ssh.' + mitogen.core.to_text(self.options.hostname)
if self.options.port and self.options.port != 22: if self.options.port and self.options.port != 22:
...@@ -291,4 +302,9 @@ class Connection(mitogen.parent.Connection): ...@@ -291,4 +302,9 @@ class Connection(mitogen.parent.Connection):
bits += self.options.ssh_args bits += self.options.ssh_args
bits.append(self.options.hostname) bits.append(self.options.hostname)
base = super(Connection, self).get_boot_command() base = super(Connection, self).get_boot_command()
return bits + [shlex_quote(s).strip() for s in base]
base_parts = []
for s in base:
val = s if s in self.SHLEX_IGNORE else shlex_quote(s).strip()
base_parts.append(val)
return bits + base_parts
...@@ -256,6 +256,8 @@ class Connection(mitogen.parent.Connection): ...@@ -256,6 +256,8 @@ class Connection(mitogen.parent.Connection):
# Note: sudo did not introduce long-format option processing until July # Note: sudo did not introduce long-format option processing until July
# 2013, so even though we parse long-format options, supply short-form # 2013, so even though we parse long-format options, supply short-form
# to the sudo command. # to the sudo command.
boot_cmd = super(Connection, self).get_boot_command()
bits = [self.options.sudo_path, '-u', self.options.username] bits = [self.options.sudo_path, '-u', self.options.username]
if self.options.preserve_env: if self.options.preserve_env:
bits += ['-E'] bits += ['-E']
...@@ -268,4 +270,25 @@ class Connection(mitogen.parent.Connection): ...@@ -268,4 +270,25 @@ class Connection(mitogen.parent.Connection):
if self.options.selinux_type: if self.options.selinux_type:
bits += ['-t', self.options.selinux_type] bits += ['-t', self.options.selinux_type]
return bits + ['--'] + super(Connection, self).get_boot_command() # special handling for bash builtins
# TODO: more efficient way of doing this, at least
# it's only 1 iteration of boot_cmd to go through
source_found = False
for cmd in boot_cmd[:]:
# rip `source` from boot_cmd if it exists; sudo.py can't run this
# even with -i or -s options
# since we've already got our ssh command working we shouldn't
# need to source anymore
# couldn't figure out how to get this to work using sudo flags
if 'source' == cmd:
boot_cmd.remove(cmd)
source_found = True
continue
if source_found:
# remove words until we hit the python interpreter call
if not cmd.endswith('python'):
boot_cmd.remove(cmd)
else:
break
return bits + ['--'] + boot_cmd
...@@ -29,14 +29,13 @@ ...@@ -29,14 +29,13 @@
# !mitogen: minify_safe # !mitogen: minify_safe
import datetime import datetime
import functools
import logging import logging
import os import os
import sys import sys
import mitogen
import mitogen.core import mitogen.core
import mitogen.master import mitogen.master
import mitogen.parent
iteritems = getattr(dict, 'iteritems', dict.items) iteritems = getattr(dict, 'iteritems', dict.items)
...@@ -173,12 +172,9 @@ def with_router(func): ...@@ -173,12 +172,9 @@ def with_router(func):
do_stuff(blah, 123) do_stuff(blah, 123)
""" """
@functools.wraps(func)
def wrapper(*args, **kwargs): def wrapper(*args, **kwargs):
return run_with_router(func, *args, **kwargs) return run_with_router(func, *args, **kwargs)
if mitogen.core.PY3:
wrapper.func_name = func.__name__
else:
wrapper.func_name = func.func_name
return wrapper return wrapper
...@@ -194,10 +190,13 @@ PASSTHROUGH = ( ...@@ -194,10 +190,13 @@ PASSTHROUGH = (
def cast(obj): def cast(obj):
""" """
Return obj (or a copy) with subtypes of builtins cast to their supertype.
Subtypes of those in :data:`PASSTHROUGH` are not modified.
Many tools love to subclass built-in types in order to implement useful Many tools love to subclass built-in types in order to implement useful
functionality, such as annotating the safety of a Unicode string, or adding functionality, such as annotating the safety of a Unicode string, or adding
additional methods to a dict. However, cPickle loves to preserve those additional methods to a dict. However :py:mod:`pickle` serializes these
subtypes during serialization, resulting in CallError during :meth:`call exactly, leading to :exc:`mitogen.CallError` during :meth:`Context.call
<mitogen.parent.Context.call>` in the target when it tries to deserialize <mitogen.parent.Context.call>` in the target when it tries to deserialize
the data. the data.
...@@ -205,6 +204,9 @@ def cast(obj): ...@@ -205,6 +204,9 @@ def cast(obj):
custom sub-types removed. The functionality is not default since the custom sub-types removed. The functionality is not default since the
resulting walk may be computationally expensive given a large enough graph. resulting walk may be computationally expensive given a large enough graph.
Raises :py:exc:`TypeError` if an unknown subtype is encountered, or
casting does not return the desired supertype.
See :ref:`serialization-rules` for a list of supported types. See :ref:`serialization-rules` for a list of supported types.
:param obj: :param obj:
...@@ -219,8 +221,16 @@ def cast(obj): ...@@ -219,8 +221,16 @@ def cast(obj):
if isinstance(obj, PASSTHROUGH): if isinstance(obj, PASSTHROUGH):
return obj return obj
if isinstance(obj, mitogen.core.UnicodeType): if isinstance(obj, mitogen.core.UnicodeType):
return mitogen.core.UnicodeType(obj) return _cast(obj, mitogen.core.UnicodeType)
if isinstance(obj, mitogen.core.BytesType): if isinstance(obj, mitogen.core.BytesType):
return mitogen.core.BytesType(obj) return _cast(obj, mitogen.core.BytesType)
raise TypeError("Cannot serialize: %r: %r" % (type(obj), obj)) raise TypeError("Cannot serialize: %r: %r" % (type(obj), obj))
def _cast(obj, desired_type):
result = desired_type(obj)
if type(result) is not desired_type:
raise TypeError("Cast of %r to %r failed, got %r"
% (type(obj), desired_type, type(result)))
return result
[bdist_wheel]
universal = 1
[coverage:run] [coverage:run]
branch = true branch = true
source = source =
......
...@@ -26,6 +26,7 @@ ...@@ -26,6 +26,7 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import ast
import os import os
from setuptools import find_packages, setup from setuptools import find_packages, setup
...@@ -37,29 +38,47 @@ def grep_version(): ...@@ -37,29 +38,47 @@ def grep_version():
for line in fp: for line in fp:
if line.startswith('__version__'): if line.startswith('__version__'):
_, _, s = line.partition('=') _, _, s = line.partition('=')
return '.'.join(map(str, eval(s))) parts = ast.literal_eval(s.strip())
return '.'.join(str(part) for part in parts)
def long_description():
here = os.path.dirname(__file__)
readme_path = os.path.join(here, 'README.md')
with open(readme_path) as fp:
readme = fp.read()
return readme
setup( setup(
name = 'mitogen', name = 'mitogen',
version = grep_version(), version = grep_version(),
description = 'Library for writing distributed self-replicating programs.', description = 'Library for writing distributed self-replicating programs.',
long_description = long_description(),
long_description_content_type='text/markdown',
author = 'David Wilson', author = 'David Wilson',
license = 'New BSD', license = 'New BSD',
url = 'https://github.com/dw/mitogen/', url = 'https://github.com/mitogen-hq/mitogen/',
packages = find_packages(exclude=['tests', 'examples']), packages = find_packages(exclude=['tests', 'examples']),
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
zip_safe = False, zip_safe = False,
classifiers = [ classifiers = [
'Environment :: Console', 'Environment :: Console',
'Framework :: Ansible',
'Intended Audience :: System Administrators', 'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License', 'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX', 'Operating System :: POSIX',
'Programming Language :: Python', 'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'Programming Language :: Python :: 3.12',
'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: CPython',
'Topic :: System :: Distributed Computing', 'Topic :: System :: Distributed Computing',
'Topic :: System :: Systems Administration', 'Topic :: System :: Systems Administration',
......
import errno
import io
import logging
import os
import random
import re
import socket
import stat
import sys
import threading
import time
import traceback
import unittest
try:
import configparser
except ImportError:
import ConfigParser as configparser
import psutil
if sys.version_info < (3, 0):
import subprocess32 as subprocess
else:
import subprocess
import mitogen.core
import mitogen.fork
import mitogen.master
import mitogen.utils
try:
import faulthandler
except ImportError:
faulthandler = None
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
try:
BaseException
except NameError:
BaseException = Exception
# Module-level logger for the test helpers in this file.
LOG = logging.getLogger(__name__)
# Directory containing this file; every other fixture path derives from it.
TESTS_DIR = os.path.join(os.path.dirname(__file__))
ANSIBLE_LIB_DIR = os.path.join(TESTS_DIR, 'ansible', 'lib')
ANSIBLE_MODULE_UTILS_DIR = os.path.join(TESTS_DIR, 'ansible', 'lib', 'module_utils')
ANSIBLE_MODULES_DIR = os.path.join(TESTS_DIR, 'ansible', 'lib', 'modules')
DATA_DIR = os.path.join(TESTS_DIR, 'data')
MODS_DIR = os.path.join(TESTS_DIR, 'data', 'importer')
# Make test data and importer fixture modules importable by the tests.
sys.path.append(DATA_DIR)
sys.path.append(MODS_DIR)
# Configure file logging and the fault handler only when running as the
# top-level process (mitogen.is_master is presumably False inside mitogen
# child contexts — confirm against mitogen docs).
if mitogen.is_master:
    mitogen.utils.log_to_file()
    if faulthandler is not None:
        faulthandler.enable()
#
# Temporary hack: Operon changed logging somewhat, and this broke LogCapturer /
# log_handler_test.
#
mitogen.core.LOG.propagate = True
def base_executable(executable=None):
    '''Return the path of the Python executable used to create the virtualenv.

    :param executable:
        Interpreter path inside the virtualenv to resolve; defaults to
        ``sys.executable``.
    :raises ValueError:
        If no executable was supplied and ``sys.executable`` is unset.
    '''
    # https://docs.python.org/3/library/venv.html
    # https://github.com/pypa/virtualenv/blob/main/src/virtualenv/discovery/py_info.py
    # https://virtualenv.pypa.io/en/16.7.9/reference.html#compatibility-with-the-stdlib-venv-module
    if executable is None:
        executable = sys.executable
    if not executable:
        raise ValueError('could not determine Python executable')
    # Python 3.7+ records the creating interpreter in sys._base_executable.
    try:
        base_executable = sys._base_executable
    except AttributeError:
        base_executable = None
    if base_executable and base_executable != executable:
        return base_executable
    # Python 2.x only has sys.base_prefix if running outside a virtualenv.
    try:
        sys.base_prefix
    except AttributeError:
        # Python 2.x outside a virtualenv
        return executable
    # Python 3.3+ has sys.base_prefix. In a virtualenv it differs to sys.prefix.
    if sys.base_prefix == sys.prefix:
        return executable
    # Follow symlinks while still inside the virtualenv (common for
    # virtualenvs created with --symlinks).
    while executable.startswith(sys.prefix) and stat.S_ISLNK(os.lstat(executable).st_mode):
        dirname = os.path.dirname(executable)
        target = os.path.join(dirname, os.readlink(executable))
        executable = os.path.abspath(os.path.normpath(target))
    if executable.startswith(sys.base_prefix):
        return executable
    # Virtualenvs record details in pyvenv.cfg
    parser = configparser.RawConfigParser()
    with io.open(os.path.join(sys.prefix, 'pyvenv.cfg'), encoding='utf-8') as f:
        # pyvenv.cfg has no section header; fake one so configparser accepts it.
        content = u'[virtualenv]\n' + f.read()
    try:
        parser.read_string(content)
    except AttributeError:
        # Python 2.x configparser lacks read_string().
        parser.readfp(io.StringIO(content))
    # virtualenv style pyvenv.cfg includes the base executable.
    # venv style pyvenv.cfg doesn't.
    try:
        return parser.get(u'virtualenv', u'base-executable')
    except configparser.NoOptionError:
        pass
    basename = os.path.basename(executable)
    home = parser.get(u'virtualenv', u'home')
    return os.path.join(home, basename)
def data_path(suffix):
    """Return the path of *suffix* inside the test data directory, tightening
    permissions on SSH private keys as a side effect."""
    full = os.path.join(DATA_DIR, suffix)
    if full.endswith('.key'):
        # SSH is funny about private key permissions: mode must be 0600.
        os.chmod(full, stat.S_IRUSR | stat.S_IWUSR)
    return full
def threading__thread_is_alive(thread):
    """Return whether *thread* is alive (Python version compatibility shim).

    On Python >= 3.8 thread.isAlive() is deprecated (removed in Python 3.9).
    On Python <= 2.5 thread.is_alive() isn't present (added in Python 2.6).
    """
    modern = getattr(thread, 'is_alive', None)
    if modern is not None:
        return modern()
    return thread.isAlive()
def threading_thread_name(thread):
    """Return *thread*'s name (Python version compatibility shim).

    Thread.name is available in Python 2.6+; Thread.getName() is the older
    spelling, deprecated in Python 3.10+.
    """
    if hasattr(thread, 'name'):
        return thread.name
    return thread.getName()
def wait_for_port(
    host,
    port,
    pattern=None,
    connect_timeout=0.5,
    receive_timeout=0.5,
    overall_timeout=5.0,
    sleep=0.1,
):
    """Attempt to connect to host/port, for up to overall_timeout seconds.
    If a regex pattern is supplied try to find it in the initial data
    (e.g. an SSH banner).

    :param host: Hostname or address to connect to.
    :param port: TCP port number.
    :param pattern: Optional regex searched for in the initial data received.
    :param connect_timeout: Per-attempt connect() timeout, in seconds.
    :param receive_timeout: Per-recv() timeout, in seconds.
    :param overall_timeout: Total time budget for all attempts, in seconds.
    :param sleep: Delay between retries, in seconds.
    Return None on success, or raise on error (socket.timeout when the
    overall deadline expires).
    """
    start = mitogen.core.now()
    end = start + overall_timeout
    addr = (host, port)
    while mitogen.core.now() < end:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(connect_timeout)
        try:
            sock.connect(addr)
        except socket.error:
            # Failed to connect. So wait then retry.
            time.sleep(sleep)
            continue
        if not pattern:
            # Success: We connected & there's no banner check to perform.
            sock.shutdown(socket.SHUT_RDWR)
            sock.close()
            return
        sock.settimeout(receive_timeout)
        data = mitogen.core.b('')
        found = False
        while mitogen.core.now() < end:
            try:
                resp = sock.recv(1024)
            except socket.timeout:
                # Server stayed up, but had no data. Retry the recv().
                continue
            if not resp:
                # Server went away. Wait then retry the connection.
                time.sleep(sleep)
                break
            data += resp
            if re.search(mitogen.core.b(pattern), data):
                found = True
                break
        try:
            sock.shutdown(socket.SHUT_RDWR)
        except socket.error:
            e = sys.exc_info()[1]
            # On Mac OS X - a BSD variant - the above code only succeeds if the
            # operating system thinks that the socket is still open when
            # shutdown() is invoked. If Python is too slow and the FIN packet
            # arrives before that statement can be reached, then OS X kills the
            # sock.shutdown() statement with:
            #
            #     socket.error: [Errno 57] Socket is not connected
            #
            # Protect shutdown() with a try...except that catches the
            # socket.error, test to make sure Errno is right, and ignore it if
            # Errno matches.
            if e.errno == 57:
                pass
            else:
                raise
        sock.close()
        if found:
            # Success: We received the banner & found the desired pattern
            return
    # while/else: reached only when the deadline expired without success.
    else:
        # Failure: The overall timeout expired
        if pattern:
            raise socket.timeout('Timed out while searching for %r from %s:%s'
                                 % (pattern, host, port))
        else:
            raise socket.timeout('Timed out while connecting to %s:%s'
                                 % (host, port))
def sync_with_broker(broker, timeout=10.0):
    """
    Insert a synchronization barrier between the calling thread and the Broker
    thread, ensuring it has completed at least one full IO loop before
    returning.

    Used to block while asynchronous stuff (like defer()) happens on the
    broker.
    """
    latch = mitogen.core.Latch()
    broker.defer(latch.put, None)
    latch.get(timeout=timeout)
def log_fd_calls():
    """Monkey-patch os.pipe, socket.socketpair, os.dup2 and os.dup so every
    call prints its arguments/result and a short stack trace to stdout.

    Debug helper for hunting file descriptor leaks. The patch is process-wide
    and is never undone.
    """
    mypid = os.getpid()
    # Serializes write + traceback output so concurrent threads don't
    # interleave their reports.
    l = threading.Lock()
    real_pipe = os.pipe
    def pipe():
        l.acquire()
        try:
            rv = real_pipe()
            # Only report from the original process; forked children inherit
            # the patched functions but stay quiet.
            if mypid == os.getpid():
                sys.stdout.write('\n%s\n' % (rv,))
                traceback.print_stack(limit=3)
                sys.stdout.write('\n')
            return rv
        finally:
            l.release()
    os.pipe = pipe
    real_socketpair = socket.socketpair
    def socketpair(*args):
        l.acquire()
        try:
            rv = real_socketpair(*args)
            if mypid == os.getpid():
                sys.stdout.write('\n%s -> %s\n' % (args, rv))
                traceback.print_stack(limit=3)
                sys.stdout.write('\n')
            return rv
        finally:
            l.release()
    socket.socketpair = socketpair
    real_dup2 = os.dup2
    def dup2(*args):
        l.acquire()
        try:
            # NOTE(review): drops os.dup2()'s return value (the new fd on
            # Python 3) — callers relying on it would see None.
            real_dup2(*args)
            if mypid == os.getpid():
                sys.stdout.write('\n%s\n' % (args,))
                traceback.print_stack(limit=3)
                sys.stdout.write('\n')
        finally:
            l.release()
    os.dup2 = dup2
    real_dup = os.dup
    def dup(*args):
        l.acquire()
        try:
            rv = real_dup(*args)
            if mypid == os.getpid():
                sys.stdout.write('\n%s -> %s\n' % (args, rv))
                traceback.print_stack(limit=3)
                sys.stdout.write('\n')
            return rv
        finally:
            l.release()
    os.dup = dup
class CaptureStreamHandler(logging.StreamHandler):
    """StreamHandler that additionally records every record passed to
    emit() in the ``msgs`` list, oldest first."""
    def __init__(self, *args, **kwargs):
        logging.StreamHandler.__init__(self, *args, **kwargs)
        # LogRecord instances seen by emit().
        self.msgs = []

    def emit(self, msg):
        """Remember *msg*, then defer to normal StreamHandler behaviour."""
        self.msgs.append(msg)
        return logging.StreamHandler.emit(self, msg)
class LogCapturer(object):
    """Capture output of a named logger for the duration of a test.

    While active, the logger is forced to DEBUG with propagation disabled and
    a single capturing handler installed; stop() restores the previous state.
    Usable directly or as a context manager.
    """
    def __init__(self, name=None):
        self.sio = StringIO()
        self.logger = logging.getLogger(name)
        self.handler = CaptureStreamHandler(self.sio)
        # Saved logger state, restored by stop().
        self.old_propagate = self.logger.propagate
        self.old_handlers = self.logger.handlers
        self.old_level = self.logger.level

    def start(self):
        """Redirect the logger exclusively into the capture buffer."""
        self.logger.handlers = [self.handler]
        self.logger.propagate = False
        self.logger.level = logging.DEBUG

    def stop(self):
        """Restore the logger's prior state and return the captured text."""
        self.logger.level = self.old_level
        self.logger.handlers = self.old_handlers
        self.logger.propagate = self.old_propagate
        return self.raw()

    def raw(self):
        """Return everything captured so far as text."""
        captured = self.sio.getvalue()
        # Python 2.x logging package hard-wires UTF-8 output.
        if isinstance(captured, mitogen.core.BytesType):
            captured = captured.decode('utf-8')
        return captured

    def msgs(self):
        """Return the list of LogRecords seen by the capture handler."""
        return self.handler.msgs

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, _1, _2, _3):
        self.stop()
class TestCase(unittest.TestCase):
    """Base test case that verifies tests leak no threads, file descriptors,
    or child processes, and adapts fixtures for forked mitogen contexts."""
    @classmethod
    def setUpClass(cls):
        # This is done in setUpClass() so we have a chance to run before any
        # Broker() instantiations in setUp() etc.
        mitogen.fork.on_fork()
        # Snapshot of open FDs, compared against in _teardown_check_fds().
        cls._fds_before = psutil.Process().open_files()
        # Ignore children started by external packages - in particular
        # multiprocessing.resource_tracker.main()`, started when some Ansible
        # versions instantiate a `multithreading.Lock()`.
        cls._children_before = frozenset(psutil.Process().children())
        super(TestCase, cls).setUpClass()
    # Thread names permitted to survive a test.
    ALLOWED_THREADS = set([
        'MainThread',
        'mitogen.master.join_thread_async'
    ])
    def _teardown_check_threads(self):
        """Fail if any unexpected or duplicated thread outlives the test."""
        counts = {}
        for thread in threading.enumerate():
            name = threading_thread_name(thread)
            # Python 2.4: enumerate() may return stopped threads.
            assert \
                not threading__thread_is_alive(thread) \
                or name in self.ALLOWED_THREADS, \
                'Found thread %r still running after tests.' % (name,)
            counts[name] = counts.get(name, 0) + 1
        for name in counts:
            assert counts[name] == 1, \
                'Found %d copies of thread %r running after tests.' % (
                    counts[name], name
                )
    def _teardown_check_fds(self):
        """Fail if the number of open FDs changed during the test, dumping
        lsof output for diagnosis."""
        mitogen.core.Latch._on_fork()
        fds_after = psutil.Process().open_files()
        # NOTE(review): only counts are compared, so a leak paired with an
        # unrelated close of another FD would go undetected.
        fds_leaked = len(self._fds_before) != len(fds_after)
        if not fds_leaked:
            return
        else:
            if sys.platform == 'linux':
                # +E includes FD endpoint info; unsupported on non-Linux lsof.
                subprocess.check_call(
                    'lsof +E -w -p %i | grep -vw mem' % (os.getpid(),),
                    shell=True,
                )
            else:
                subprocess.check_call(
                    'lsof -w -p %i | grep -vw mem' % (os.getpid(),),
                    shell=True,
                )
        assert 0, "%s leaked FDs: %s\nBefore:\t%s\nAfter:\t%s" % (
            self, fds_leaked, self._fds_before, fds_after,
        )
    # Some class fixtures (like Ansible MuxProcess) start persistent children
    # for the duration of the class.
    no_zombie_check = False
    def _teardown_check_zombies(self):
        """Fail if the test left an unreaped zombie or a still-running child
        process, listing survivors with ps."""
        if self.no_zombie_check:
            return
        # pid=0: Wait for any child process in the same process group as us.
        # WNOHANG: Don't block if no processes ready to report status.
        try:
            pid, status = os.waitpid(0, os.WNOHANG)
        except OSError as e:
            # ECHILD: there are no child processes in our group.
            if e.errno == errno.ECHILD:
                return
            raise
        if pid:
            assert 0, "%s failed to reap subprocess %d (status %d)." % (
                self, pid, status
            )
        children_after = frozenset(psutil.Process().children())
        children_leaked = children_after.difference(self._children_before)
        if not children_leaked:
            return
        print('Leaked children of unit test process:')
        subprocess.check_call(
            ['ps', '-o', 'user,pid,%cpu,%mem,vsz,rss,tty,stat,start,time,command', '-ww', '-p',
                ','.join(str(p.pid) for p in children_leaked),
            ],
        )
        if self._children_before:
            print('Pre-existing children of unit test process:')
            subprocess.check_call(
                ['ps', '-o', 'user,pid,%cpu,%mem,vsz,rss,tty,stat,start,time,command', '-ww', '-p',
                    ','.join(str(p.pid) for p in self._children_before),
                ],
            )
        assert 0, "%s leaked still-running subprocesses." % (self,)
    def tearDown(self):
        # Order matters: reap zombies before checking threads and FDs.
        self._teardown_check_zombies()
        self._teardown_check_threads()
        self._teardown_check_fds()
        super(TestCase, self).tearDown()
    def assertRaises(self, exc, func, *args, **kwargs):
        """Like regular assertRaises, except return the exception that was
        raised. Can't use context manager because tests must run on Python2.4"""
        try:
            func(*args, **kwargs)
        except exc:
            e = sys.exc_info()[1]
            return e
        except BaseException:
            LOG.exception('Original exception')
            e = sys.exc_info()[1]
            assert 0, '%r raised %r, not %r' % (func, e, exc)
        assert 0, '%r did not raise %r' % (func, exc)
def get_docker_host():
    """Return the hostname of the Docker daemon taken from $DOCKER_HOST,
    falling back to 'localhost' for local/unset daemons."""
    url = os.environ.get('DOCKER_HOST')
    if url is None or url == 'http+docker://localunixsocket':
        return 'localhost'
    return urlparse.urlparse(url).netloc.partition(':')[0]
class DockerizedSshDaemon(object):
    """Run a privileged, disposable Docker container exposing sshd on a
    randomly published port, for SSH connection tests."""
    # Matches the host address and port in `docker port` output,
    # e.g. 0.0.0.0:32771, :::32771, [::]:32771
    PORT_RE = re.compile(
        r'(?P<addr>[0-9.]+|::|\[[a-f0-9:.]+\]):(?P<port>[0-9]+)',
    )

    @classmethod
    def get_port(cls, container):
        """Return the host TCP port mapped to *container*'s port 22."""
        output = subprocess.check_output(['docker', 'port', container, '22/tcp'])
        match = cls.PORT_RE.search(output.decode())
        if match is None:
            raise ValueError('could not find SSH port in: %r' % (output,))
        return int(match.group('port'))

    def start_container(self):
        """Start the container, skipping the test when docker is missing."""
        try:
            subprocess.check_output(['docker', '--version'])
        except Exception:
            raise unittest.SkipTest('Docker binary is unavailable')
        self.container_name = 'mitogen-test-%08x' % (random.getrandbits(64),)
        subprocess.check_output([
            'docker',
            'run',
            '--detach',
            '--privileged',
            '--publish-all',
            '--name', self.container_name,
            self.image,
        ])

    def __init__(self, mitogen_test_distro=os.environ.get('MITOGEN_TEST_DISTRO', 'debian9')):
        # Distro spec looks like 'debian9' or 'centos8-py3'.
        if '-' in mitogen_test_distro:
            distro, _py3 = mitogen_test_distro.split('-')
        else:
            distro, _py3 = mitogen_test_distro, None
        self.python_path = (
            '/usr/bin/python3' if _py3 == 'py3' else '/usr/bin/python'
        )
        self.image = 'public.ecr.aws/n5z0e8q9/%s-test' % (distro,)
        self.start_container()
        self.host = self.get_host()
        self.port = self.get_port(self.container_name)

    def get_host(self):
        """Return the hostname used to reach the container's sshd."""
        return get_docker_host()

    def wait_for_sshd(self):
        """Block until sshd inside the container presents its banner."""
        wait_for_port(self.get_host(), self.port, pattern='OpenSSH')

    def check_processes(self):
        """Fail if anything beyond a lone sshd runs inside the container."""
        args = ['docker', 'exec', self.container_name, 'ps', '-o', 'comm=']
        counts = {}
        for line in subprocess.check_output(args).decode().splitlines():
            name = line.strip()
            counts[name] = counts.get(name, 0) + 1
        if counts != {'ps': 1, 'sshd': 1}:
            assert 0, (
                'Docker container %r contained extra running processes '
                'after test completed: %r' % (
                    self.container_name,
                    counts
                )
            )

    def close(self):
        """Force-remove the container."""
        args = ['docker', 'rm', '-f', self.container_name]
        subprocess.check_output(args)
class BrokerMixin(object):
    """Mixin creating a fresh Broker per test and tearing it down after."""
    broker_class = mitogen.master.Broker
    # Tests that shut the broker down themselves set this True to suppress
    # the automatic shutdown() in tearDown().
    broker_shutdown = False

    def setUp(self):
        super(BrokerMixin, self).setUp()
        self.broker = self.broker_class()

    def tearDown(self):
        broker = self.broker
        if not self.broker_shutdown:
            broker.shutdown()
        broker.join()
        del self.broker
        super(BrokerMixin, self).tearDown()

    def sync_with_broker(self):
        """Block until the broker thread completes one full IO loop."""
        sync_with_broker(self.broker)
class RouterMixin(BrokerMixin):
    """BrokerMixin that additionally provides a Router bound to the broker."""
    router_class = mitogen.master.Router
    def setUp(self):
        super(RouterMixin, self).setUp()
        self.router = self.router_class(self.broker)
    def tearDown(self):
        # Drop our reference before BrokerMixin shuts the broker down.
        del self.router
        super(RouterMixin, self).tearDown()
class DockerMixin(RouterMixin):
    """RouterMixin whose SSH connections target an sshd running inside a
    disposable Docker container, started once per test class."""
    @classmethod
    def setUpClass(cls):
        super(DockerMixin, cls).setUpClass()
        if os.environ.get('SKIP_DOCKER_TESTS'):
            raise unittest.SkipTest('SKIP_DOCKER_TESTS is set')
        # we want to be able to override test distro for some tests that need a different container spun up
        daemon_args = {}
        if hasattr(cls, 'mitogen_test_distro'):
            daemon_args['mitogen_test_distro'] = cls.mitogen_test_distro
        cls.dockerized_ssh = DockerizedSshDaemon(**daemon_args)
        cls.dockerized_ssh.wait_for_sshd()
    @classmethod
    def tearDownClass(cls):
        cls.dockerized_ssh.check_processes()
        cls.dockerized_ssh.close()
        super(DockerMixin, cls).tearDownClass()
    @property
    def docker_ssh_default_kwargs(self):
        """Default keyword arguments for Router.ssh() aimed at the container."""
        return {
            'hostname': self.dockerized_ssh.host,
            'port': self.dockerized_ssh.port,
            'check_host_keys': 'ignore',
            'ssh_debug_level': 3,
            # https://www.openssh.com/legacy.html
            # ssh-rsa uses SHA1. Least worst available with CentOS 7 sshd.
            # Rejected by default in newer ssh clients (e.g. Ubuntu 22.04).
            # Duplicated cases in
            # - tests/ansible/ansible.cfg
            # - tests/ansible/integration/connection_delegation/delegate_to_template.yml
            # - tests/ansible/integration/connection_delegation/stack_construction.yml
            # - tests/ansible/integration/process/unix_socket_cleanup.yml
            # - tests/ansible/integration/ssh/variables.yml
            # - tests/testlib.py
            'ssh_args': [
                '-o', 'HostKeyAlgorithms +ssh-rsa',
                '-o', 'PubkeyAcceptedKeyTypes +ssh-rsa',
            ],
            'python_path': self.dockerized_ssh.python_path,
        }
    def docker_ssh(self, **kwargs):
        """Open an SSH connection to the container, filling in defaults for
        any keyword argument the caller did not supply."""
        for k, v in self.docker_ssh_default_kwargs.items():
            kwargs.setdefault(k, v)
        return self.router.ssh(**kwargs)
    def docker_ssh_any(self, **kwargs):
        """Connect as a generic sudo-capable test account.

        Fix: **kwargs was previously accepted but silently discarded; caller
        overrides are now forwarded, with username/password as defaults.
        """
        kwargs.setdefault('username', 'mitogen__has_sudo_nopw')
        kwargs.setdefault('password', 'has_sudo_nopw_password')
        return self.docker_ssh(**kwargs)
nextgen-router @ b1b259b4
Subproject commit f3c29e8153470a8e5d903e679cc82d9a4dd26ae5 Subproject commit b1b259b455f2c41892ab8b5b996116f71cb0fd27
init @ 734d8607
Subproject commit eaa2b3a15f318a9ad3d68845d0c630fd4b6bf5ff Subproject commit 734d86077fa83b1d78a695327760347d3649daa3
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment