This commit is contained in:

parent 97bf0ec1fe
commit fe33aee2fe

28	.drone.yml
@@ -1,19 +1,19 @@
 ---
-kind: pipeline
+# kind: pipeline
-type: exec
+# type: exec
-name: default
+# name: default

-platform:
+# platform:
-os: linux
+# os: linux
-arch: amd64
+# arch: amd64


-clone:
+# clone:
-# removed, since nothing is tested beforehand any more; we pull straight into the directory
+# # removed, since nothing is tested beforehand any more; we pull straight into the directory
-disable: true
+# disable: true

-steps:
+# steps:
-- name: pull into the folduh
+# - name: pull into the folduh
-commands:
+# commands:
-- cd /etc/ansible
+# - cd /etc/ansible
-- git pull origin some-kind-of-lobster
+# - git pull origin some-kind-of-lobster
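For reference, the pipeline being disabled above is a Drone "exec" pipeline. The sketch below shows it in runnable form; the YAML indentation was lost in this rendering and is assumed here, everything else comes from the diff:

kind: pipeline
type: exec
name: default

platform:
  os: linux
  arch: amd64

clone:
  # removed, since nothing is tested beforehand any more; we pull straight into the directory
  disable: true

steps:
- name: pull into the folduh
  commands:
  - cd /etc/ansible
  - git pull origin some-kind-of-lobster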
1	.gitignore (vendored)
@@ -1 +1,2 @@
 .vaulto
+asdf
41	ansible.cfg
@@ -1,4 +1,15 @@
+#### export ANSIBLE_CONFIG=./ansible.cfg
+
 [defaults]
+gathering = smart
+fact_caching = jsonfile
+fact_caching_connection = /tmp/facts_cache
+# two hours timeout
+fact_caching_timeout = 7200
+
+
+interpreter_python = auto_silent
+ansible_python_interpreter = auto_silent
 # Use the YAML callback plugin.
 stdout_callback = yaml
 # Use the stdout_callback when running ad-hoc commands.
@@ -6,12 +17,28 @@ bin_ansible_callbacks = True

 host_key_checking = false

-vault_password_file = /etc/ansible/.vaulto
+#vault_password_file = /etc/ansible/.vaulto
+vault_password_file = /tmp/.vaulto
+
-callback_plugins = /etc/ansible/plugins/callback
+# callback_plugins = /etc/ansible/plugins/callback
-callback_whitelist = telegram
+# callback_whitelist = telegram
-callbacks_enabled = telegram
+# callbacks_enabled = telegram

-[callback_telegram]
+strategy_plugins = mitogen-0.3.9/ansible_mitogen/plugins/strategy
-tg_token = 6472915685:AAHPvgrQoqG7DxtfbnHWPe3Lfild-CGJ1j8
+strategy = mitogen_linear
-tg_chat_id = -4023350326
+
+#### TODO for some reason this does not work
+roles_path = roles:internal_roles
+# # [callback_telegram]
+# # tg_token = 6472915685:AAHPvgrQoqG7DxtfbnHWPe3Lfild-CGJ1j8
+# # tg_chat_id = -4023350326
+
+
+# adding a user
+# useradd -m hogweed1 -s /usr/bin/bash
+# passwd hogweed1
+# sudo adduser hogweed1 sudo
+
+[ssh_connection]
+# Enable pipelining, requires disabling requiretty in sudoers
+pipelining = True
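A quick, non-invasive way to check that the new defaults are actually picked up is the standard ansible CLI; the sketch assumes the commands run from the repository root, where the mitogen-0.3.9 directory referenced by strategy_plugins lives:

export ANSIBLE_CONFIG=./ansible.cfg
ansible-config dump --only-changed          # should list gathering, fact_caching*, strategy, vault_password_file
ansible-doc -t strategy -l | grep mitogen   # mitogen_linear should resolve via strategy_plugins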
5	environments/just-created/group_vars/all/ssh-creds.yml	Normal file
@@ -0,0 +1,5 @@
---
ansible_ssh_user: root
ansible_ssh_pass: admin
ansible_sudo_pass: admin
ansible_ssh_private_key_file: '/home/hogweed1/id25519.key'
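These connection credentials are committed in clear text. Since ansible.cfg above now points vault_password_file at /tmp/.vaulto and .vaulto stays git-ignored, one natural follow-up (a suggestion only, not something this commit does) is to vault the file; the passphrase location is hypothetical:

echo 'some-strong-passphrase' > /tmp/.vaulto
ansible-vault encrypt environments/just-created/group_vars/all/ssh-creds.yml \
    --vault-password-file /tmp/.vaulto
ansible-vault view environments/just-created/group_vars/all/ssh-creds.yml \
    --vault-password-file /tmp/.vaulto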
5	environments/just-created/group_vars/lxc.yml	Normal file
@@ -0,0 +1,5 @@
---
ansible_ssh_user: hogweed1
ansible_ssh_pass: coloredhorses
ansible_sudo_pass: coloredhorses
ansible_ssh_private_key_file: '/home/hogweed1/id25519.key'
11	environments/just-created/hosts.yml	Normal file
@@ -0,0 +1,11 @@
---
# all: # keys must be unique, i.e. only one 'hosts' per group
#   hosts:
#     #nexus.guaranteedstruggle.host:
#     #printing-slut.guaranteedstruggle.host:
#     harbor.guaranteedstruggle.host:

lxc: # keys must be unique, i.e. only one 'hosts' per group
  hosts:
    ### but its a vm wtf
    harbor.guaranteedstruggle.host:
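A minimal smoke test of the new inventory, assuming the credentials in the group_vars above are valid for the host:

ansible -i environments/just-created/hosts.yml lxc -m ping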
2	environments/proxmoxes/group_vars/all/all.yml	Normal file
@@ -0,0 +1,2 @@
# silence the warning from Mitogen - https://github.com/mitogen-hq/mitogen/issues/740#issuecomment-731513058
ansible_python_interpreter: /usr/bin/python3
4	environments/proxmoxes/group_vars/all/ssh-creds.yml	Normal file
@@ -0,0 +1,4 @@
---
ansible_ssh_user: hogweed1
ansible_ssh_pass: coloredhorses
ansible_sudo_pass: coloredhorses
55	environments/proxmoxes/hosts.yml	Normal file
@@ -0,0 +1,55 @@
---
physical_machines:
  hosts:
    cyberbully.guaranteedstruggle.host:
    gpu-slut.guaranteedstruggle.host:
  children:
    proxmoxes:

proxmoxes: # keys must be unique, i.e. only one 'hosts' per group
  hosts:
    king-albert.guaranteedstruggle.host:
  children:
    semyons:

semyons: # keys must be unique, i.e. only one 'hosts' per group
  hosts:
    semyon-0x01.guaranteedstruggle.host:
    semyon-0x02.guaranteedstruggle.host:
    semyon-0x03.guaranteedstruggle.host:
    semyon-0x04.guaranteedstruggle.host:
    semyon-0x05.guaranteedstruggle.host:
vms:
  children:
    printer:
    kubernetes:
    docker:

docker:
  hosts:
    swarm-node1.guaranteedstruggle.host:
    swarm-node2.guaranteedstruggle.host:
    swarm-node3.guaranteedstruggle.host:

    harbor.guaranteedstruggle.host:

kubernetes:
  hosts:
    rke2-master1.guaranteedstruggle.host:
    rke2-master2.guaranteedstruggle.host:
    rke2-master3.guaranteedstruggle.host:
    rke2-worker1.guaranteedstruggle.host:
    rke2-worker2.guaranteedstruggle.host:
    rke2-worker3.guaranteedstruggle.host:
    rke2-worker4.guaranteedstruggle.host:
    rke2-worker5.guaranteedstruggle.host:

    k3s-rancher.guaranteedstruggle.host:
    k3s-awx.guaranteedstruggle.host:

printer:
  hosts:
    printing-slut.guaranteedstruggle.host:

#### TODO
# lxc:
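The nested children above can be sanity-checked without contacting any host. The graph below is an abbreviated sketch of what ansible-inventory should print, assuming the indentation reconstructed above matches the committed file:

ansible-inventory -i environments/proxmoxes/hosts.yml --graph
# @all:
#   |--@physical_machines:
#   |  |--cyberbully.guaranteedstruggle.host
#   |  |--gpu-slut.guaranteedstruggle.host
#   |  |--@proxmoxes:
#   |  |  |--king-albert.guaranteedstruggle.host
#   |  |  |--@semyons:
#   |--@vms:
#   |  |--@docker:
#   |  |--@kubernetes:
#   |  |--@printer: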
BIN	mitogen-0.3.9.tar.gz	Normal file
Binary file not shown.
26	mitogen-0.3.9/LICENSE	Normal file
Copyright 2021, the Mitogen authors

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
1	mitogen-0.3.9/MANIFEST.in	Normal file
include LICENSE
39	mitogen-0.3.9/PKG-INFO	Normal file
Metadata-Version: 2.1
Name: mitogen
Version: 0.3.9
Summary: Library for writing distributed self-replicating programs.
Home-page: https://github.com/mitogen-hq/mitogen/
Author: David Wilson
License: New BSD
Classifier: Environment :: Console
Classifier: Framework :: Ansible
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: POSIX
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Topic :: System :: Distributed Computing
Classifier: Topic :: System :: Systems Administration
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*
Description-Content-Type: text/markdown
License-File: LICENSE

# Mitogen

<a href="https://mitogen.networkgenomics.com/">Please see the documentation</a>.

![](https://i.imgur.com/eBM6LhJ.gif)

[![Total alerts](https://img.shields.io/lgtm/alerts/g/mitogen-hq/mitogen.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mitogen-hq/mitogen/alerts/)

[![Build Status](https://dev.azure.com/mitogen-hq/mitogen/_apis/build/status/mitogen-hq.mitogen?branchName=master)](https://dev.azure.com/mitogen-hq/mitogen/_build/latest?definitionId=1&branchName=master)
9	mitogen-0.3.9/README.md	Normal file
# Mitogen

<a href="https://mitogen.networkgenomics.com/">Please see the documentation</a>.

![](https://i.imgur.com/eBM6LhJ.gif)

[![Total alerts](https://img.shields.io/lgtm/alerts/g/mitogen-hq/mitogen.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mitogen-hq/mitogen/alerts/)

[![Build Status](https://dev.azure.com/mitogen-hq/mitogen/_apis/build/status/mitogen-hq.mitogen?branchName=master)](https://dev.azure.com/mitogen-hq/mitogen/_build/latest?definitionId=1&branchName=master)
0	mitogen-0.3.9/ansible_mitogen/__init__.py	Normal file (empty)

BIN	mitogen-0.3.9/ansible_mitogen/__pycache__/mixins.cpython-310.pyc	Normal file
BIN	mitogen-0.3.9/ansible_mitogen/__pycache__/runner.cpython-310.pyc	Normal file
BIN	mitogen-0.3.9/ansible_mitogen/__pycache__/target.cpython-310.pyc	Normal file
Binary files not shown. Several further compiled __pycache__/*.cpython-310.pyc files are added in the same commit; their names are not preserved in this rendering.
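Committing compiled bytecode alongside the vendored sources is rarely intentional. A possible follow-up to the .gitignore change earlier in this commit (a suggestion, not part of the commit) would be:

# proposed additions to .gitignore
__pycache__/
*.pyc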
288	mitogen-0.3.9/ansible_mitogen/affinity.py	Normal file
Vendored verbatim from the upstream Mitogen 0.3.9 release (BSD-licensed, Copyright 2019, David Wilson). The module pins Ansible processes to CPUs so that the GIL-bound broker, strategy, and worker threads stop paying cross-CPU wake-up latency: Policy defines the interface, FixedPolicy implements the placement scheme (connection multiplexer on CPU 0, Ansible top-level on CPU 1, WorkerProcesses balanced round-robin across the remaining cores via a counter kept in a shared-memory, sem_t-locked State struct, with smaller machines reserving fewer cores), and LinuxPolicy converts the mask to a cpu_set_t and applies it to every thread listed in /proc/self/task through libc sched_setaffinity(). A preexec hook clears the pinning again in helper children such as ssh.
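The placement scheme summarized above can be illustrated with the portable os.sched_setaffinity API. This is a sketch of the idea under the assumption of a Linux host, not the vendored implementation (which drives libc directly and walks /proc/self/task so that every existing thread is repinned):

# cpu_pinning_sketch.py - illustrative only
import os

RESERVED = 2  # CPU 0 for the connection multiplexer, CPU 1 for the top-level process

def pin_worker(worker_index):
    """Pin the calling process to one of the non-reserved CPUs, round-robin."""
    total = os.cpu_count() or 1
    if total <= RESERVED:
        return  # small machine: reserve nothing, leave the scheduler alone
    cpu = RESERVED + (worker_index % (total - RESERVED))
    # Linux-only; unlike the vendored code this sets only the calling thread's mask,
    # but threads and children created afterwards inherit it.
    os.sched_setaffinity(0, {cpu})

def unpin_subprocess():
    """Let helper children (e.g. ssh) run on any CPU except the reserved ones."""
    total = os.cpu_count() or 1
    allowed = set(range(RESERVED, total)) or {0}
    os.sched_setaffinity(0, allowed)

if __name__ == '__main__':
    pin_worker(worker_index=5)
    print(os.sched_getaffinity(0))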
0	mitogen-0.3.9/ansible_mitogen/compat/__init__.py	Normal file (empty)

1134	mitogen-0.3.9/ansible_mitogen/connection.py	Normal file
File diff suppressed because it is too large.
103	mitogen-0.3.9/ansible_mitogen/loaders.py	Normal file
Vendored verbatim from upstream Mitogen 0.3.9 (BSD-licensed, Copyright 2019, David Wilson). It exposes stable names for Ansible's PluginLoader instances across versions (action_loader, become_loader, connection_loader, module_loader, module_utils_loader, shell_loader, strategy_loader), records the original unwrapped action_loader.get and connection_loader.get_with_context callables, and gates everything behind assert_supported_release(), which raises AnsibleError when the installed Ansible version falls outside ANSIBLE_VERSION_MIN = (2, 10) .. ANSIBLE_VERSION_MAX = (2, 17). The check runs at import time because this is the first file the strategy plug-ins import.
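The version gate described above boils down to a tuple comparison on the first two components of the Ansible version. A minimal sketch of the same check follows; the constant names and bounds are taken from the vendored file, but looking the version up via ansible.release is an assumption of this sketch (the vendored code goes through ansible_mitogen.utils.ansible_version):

import ansible.errors
from ansible.release import __version__ as ansible_version_str

ANSIBLE_VERSION_MIN = (2, 10)
ANSIBLE_VERSION_MAX = (2, 17)

def assert_supported_release():
    # e.g. "2.16.3" -> (2, 16)
    version = tuple(int(part) for part in ansible_version_str.split('.')[:2])
    if version < ANSIBLE_VERSION_MIN:
        raise ansible.errors.AnsibleError(
            "Ansible %s is older than the oldest release supported (%s.%s)."
            % (ansible_version_str, *ANSIBLE_VERSION_MIN))
    if version > ANSIBLE_VERSION_MAX:
        raise ansible.errors.AnsibleError(
            "Ansible %s is newer than the newest release supported (%s.%s)."
            % (ansible_version_str, *ANSIBLE_VERSION_MAX))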
130	mitogen-0.3.9/ansible_mitogen/logging.py	Normal file
Vendored verbatim from upstream Mitogen 0.3.9 (BSD-licensed, Copyright 2019, David Wilson). It bridges Mitogen's Python logging into Ansible's Display: a Handler subclass formats each record with Mitogen's formatter, prefixes it with the process name registered through set_process_name(), demotes known-noisy target loggers ('dnf', 'boto') and promotes remote stderr to ERROR, then routes the message to display.error(), display.warning(), or the verbose-output method. setup() installs that handler on the 'mitogen', 'mitogen.io', 'ansible_mitogen', and 'operon' loggers, disables propagation (Ansible installs its own handlers when C.DEFAULT_LOG_PATH is set), and chooses DEBUG or ERROR levels from the current verbosity so tiny messages are filtered before they cross the network.
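The handler pattern used there is ordinary logging machinery. A self-contained sketch of the same idea follows; DisplayHandler and the print stand-ins are hypothetical, only the logging.Handler API is real:

import logging, os

class DisplayHandler(logging.Handler):
    """Forward log records to callback functions instead of a stream."""
    def __init__(self, normal, warning, error):
        logging.Handler.__init__(self)
        self._normal, self._warning, self._error = normal, warning, error

    def emit(self, record):
        line = '[%d] %s' % (os.getpid(), self.format(record))
        if record.levelno >= logging.ERROR:
            self._error(line)
        elif record.levelno >= logging.WARNING:
            self._warning(line)
        else:
            self._normal(line)

log = logging.getLogger('mitogen')
log.handlers = [DisplayHandler(print, print, print)]  # stand-ins for display.vvv/.warning/.error
log.propagate = False
log.setLevel(logging.INFO)
log.info('hello from the handler sketch')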
533
mitogen-0.3.9/ansible_mitogen/mixins.py
Normal file
533
mitogen-0.3.9/ansible_mitogen/mixins.py
Normal file
@ -0,0 +1,533 @@
|
|||||||
|
# Copyright 2019, David Wilson
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are met:
|
||||||
|
#
|
||||||
|
# 1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer.
|
||||||
|
#
|
||||||
|
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer in the documentation
|
||||||
|
# and/or other materials provided with the distribution.
|
||||||
|
#
|
||||||
|
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||||
|
# may be used to endorse or promote products derived from this software without
|
||||||
|
# specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||||
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import pwd
|
||||||
|
import random
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
try:
|
||||||
|
from shlex import quote as shlex_quote
|
||||||
|
except ImportError:
|
||||||
|
from pipes import quote as shlex_quote
|
||||||
|
|
||||||
|
from ansible.module_utils._text import to_bytes
|
||||||
|
from ansible.parsing.utils.jsonify import jsonify
|
||||||
|
|
||||||
|
import ansible
|
||||||
|
import ansible.constants
|
||||||
|
import ansible.plugins
|
||||||
|
import ansible.plugins.action
|
||||||
|
|
||||||
|
import mitogen.core
|
||||||
|
import mitogen.select
|
||||||
|
|
||||||
|
import ansible_mitogen.connection
|
||||||
|
import ansible_mitogen.planner
|
||||||
|
import ansible_mitogen.target
|
||||||
|
import ansible_mitogen.utils
|
||||||
|
import ansible_mitogen.utils.unsafe
|
||||||
|
|
||||||
|
from ansible.module_utils._text import to_text
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ansible.utils.unsafe_proxy import wrap_var
|
||||||
|
except ImportError:
|
||||||
|
from ansible.vars.unsafe_proxy import wrap_var
|
||||||
|
|
||||||
|
try:
|
||||||
|
# ansible 2.8 moved remove_internal_keys to the clean module
|
||||||
|
from ansible.vars.clean import remove_internal_keys
|
||||||
|
except ImportError:
|
||||||
|
try:
|
||||||
|
from ansible.vars.manager import remove_internal_keys
|
||||||
|
except ImportError:
|
||||||
|
# ansible 2.3.3 has remove_internal_keys as a protected func on the action class
|
||||||
|
# we'll fallback to calling self._remove_internal_keys in this case
|
||||||
|
remove_internal_keys = lambda a: "Not found"
|
||||||
|
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ActionModuleMixin(ansible.plugins.action.ActionBase):
|
||||||
|
"""
|
||||||
|
The Mitogen-patched PluginLoader dynamically mixes this into every action
|
||||||
|
class that Ansible attempts to load. It exists to override all the
|
||||||
|
assumptions built into the base action class that should really belong in
|
||||||
|
some middle layer, or at least in the connection layer.
|
||||||
|
|
||||||
|
Functionality is defined here for:
|
||||||
|
|
||||||
|
* Capturing the final set of task variables and giving Connection a chance
|
||||||
|
to update its idea of the correct execution environment, before any
|
||||||
|
attempt is made to call a Connection method. While it's not expected for
|
||||||
|
the interpreter to change on a per-task basis, Ansible permits this, and
|
||||||
|
so it must be supported.
|
||||||
|
|
||||||
|
* Overriding lots of methods that try to call out to shell for mundane
|
||||||
|
reasons, such as copying files around, changing file permissions,
|
||||||
|
creating temporary directories and suchlike.
|
||||||
|
|
||||||
|
* Short-circuiting any use of Ansiballz or related code for executing a
|
||||||
|
module remotely using shell commands and SSH.
|
||||||
|
|
||||||
|
* Short-circuiting most of the logic in dealing with the fact that Ansible
|
||||||
|
always runs become: tasks across at least the SSH user account and the
|
||||||
|
destination user account, and handling the security permission issues
|
||||||
|
that crop up due to this. Mitogen always runs a task completely within
|
||||||
|
the target user account, so it's not a problem for us.
|
||||||
|
"""
|
||||||
|
def __init__(self, task, connection, *args, **kwargs):
|
||||||
|
"""
|
||||||
|
Verify the received connection is really a Mitogen connection. If not,
|
||||||
|
transmute this instance back into the original unadorned base class.
|
||||||
|
|
||||||
|
This allows running the Mitogen strategy in mixed-target playbooks,
|
||||||
|
where some targets use SSH while others use WinRM or some fancier UNIX
|
||||||
|
connection plug-in. That's because when the Mitogen strategy is active,
|
||||||
|
ActionModuleMixin is unconditionally mixed into any action module that
|
||||||
|
is instantiated, and there is no direct way for the monkey-patch to
|
||||||
|
know what kind of connection will be used upfront.
|
||||||
|
"""
|
||||||
|
super(ActionModuleMixin, self).__init__(task, connection, *args, **kwargs)
|
||||||
|
if not isinstance(connection, ansible_mitogen.connection.Connection):
|
||||||
|
_, self.__class__ = type(self).__bases__
|
||||||
|
|
||||||
|
# required for python interpreter discovery
|
||||||
|
connection.templar = self._templar
|
||||||
|
self._finding_python_interpreter = False
|
||||||
|
self._rediscovered_python = False
|
||||||
|
# redeclaring interpreter discovery vars here in case running ansible < 2.8.0
|
||||||
|
self._discovered_interpreter_key = None
|
||||||
|
self._discovered_interpreter = False
|
||||||
|
self._discovery_deprecation_warnings = []
|
||||||
|
self._discovery_warnings = []
|
||||||
|
|
||||||
|
def run(self, tmp=None, task_vars=None):
|
||||||
|
"""
|
||||||
|
Override run() to notify Connection of task-specific data, so it has a
|
||||||
|
chance to know e.g. the Python interpreter in use.
|
||||||
|
"""
|
||||||
|
self._connection.on_action_run(
|
||||||
|
task_vars=task_vars,
|
||||||
|
delegate_to_hostname=self._task.delegate_to,
|
||||||
|
loader_basedir=self._loader.get_basedir(),
|
||||||
|
)
|
||||||
|
return super(ActionModuleMixin, self).run(tmp, task_vars)
|
||||||
|
|
||||||
|
COMMAND_RESULT = {
|
||||||
|
'rc': 0,
|
||||||
|
'stdout': '',
|
||||||
|
'stdout_lines': [],
|
||||||
|
'stderr': ''
|
||||||
|
}
|
||||||
|
|
||||||
|
def fake_shell(self, func, stdout=False):
|
||||||
|
"""
|
||||||
|
Execute a function and decorate its return value in the style of
|
||||||
|
_low_level_execute_command(). This produces a return value that looks
|
||||||
|
like some shell command was run, when really func() was implemented
|
||||||
|
entirely in Python.
|
||||||
|
|
||||||
|
If the function raises :py:class:`mitogen.core.CallError`, this will be
|
||||||
|
translated into a failed shell command with a non-zero exit status.
|
||||||
|
|
||||||
|
:param func:
|
||||||
|
Function invoked as `func()`.
|
||||||
|
:returns:
|
||||||
|
See :py:attr:`COMMAND_RESULT`.
|
||||||
|
"""
|
||||||
|
dct = self.COMMAND_RESULT.copy()
|
||||||
|
try:
|
||||||
|
rc = func()
|
||||||
|
if stdout:
|
||||||
|
dct['stdout'] = repr(rc)
|
||||||
|
except mitogen.core.CallError:
|
||||||
|
LOG.exception('While emulating a shell command')
|
||||||
|
dct['rc'] = 1
|
||||||
|
dct['stderr'] = traceback.format_exc()
|
||||||
|
|
||||||
|
return dct
|
||||||
|
|
||||||
|
def _remote_file_exists(self, path):
|
||||||
|
"""
|
||||||
|
Determine if `path` exists by directly invoking os.path.exists() in the
|
||||||
|
target user account.
|
||||||
|
"""
|
||||||
|
LOG.debug('_remote_file_exists(%r)', path)
|
||||||
|
return self._connection.get_chain().call(
|
||||||
|
ansible_mitogen.target.file_exists,
|
||||||
|
ansible_mitogen.utils.unsafe.cast(path)
|
||||||
|
)
|
||||||
|
|
||||||
|
def _configure_module(self, module_name, module_args, task_vars=None):
|
||||||
|
"""
|
||||||
|
Mitogen does not use the Ansiballz framework. This call should never
|
||||||
|
happen when ActionMixin is active, so crash if it does.
|
||||||
|
"""
|
||||||
|
assert False, "_configure_module() should never be called."
|
||||||
|
|
||||||
|
def _is_pipelining_enabled(self, module_style, wrap_async=False):
|
||||||
|
"""
|
||||||
|
Mitogen does not use SSH pipelining. This call should never happen when
|
||||||
|
ActionMixin is active, so crash if it does.
|
||||||
|
"""
|
||||||
|
assert False, "_is_pipelining_enabled() should never be called."
|
||||||
|
|
||||||
|
def _generate_tmp_path(self):
|
||||||
|
return os.path.join(
|
||||||
|
self._connection.get_good_temp_dir(),
|
||||||
|
'ansible_mitogen_action_%016x' % (
|
||||||
|
random.getrandbits(8*8),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
def _make_tmp_path(self, remote_user=None):
|
||||||
|
"""
|
||||||
|
Create a temporary subdirectory as a child of the temporary directory
|
||||||
|
managed by the remote interpreter.
|
||||||
|
"""
|
||||||
|
LOG.debug('_make_tmp_path(remote_user=%r)', remote_user)
|
||||||
|
path = self._generate_tmp_path()
|
||||||
|
LOG.debug('Temporary directory: %r', path)
|
||||||
|
self._connection.get_chain().call_no_reply(os.mkdir, path)
|
||||||
|
self._connection._shell.tmpdir = path
|
||||||
|
return path
|
||||||
|
|
||||||
|
def _remove_tmp_path(self, tmp_path):
|
||||||
|
"""
|
||||||
|
Replace the base implementation's invocation of rm -rf, replacing it
|
||||||
|
with a pipelined call to :func:`ansible_mitogen.target.prune_tree`.
|
||||||
|
"""
|
||||||
|
LOG.debug('_remove_tmp_path(%r)', tmp_path)
|
||||||
|
if tmp_path is None and ansible_mitogen.utils.ansible_version[:2] >= (2, 6):
|
||||||
|
tmp_path = self._connection._shell.tmpdir # 06f73ad578d
|
||||||
|
if tmp_path is not None:
|
||||||
|
self._connection.get_chain().call_no_reply(
|
||||||
|
ansible_mitogen.target.prune_tree,
|
||||||
|
tmp_path,
|
||||||
|
)
|
||||||
|
self._connection._shell.tmpdir = None
|
||||||
|
|
||||||
|
def _transfer_data(self, remote_path, data):
|
||||||
|
"""
|
||||||
|
Used by the base _execute_module(), and in <2.4 also by the template
|
||||||
|
action module, and probably others.
|
||||||
|
"""
|
||||||
|
if isinstance(data, dict):
|
||||||
|
data = jsonify(data)
|
||||||
|
if not isinstance(data, bytes):
|
||||||
|
data = to_bytes(data, errors='surrogate_or_strict')
|
||||||
|
|
||||||
|
LOG.debug('_transfer_data(%r, %s ..%d bytes)',
|
||||||
|
remote_path, type(data), len(data))
|
||||||
|
self._connection.put_data(remote_path, data)
|
||||||
|
return remote_path
|
||||||
|
|
||||||
|
#: Actions listed here cause :func:`_fixup_perms2` to avoid a needless
|
||||||
|
#: roundtrip, as they modify file modes separately afterwards. This is due
|
||||||
|
#: to the method prototype having a default of `execute=True`.
|
||||||
|
FIXUP_PERMS_RED_HERRING = set(['copy'])
|
||||||
|
|
||||||
|
def _fixup_perms2(self, remote_paths, remote_user=None, execute=True):
|
||||||
|
"""
|
||||||
|
Mitogen always executes ActionBase helper methods in the context of the
|
||||||
|
target user account, so it is never necessary to modify permissions
|
||||||
|
except to ensure the execute bit is set if requested.
|
||||||
|
"""
|
||||||
|
LOG.debug('_fixup_perms2(%r, remote_user=%r, execute=%r)',
|
||||||
|
remote_paths, remote_user, execute)
|
||||||
|
if execute and self._task.action not in self.FIXUP_PERMS_RED_HERRING:
|
||||||
|
return self._remote_chmod(remote_paths, mode='u+x')
|
||||||
|
return self.COMMAND_RESULT.copy()
|
||||||
|
|
||||||
|
def _remote_chmod(self, paths, mode, sudoable=False):
|
||||||
|
"""
|
||||||
|
Issue an asynchronous set_file_mode() call for every path in `paths`,
|
||||||
|
then format the resulting return value list with fake_shell().
|
||||||
|
"""
|
||||||
|
LOG.debug('_remote_chmod(%r, mode=%r, sudoable=%r)',
|
||||||
|
paths, mode, sudoable)
|
||||||
|
return self.fake_shell(lambda: mitogen.select.Select.all(
|
||||||
|
self._connection.get_chain().call_async(
|
||||||
|
ansible_mitogen.target.set_file_mode, path, mode
|
||||||
|
)
|
||||||
|
for path in paths
|
||||||
|
))
|
||||||
|
|
||||||
|
def _remote_chown(self, paths, user, sudoable=False):
|
||||||
|
"""
|
||||||
|
Issue an asynchronous os.chown() call for every path in `paths`, then
|
||||||
|
format the resulting return value list with fake_shell().
|
||||||
|
"""
|
||||||
|
LOG.debug('_remote_chown(%r, user=%r, sudoable=%r)',
|
||||||
|
paths, user, sudoable)
|
||||||
|
ent = self._connection.get_chain().call(pwd.getpwnam, user)
|
||||||
|
return self.fake_shell(lambda: mitogen.select.Select.all(
|
||||||
|
self._connection.get_chain().call_async(
|
||||||
|
os.chown, path, ent.pw_uid, ent.pw_gid
|
||||||
|
)
|
||||||
|
for path in paths
|
||||||
|
))
|
||||||
|
|
||||||
|
def _remote_expand_user(self, path, sudoable=True):
|
||||||
|
"""
|
||||||
|
Replace the base implementation's attempt to emulate
|
||||||
|
os.path.expanduser() with an actual call to os.path.expanduser().
|
||||||
|
|
||||||
|
:param bool sudoable:
|
||||||
|
If :data:`True`, indicate unqualified tilde ("~" with no username)
|
||||||
|
should be evaluated in the context of the login account, not any
|
||||||
|
become_user.
|
||||||
|
"""
|
||||||
|
LOG.debug('_remote_expand_user(%r, sudoable=%r)', path, sudoable)
|
||||||
|
if not path.startswith('~'):
|
||||||
|
# /home/foo -> /home/foo
|
||||||
|
return path
|
||||||
|
if sudoable or not self._play_context.become:
|
||||||
|
if path == '~':
|
||||||
|
# ~ -> /home/dmw
|
||||||
|
return self._connection.homedir
|
||||||
|
if path.startswith('~/'):
|
||||||
|
# ~/.ansible -> /home/dmw/.ansible
|
||||||
|
return os.path.join(self._connection.homedir, path[2:])
|
||||||
|
# ~root/.ansible -> /root/.ansible
|
||||||
|
return self._connection.get_chain(use_login=(not sudoable)).call(
|
||||||
|
os.path.expanduser,
|
||||||
|
ansible_mitogen.utils.unsafe.cast(path),
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_task_timeout_secs(self):
|
||||||
|
"""
|
||||||
|
Return the task "async:" value, portable across 2.4-2.5.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return self._task.async_val
|
||||||
|
except AttributeError:
|
||||||
|
return getattr(self._task, 'async')
|
||||||
|
|
||||||
|
def _set_temp_file_args(self, module_args, wrap_async):
|
||||||
|
# Ansible>2.5 module_utils reuses the action's temporary directory if
|
||||||
|
# one exists. Older versions error if this key is present.
|
||||||
|
if ansible_mitogen.utils.ansible_version[:2] >= (2, 5):
|
||||||
|
if wrap_async:
|
||||||
|
# Sharing is not possible with async tasks, as in that case,
|
||||||
|
# the directory must outlive the action plug-in.
|
||||||
|
module_args['_ansible_tmpdir'] = None
|
||||||
|
else:
|
||||||
|
module_args['_ansible_tmpdir'] = self._connection._shell.tmpdir
|
||||||
|
|
||||||
|
# If _ansible_tmpdir is unset, Ansible>2.6 module_utils will use
|
||||||
|
# _ansible_remote_tmp as the location to create the module's temporary
|
||||||
|
# directory. Older versions error if this key is present.
|
||||||
|
if ansible_mitogen.utils.ansible_version[:2] >= (2, 6):
|
||||||
|
module_args['_ansible_remote_tmp'] = (
|
||||||
|
self._connection.get_good_temp_dir()
|
||||||
|
)
|
||||||
|
|
||||||
|
def _execute_module(self, module_name=None, module_args=None, tmp=None,
|
||||||
|
task_vars=None, persist_files=False,
|
||||||
|
delete_remote_tmp=True, wrap_async=False,
|
||||||
|
ignore_unknown_opts=False,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Collect up a module's execution environment then use it to invoke
|
||||||
|
target.run_module() or helpers.run_module_async() in the target
|
||||||
|
context.
|
||||||
|
"""
|
||||||
|
if module_name is None:
|
||||||
|
module_name = self._task.action
|
||||||
|
if module_args is None:
|
||||||
        module_args = self._task.args
        if task_vars is None:
            task_vars = {}

        if ansible_mitogen.utils.ansible_version[:2] >= (2, 17):
            self._update_module_args(
                module_name, module_args, task_vars,
                ignore_unknown_opts=ignore_unknown_opts,
            )
        else:
            self._update_module_args(module_name, module_args, task_vars)
        env = {}
        self._compute_environment_string(env)
        self._set_temp_file_args(module_args, wrap_async)

        # there's a case where if a task shuts down the node and then immediately calls
        # wait_for_connection, the `ping` test from Ansible won't pass because we lost connection
        # clearing out context forces a reconnect
        # see https://github.com/dw/mitogen/issues/655 and Ansible's `wait_for_connection` module for more info
        if module_name == 'ansible.legacy.ping' and type(self).__name__ == 'wait_for_connection':
            self._connection.context = None

            self._connection._connect()
        result = ansible_mitogen.planner.invoke(
            ansible_mitogen.planner.Invocation(
                action=self,
                connection=self._connection,
                module_name=ansible_mitogen.utils.unsafe.cast(mitogen.core.to_text(module_name)),
                module_args=ansible_mitogen.utils.unsafe.cast(module_args),
                task_vars=task_vars,
                templar=self._templar,
                env=ansible_mitogen.utils.unsafe.cast(env),
                wrap_async=wrap_async,
                timeout_secs=self.get_task_timeout_secs(),
            )
        )

        if tmp and delete_remote_tmp and ansible_mitogen.utils.ansible_version[:2] < (2, 5):
            # Built-in actions expected tmpdir to be cleaned up automatically
            # on _execute_module().
            self._remove_tmp_path(tmp)

        # prevents things like discovered_interpreter_* or ansible_discovered_interpreter_* from being set
        # handle ansible 2.3.3 that has remove_internal_keys in a different place
        check = remove_internal_keys(result)
        if check == 'Not found':
            self._remove_internal_keys(result)

        # taken from _execute_module of ansible 2.8.6
        # propagate interpreter discovery results back to the controller
        if self._discovered_interpreter_key:
            if result.get('ansible_facts') is None:
                result['ansible_facts'] = {}

            # only cache discovered_interpreter if we're not running a rediscovery
            # rediscovery happens in places like docker connections that could have different
            # python interpreters than the main host
            if not self._rediscovered_python:
                result['ansible_facts'][self._discovered_interpreter_key] = self._discovered_interpreter

        if self._discovery_warnings:
            if result.get('warnings') is None:
                result['warnings'] = []
            result['warnings'].extend(self._discovery_warnings)

        if self._discovery_deprecation_warnings:
            if result.get('deprecations') is None:
                result['deprecations'] = []
            result['deprecations'].extend(self._discovery_deprecation_warnings)

        return wrap_var(result)

    def _postprocess_response(self, result):
        """
        Apply fixups mimicking ActionBase._execute_module(); this is copied
        verbatim from action/__init__.py, the guts of _parse_returned_data are
        garbage and should be removed or reimplemented once tests exist.

        :param dict result:
            Dictionary with format::

                {
                    "rc": int,
                    "stdout": "stdout data",
                    "stderr": "stderr data"
                }
        """
        data = self._parse_returned_data(result)

        # Cutpasted from the base implementation.
        if 'stdout' in data and 'stdout_lines' not in data:
            data['stdout_lines'] = (data['stdout'] or u'').splitlines()
        if 'stderr' in data and 'stderr_lines' not in data:
            data['stderr_lines'] = (data['stderr'] or u'').splitlines()

        return data

    def _low_level_execute_command(self, cmd, sudoable=True, in_data=None,
                                   executable=None,
                                   encoding_errors='surrogate_then_replace',
                                   chdir=None):
        """
        Override the base implementation by simply calling
        target.exec_command() in the target context.
        """
        LOG.debug('_low_level_execute_command(%r, in_data=%r, exe=%r, dir=%r)',
                  cmd, type(in_data), executable, chdir)

        if executable is None:  # executable defaults to False
            executable = self._play_context.executable
        if executable:
            cmd = executable + ' -c ' + shlex_quote(cmd)

        # TODO: HACK: if finding python interpreter then we need to keep
        # calling exec_command until we run into the right python we'll use
        # chicken-and-egg issue, mitogen needs a python to run low_level_execute_command
        # which is required by Ansible's discover_interpreter function
        if self._finding_python_interpreter:
            possible_pythons = [
                '/usr/bin/python',
                'python3',
                'python3.7',
                'python3.6',
                'python3.5',
                'python2.7',
                'python2.6',
                '/usr/libexec/platform-python',
                '/usr/bin/python3',
                'python'
            ]
        else:
            # not used, just adding a filler value
            possible_pythons = ['python']

        def _run_cmd():
            return self._connection.exec_command(
                cmd=cmd,
                in_data=in_data,
                sudoable=sudoable,
                mitogen_chdir=chdir,
            )

        for possible_python in possible_pythons:
            try:
                self._possible_python_interpreter = possible_python
                rc, stdout, stderr = _run_cmd()
            # TODO: what exception is thrown?
            except:
                # we've reached the last python attempted and failed
                # TODO: could use enumerate(), need to check which version of python first had it though
                if possible_python == 'python':
                    raise
                else:
                    continue

        stdout_text = to_text(stdout, errors=encoding_errors)

        return {
            'rc': rc,
            'stdout': stdout_text,
            'stdout_lines': stdout_text.splitlines(),
            'stderr': stderr,
        }
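
The probing loop above exists because of a chicken-and-egg problem: Mitogen needs some working Python on the target before Ansible's discover_interpreter() can run, so candidates are tried until one executes and only the final fallback is allowed to raise. A minimal standalone sketch of that idea follows; the function and variable names are illustrative only and not part of ansible_mitogen, and unlike the loop above the sketch returns on the first success, which is enough to show the pattern.

import subprocess

def probe_interpreters(candidates, run_cmd):
    """Return (candidate, output) for the first candidate that executes.

    run_cmd(candidate) is assumed to raise on failure; only the final
    fallback may propagate its error, mirroring the
    `possible_python == 'python'` check in the loop above.
    """
    last = candidates[-1]
    for candidate in candidates:
        try:
            return candidate, run_cmd(candidate)
        except Exception:
            if candidate == last:
                raise

# Example: find a Python that responds to `--version`.
interp, out = probe_interpreters(
    ['python3', '/usr/libexec/platform-python', 'python'],
    lambda exe: subprocess.check_output([exe, '--version'], stderr=subprocess.STDOUT),
)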
281
mitogen-0.3.9/ansible_mitogen/module_finder.py
Normal file
@ -0,0 +1,281 @@
|
|||||||
|
# Copyright 2019, David Wilson
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are met:
|
||||||
|
#
|
||||||
|
# 1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer.
|
||||||
|
#
|
||||||
|
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer in the documentation
|
||||||
|
# and/or other materials provided with the distribution.
|
||||||
|
#
|
||||||
|
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||||
|
# may be used to endorse or promote products derived from this software without
|
||||||
|
# specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||||
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import collections
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Python >= 3.4, PEP 451 ModuleSpec API
|
||||||
|
import importlib.machinery
|
||||||
|
import importlib.util
|
||||||
|
except ImportError:
|
||||||
|
# Python < 3.4, PEP 302 Import Hooks
|
||||||
|
import imp
|
||||||
|
|
||||||
|
import mitogen.master
|
||||||
|
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
PREFIX = 'ansible.module_utils.'
|
||||||
|
|
||||||
|
|
||||||
|
# Analog of `importlib.machinery.ModuleSpec` or `pkgutil.ModuleInfo`.
|
||||||
|
# name Unqualified name of the module.
|
||||||
|
# path Filesystem path of the module.
|
||||||
|
# kind One of the constants in `imp`, as returned in `imp.find_module()`
|
||||||
|
# parent `ansible_mitogen.module_finder.Module` of parent package (if any).
|
||||||
|
Module = collections.namedtuple('Module', 'name path kind parent')
|
||||||
|
|
||||||
|
|
||||||
|
def get_fullname(module):
|
||||||
|
"""
|
||||||
|
Reconstruct a Module's canonical path by recursing through its parents.
|
||||||
|
"""
|
||||||
|
bits = [str(module.name)]
|
||||||
|
while module.parent:
|
||||||
|
bits.append(str(module.parent.name))
|
||||||
|
module = module.parent
|
||||||
|
return '.'.join(reversed(bits))
|
||||||
|
|
||||||
|
|
||||||
|
def get_code(module):
|
||||||
|
"""
|
||||||
|
Compile and return a Module's code object.
|
||||||
|
"""
|
||||||
|
fp = open(module.path, 'rb')
|
||||||
|
try:
|
||||||
|
return compile(fp.read(), str(module.name), 'exec')
|
||||||
|
finally:
|
||||||
|
fp.close()
|
||||||
|
|
||||||
|
|
||||||
|
def is_pkg(module):
|
||||||
|
"""
|
||||||
|
Return :data:`True` if a Module represents a package.
|
||||||
|
"""
|
||||||
|
return module.kind == imp.PKG_DIRECTORY
|
||||||
|
|
||||||
|
|
||||||
|
def find(name, path=(), parent=None):
|
||||||
|
"""
|
||||||
|
Return a Module instance describing the first matching module found on the
|
||||||
|
search path.
|
||||||
|
|
||||||
|
:param str name:
|
||||||
|
Module name.
|
||||||
|
:param list path:
|
||||||
|
List of directory names to search for the module.
|
||||||
|
:param Module parent:
|
||||||
|
Optional module parent.
|
||||||
|
"""
|
||||||
|
assert isinstance(path, tuple)
|
||||||
|
head, _, tail = name.partition('.')
|
||||||
|
try:
|
||||||
|
tup = imp.find_module(head, list(path))
|
||||||
|
except ImportError:
|
||||||
|
return parent
|
||||||
|
|
||||||
|
fp, modpath, (suffix, mode, kind) = tup
|
||||||
|
if fp:
|
||||||
|
fp.close()
|
||||||
|
|
||||||
|
if parent and modpath == parent.path:
|
||||||
|
# 'from timeout import timeout', where 'timeout' is a function but also
|
||||||
|
# the name of the module being imported.
|
||||||
|
return None
|
||||||
|
|
||||||
|
if kind == imp.PKG_DIRECTORY:
|
||||||
|
modpath = os.path.join(modpath, '__init__.py')
|
||||||
|
|
||||||
|
module = Module(head, modpath, kind, parent)
|
||||||
|
# TODO: this code is entirely wrong on Python 3.x, but works well enough
|
||||||
|
# for Ansible. We need a new find_child() that only looks in the package
|
||||||
|
# directory, never falling back to the parent search path.
|
||||||
|
if tail and kind == imp.PKG_DIRECTORY:
|
||||||
|
return find_relative(module, tail, path)
|
||||||
|
return module
|
||||||
|
|
||||||
|
|
||||||
|
def find_relative(parent, name, path=()):
|
||||||
|
if parent.kind == imp.PKG_DIRECTORY:
|
||||||
|
path = (os.path.dirname(parent.path),) + path
|
||||||
|
return find(name, path, parent=parent)
|
||||||
|
|
||||||
|
|
||||||
|
def scan_fromlist(code):
|
||||||
|
"""Return an iterator of (level, name) for explicit imports in a code
|
||||||
|
object.
|
||||||
|
|
||||||
|
Not all names identify a module. `from os import name, path` generates
|
||||||
|
`(0, 'os.name'), (0, 'os.path')`, but `os.name` is usually a string.
|
||||||
|
|
||||||
|
>>> src = 'import a; import b.c; from d.e import f; from g import h, i\\n'
|
||||||
|
>>> code = compile(src, '<str>', 'exec')
|
||||||
|
>>> list(scan_fromlist(code))
|
||||||
|
[(0, 'a'), (0, 'b.c'), (0, 'd.e.f'), (0, 'g.h'), (0, 'g.i')]
|
||||||
|
"""
|
||||||
|
for level, modname_s, fromlist in mitogen.master.scan_code_imports(code):
|
||||||
|
for name in fromlist:
|
||||||
|
yield level, str('%s.%s' % (modname_s, name))
|
||||||
|
if not fromlist:
|
||||||
|
yield level, modname_s
|
||||||
|
|
||||||
|
|
||||||
|
def walk_imports(code, prefix=None):
|
||||||
|
"""Return an iterator of names for implicit parent imports & explicit
|
||||||
|
imports in a code object.
|
||||||
|
|
||||||
|
If a prefix is provided, then only children of that prefix are included.
|
||||||
|
Not all names identify a module. `from os import name, path` generates
|
||||||
|
`'os', 'os.name', 'os.path'`, but `os.name` is usually a string.
|
||||||
|
|
||||||
|
>>> source = 'import a; import b; import b.c; from b.d import e, f\\n'
|
||||||
|
>>> code = compile(source, '<str>', 'exec')
|
||||||
|
>>> list(walk_imports(code))
|
||||||
|
['a', 'b', 'b', 'b.c', 'b', 'b.d', 'b.d.e', 'b.d.f']
|
||||||
|
>>> list(walk_imports(code, prefix='b'))
|
||||||
|
['b.c', 'b.d', 'b.d.e', 'b.d.f']
|
||||||
|
"""
|
||||||
|
if prefix is None:
|
||||||
|
prefix = ''
|
||||||
|
pattern = re.compile(r'(^|\.)(\w+)')
|
||||||
|
start = len(prefix)
|
||||||
|
for _, name, fromlist in mitogen.master.scan_code_imports(code):
|
||||||
|
if not name.startswith(prefix):
|
||||||
|
continue
|
||||||
|
for match in pattern.finditer(name, start):
|
||||||
|
yield name[:match.end()]
|
||||||
|
for leaf in fromlist:
|
||||||
|
yield str('%s.%s' % (name, leaf))
|
||||||
|
|
||||||
|
|
||||||
|
def scan(module_name, module_path, search_path):
|
||||||
|
# type: (str, str, list[str]) -> list[(str, str, bool)]
|
||||||
|
"""Return a list of (name, path, is_package) for ansible.module_utils
|
||||||
|
imports used by an Ansible module.
|
||||||
|
"""
|
||||||
|
log = LOG.getChild('scan')
|
||||||
|
log.debug('%r, %r, %r', module_name, module_path, search_path)
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 4):
|
||||||
|
result = _scan_importlib_find_spec(
|
||||||
|
module_name, module_path, search_path,
|
||||||
|
)
|
||||||
|
log.debug('_scan_importlib_find_spec %r', result)
|
||||||
|
else:
|
||||||
|
result = _scan_imp_find_module(module_name, module_path, search_path)
|
||||||
|
log.debug('_scan_imp_find_module %r', result)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def _scan_importlib_find_spec(module_name, module_path, search_path):
|
||||||
|
# type: (str, str, list[str]) -> list[(str, str, bool)]
|
||||||
|
module = importlib.machinery.ModuleSpec(
|
||||||
|
module_name, loader=None, origin=module_path,
|
||||||
|
)
|
||||||
|
prefix = importlib.machinery.ModuleSpec(
|
||||||
|
PREFIX.rstrip('.'), loader=None,
|
||||||
|
)
|
||||||
|
prefix.submodule_search_locations = search_path
|
||||||
|
queue = collections.deque([module])
|
||||||
|
specs = {prefix.name: prefix}
|
||||||
|
while queue:
|
||||||
|
spec = queue.popleft()
|
||||||
|
if spec.origin is None:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
with open(spec.origin, 'rb') as f:
|
||||||
|
code = compile(f.read(), spec.name, 'exec')
|
||||||
|
except Exception as exc:
|
||||||
|
raise ValueError((exc, module, spec, specs))
|
||||||
|
|
||||||
|
for name in walk_imports(code, prefix.name):
|
||||||
|
if name in specs:
|
||||||
|
continue
|
||||||
|
|
||||||
|
parent_name = name.rpartition('.')[0]
|
||||||
|
parent = specs[parent_name]
|
||||||
|
if parent is None or not parent.submodule_search_locations:
|
||||||
|
specs[name] = None
|
||||||
|
continue
|
||||||
|
|
||||||
|
child = importlib.util._find_spec(
|
||||||
|
name, parent.submodule_search_locations,
|
||||||
|
)
|
||||||
|
if child is None or child.origin is None:
|
||||||
|
specs[name] = None
|
||||||
|
continue
|
||||||
|
|
||||||
|
specs[name] = child
|
||||||
|
queue.append(child)
|
||||||
|
|
||||||
|
del specs[prefix.name]
|
||||||
|
return sorted(
|
||||||
|
(spec.name, spec.origin, spec.submodule_search_locations is not None)
|
||||||
|
for spec in specs.values() if spec is not None
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _scan_imp_find_module(module_name, module_path, search_path):
|
||||||
|
# type: (str, str, list[str]) -> list[(str, str, bool)]
|
||||||
|
module = Module(module_name, module_path, imp.PY_SOURCE, None)
|
||||||
|
stack = [module]
|
||||||
|
seen = set()
|
||||||
|
|
||||||
|
while stack:
|
||||||
|
module = stack.pop(0)
|
||||||
|
for level, fromname in scan_fromlist(get_code(module)):
|
||||||
|
if not fromname.startswith(PREFIX):
|
||||||
|
continue
|
||||||
|
|
||||||
|
imported = find(fromname[len(PREFIX):], search_path)
|
||||||
|
if imported is None or imported in seen:
|
||||||
|
continue
|
||||||
|
|
||||||
|
seen.add(imported)
|
||||||
|
stack.append(imported)
|
||||||
|
parent = imported.parent
|
||||||
|
while parent:
|
||||||
|
fullname = get_fullname(parent)
|
||||||
|
module = Module(fullname, parent.path, parent.kind, None)
|
||||||
|
if module not in seen:
|
||||||
|
seen.add(module)
|
||||||
|
stack.append(module)
|
||||||
|
parent = parent.parent
|
||||||
|
|
||||||
|
return sorted(
|
||||||
|
(PREFIX + get_fullname(module), module.path, is_pkg(module))
|
||||||
|
for module in seen
|
||||||
|
)
|
77
mitogen-0.3.9/ansible_mitogen/parsing.py
Normal file
@ -0,0 +1,77 @@
# Copyright 2019, David Wilson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__metaclass__ = type

import mitogen.core


def parse_script_interpreter(source):
    """
    Parse the script interpreter portion of a UNIX hashbang using the rules
    Linux uses.

    :param str source: String like "/usr/bin/env python".

    :returns:
        Tuple of `(interpreter, arg)`, where `intepreter` is the script
        interpreter and `arg` is its sole argument if present, otherwise
        :py:data:`None`.
    """
    # Find terminating newline. Assume last byte of binprm_buf if absent.
    nl = source.find(b'\n', 0, 128)
    if nl == -1:
        nl = min(128, len(source))

    # Split once on the first run of whitespace. If no whitespace exists,
    # bits just contains the interpreter filename.
    bits = source[0:nl].strip().split(None, 1)
    if len(bits) == 1:
        return mitogen.core.to_text(bits[0]), None
    return mitogen.core.to_text(bits[0]), mitogen.core.to_text(bits[1])


def parse_hashbang(source):
    """
    Parse a UNIX "hashbang line" using the syntax supported by Linux.

    :param str source: String like "#!/usr/bin/env python".

    :returns:
        Tuple of `(interpreter, arg)`, where `intepreter` is the script
        interpreter and `arg` is its sole argument if present, otherwise
        :py:data:`None`.
    """
    # Linux requires first 2 bytes with no whitespace, pretty sure it's the
    # same everywhere. See binfmt_script.c.
    if not source.startswith(b'#!'):
        return None, None

    return parse_script_interpreter(source[2:])
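
A quick, non-authoritative illustration of the two parsers above; it assumes the vendored package is importable as `ansible_mitogen.parsing`, and the expected values follow directly from the splitting rules described in the docstrings.

from ansible_mitogen.parsing import parse_hashbang, parse_script_interpreter

assert parse_hashbang(b'#!/usr/bin/env python') == (u'/usr/bin/env', u'python')
assert parse_hashbang(b'#!/usr/bin/python\n') == (u'/usr/bin/python', None)
assert parse_hashbang(b'echo not a script') == (None, None)
assert parse_script_interpreter(b'/bin/sh -e') == (u'/bin/sh', u'-e')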
631
mitogen-0.3.9/ansible_mitogen/planner.py
Normal file
@ -0,0 +1,631 @@
|
|||||||
|
# Copyright 2019, David Wilson
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are met:
|
||||||
|
#
|
||||||
|
# 1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer.
|
||||||
|
#
|
||||||
|
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer in the documentation
|
||||||
|
# and/or other materials provided with the distribution.
|
||||||
|
#
|
||||||
|
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||||
|
# may be used to endorse or promote products derived from this software without
|
||||||
|
# specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||||
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
"""
|
||||||
|
Classes to detect each case from [0] and prepare arguments necessary for the
|
||||||
|
corresponding Runner class within the target, including preloading requisite
|
||||||
|
files/modules known missing.
|
||||||
|
|
||||||
|
[0] "Ansible Module Architecture", developing_program_flow_modules.html
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import random
|
||||||
|
import re
|
||||||
|
|
||||||
|
import ansible.collections.list
|
||||||
|
import ansible.errors
|
||||||
|
import ansible.executor.module_common
|
||||||
|
|
||||||
|
import mitogen.core
|
||||||
|
import mitogen.select
|
||||||
|
|
||||||
|
import ansible_mitogen.loaders
|
||||||
|
import ansible_mitogen.parsing
|
||||||
|
import ansible_mitogen.target
|
||||||
|
import ansible_mitogen.utils.unsafe
|
||||||
|
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
NO_METHOD_MSG = 'Mitogen: no invocation method found for: '
|
||||||
|
NO_INTERPRETER_MSG = 'module (%s) is missing interpreter line'
|
||||||
|
# NOTE: Ansible 2.10 no longer has a `.` at the end of NO_MODULE_MSG error
|
||||||
|
NO_MODULE_MSG = 'The module %s was not found in configured module paths'
|
||||||
|
|
||||||
|
_planner_by_path = {}
|
||||||
|
|
||||||
|
|
||||||
|
class Invocation(object):
|
||||||
|
"""
|
||||||
|
Collect up a module's execution environment then use it to invoke
|
||||||
|
target.run_module() or helpers.run_module_async() in the target context.
|
||||||
|
"""
|
||||||
|
def __init__(self, action, connection, module_name, module_args,
|
||||||
|
task_vars, templar, env, wrap_async, timeout_secs):
|
||||||
|
#: ActionBase instance invoking the module. Required to access some
|
||||||
|
#: output postprocessing methods that don't belong in ActionBase at
|
||||||
|
#: all.
|
||||||
|
self.action = action
|
||||||
|
#: Ansible connection to use to contact the target. Must be an
|
||||||
|
#: ansible_mitogen connection.
|
||||||
|
self.connection = connection
|
||||||
|
#: Name of the module ('command', 'shell', etc.) to execute.
|
||||||
|
self.module_name = module_name
|
||||||
|
#: Final module arguments.
|
||||||
|
self.module_args = module_args
|
||||||
|
#: Task variables, needed to extract ansible_*_interpreter.
|
||||||
|
self.task_vars = task_vars
|
||||||
|
#: Templar, needed to extract ansible_*_interpreter.
|
||||||
|
self.templar = templar
|
||||||
|
#: Final module environment.
|
||||||
|
self.env = env
|
||||||
|
#: Boolean, if :py:data:`True`, launch the module asynchronously.
|
||||||
|
self.wrap_async = wrap_async
|
||||||
|
#: Integer, if >0, limit the time an asynchronous job may run for.
|
||||||
|
self.timeout_secs = timeout_secs
|
||||||
|
#: Initially ``None``, but set by :func:`invoke`. The path on the
|
||||||
|
#: master to the module's implementation file.
|
||||||
|
self.module_path = None
|
||||||
|
#: Initially ``None``, but set by :func:`invoke`. The raw source or
|
||||||
|
#: binary contents of the module.
|
||||||
|
self._module_source = None
|
||||||
|
#: Initially ``{}``, but set by :func:`invoke`. Optional source to send
|
||||||
|
#: to :func:`propagate_paths_and_modules` to fix Python3.5 relative import errors
|
||||||
|
self._overridden_sources = {}
|
||||||
|
#: Initially ``set()``, but set by :func:`invoke`. Optional source paths to send
|
||||||
|
#: to :func:`propagate_paths_and_modules` to handle loading source dependencies from
|
||||||
|
#: places outside of the main source path, such as collections
|
||||||
|
self._extra_sys_paths = set()
|
||||||
|
|
||||||
|
def get_module_source(self):
|
||||||
|
if self._module_source is None:
|
||||||
|
self._module_source = read_file(self.module_path)
|
||||||
|
return self._module_source
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return 'Invocation(module_name=%s)' % (self.module_name,)
|
||||||
|
|
||||||
|
|
||||||
|
class Planner(object):
|
||||||
|
"""
|
||||||
|
A Planner receives a module name and the contents of its implementation
|
||||||
|
file, indicates whether or not it understands how to run the module, and
|
||||||
|
exports a method to run the module.
|
||||||
|
"""
|
||||||
|
def __init__(self, invocation):
|
||||||
|
self._inv = invocation
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def detect(cls, path, source):
|
||||||
|
"""
|
||||||
|
Return true if the supplied `invocation` matches the module type
|
||||||
|
implemented by this planner.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def should_fork(self):
|
||||||
|
"""
|
||||||
|
Asynchronous tasks must always be forked.
|
||||||
|
"""
|
||||||
|
return self._inv.wrap_async
|
||||||
|
|
||||||
|
def get_push_files(self):
|
||||||
|
"""
|
||||||
|
Return a list of files that should be propagated to the target context
|
||||||
|
using PushFileService. The default implementation pushes nothing.
|
||||||
|
"""
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get_module_deps(self):
|
||||||
|
"""
|
||||||
|
Return a list of the Python module names imported by the module.
|
||||||
|
"""
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get_kwargs(self, **kwargs):
|
||||||
|
"""
|
||||||
|
If :meth:`detect` returned :data:`True`, plan for the module's
|
||||||
|
execution, including granting access to or delivering any files to it
|
||||||
|
that are known to be absent, and finally return a dict::
|
||||||
|
|
||||||
|
{
|
||||||
|
# Name of the class from runners.py that implements the
|
||||||
|
# target-side execution of this module type.
|
||||||
|
"runner_name": "...",
|
||||||
|
|
||||||
|
# Remaining keys are passed to the constructor of the class
|
||||||
|
# named by `runner_name`.
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
binding = self._inv.connection.get_binding()
|
||||||
|
|
||||||
|
new = dict((mitogen.core.UnicodeType(k), kwargs[k])
|
||||||
|
for k in kwargs)
|
||||||
|
new.setdefault('good_temp_dir',
|
||||||
|
self._inv.connection.get_good_temp_dir())
|
||||||
|
new.setdefault('cwd', self._inv.connection.get_default_cwd())
|
||||||
|
new.setdefault('extra_env', self._inv.connection.get_default_env())
|
||||||
|
new.setdefault('emulate_tty', True)
|
||||||
|
new.setdefault('service_context', binding.get_child_service_context())
|
||||||
|
return new
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return '%s()' % (type(self).__name__,)
|
||||||
|
|
||||||
|
|
||||||
|
class BinaryPlanner(Planner):
|
||||||
|
"""
|
||||||
|
Binary modules take their arguments and will return data to Ansible in the
|
||||||
|
same way as want JSON modules.
|
||||||
|
"""
|
||||||
|
runner_name = 'BinaryRunner'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def detect(cls, path, source):
|
||||||
|
return ansible.executor.module_common._is_binary(source)
|
||||||
|
|
||||||
|
def get_push_files(self):
|
||||||
|
return [mitogen.core.to_text(self._inv.module_path)]
|
||||||
|
|
||||||
|
def get_kwargs(self, **kwargs):
|
||||||
|
return super(BinaryPlanner, self).get_kwargs(
|
||||||
|
runner_name=self.runner_name,
|
||||||
|
module=self._inv.module_name,
|
||||||
|
path=self._inv.module_path,
|
||||||
|
json_args=json.dumps(self._inv.module_args),
|
||||||
|
env=self._inv.env,
|
||||||
|
**kwargs
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ScriptPlanner(BinaryPlanner):
|
||||||
|
"""
|
||||||
|
Common functionality for script module planners -- handle interpreter
|
||||||
|
detection and rewrite.
|
||||||
|
"""
|
||||||
|
def _rewrite_interpreter(self, path):
|
||||||
|
"""
|
||||||
|
Given the interpreter path (from the script's hashbang line), return
|
||||||
|
the desired interpreter path. This tries, in order
|
||||||
|
|
||||||
|
1. Look up & render the `ansible_*_interpreter` variable, if set
|
||||||
|
2. Look up the `discovered_interpreter_*` fact, if present
|
||||||
|
3. The unmodified path from the hashbang line.
|
||||||
|
|
||||||
|
:param str path:
|
||||||
|
Absolute path to original interpreter (e.g. '/usr/bin/python').
|
||||||
|
|
||||||
|
:returns:
|
||||||
|
Shell fragment prefix used to execute the script via "/bin/sh -c".
|
||||||
|
While `ansible_*_interpreter` documentation suggests shell isn't
|
||||||
|
involved here, the vanilla implementation uses it and that use is
|
||||||
|
exploited in common playbooks.
|
||||||
|
"""
|
||||||
|
interpreter_name = os.path.basename(path).strip()
|
||||||
|
key = u'ansible_%s_interpreter' % interpreter_name
|
||||||
|
try:
|
||||||
|
template = self._inv.task_vars[key]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
configured_interpreter = self._inv.templar.template(template)
|
||||||
|
return ansible_mitogen.utils.unsafe.cast(configured_interpreter)
|
||||||
|
|
||||||
|
key = u'discovered_interpreter_%s' % interpreter_name
|
||||||
|
try:
|
||||||
|
discovered_interpreter = self._inv.task_vars['ansible_facts'][key]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
return ansible_mitogen.utils.unsafe.cast(discovered_interpreter)
|
||||||
|
|
||||||
|
return path
|
||||||
|
|
||||||
|
def _get_interpreter(self):
|
||||||
|
path, arg = ansible_mitogen.parsing.parse_hashbang(
|
||||||
|
self._inv.get_module_source()
|
||||||
|
)
|
||||||
|
if path is None:
|
||||||
|
raise ansible.errors.AnsibleError(NO_INTERPRETER_MSG % (
|
||||||
|
self._inv.module_name,
|
||||||
|
))
|
||||||
|
|
||||||
|
fragment = self._rewrite_interpreter(path)
|
||||||
|
if arg:
|
||||||
|
fragment += ' ' + arg
|
||||||
|
|
||||||
|
is_python = path.startswith('python')
|
||||||
|
return fragment, is_python
|
||||||
|
|
||||||
|
def get_kwargs(self, **kwargs):
|
||||||
|
interpreter_fragment, is_python = self._get_interpreter()
|
||||||
|
return super(ScriptPlanner, self).get_kwargs(
|
||||||
|
interpreter_fragment=interpreter_fragment,
|
||||||
|
is_python=is_python,
|
||||||
|
**kwargs
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class JsonArgsPlanner(ScriptPlanner):
|
||||||
|
"""
|
||||||
|
Script that has its interpreter directive and the task arguments
|
||||||
|
substituted into its source as a JSON string.
|
||||||
|
"""
|
||||||
|
runner_name = 'JsonArgsRunner'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def detect(cls, path, source):
|
||||||
|
return ansible.executor.module_common.REPLACER_JSONARGS in source
|
||||||
|
|
||||||
|
|
||||||
|
class WantJsonPlanner(ScriptPlanner):
|
||||||
|
"""
|
||||||
|
If a module has the string WANT_JSON in it anywhere, Ansible treats it as a
|
||||||
|
non-native module that accepts a filename as its only command line
|
||||||
|
parameter. The filename is for a temporary file containing a JSON string
|
||||||
|
containing the module's parameters. The module needs to open the file, read
|
||||||
|
and parse the parameters, operate on the data, and print its return data as
|
||||||
|
a JSON encoded dictionary to stdout before exiting.
|
||||||
|
|
||||||
|
These types of modules are self-contained entities. As of Ansible 2.1,
|
||||||
|
Ansible only modifies them to change a shebang line if present.
|
||||||
|
"""
|
||||||
|
runner_name = 'WantJsonRunner'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def detect(cls, path, source):
|
||||||
|
return b'WANT_JSON' in source
|
||||||
|
|
||||||
|
|
||||||
|
class NewStylePlanner(ScriptPlanner):
|
||||||
|
"""
|
||||||
|
The Ansiballz framework differs from module replacer in that it uses real
|
||||||
|
Python imports of things in ansible/module_utils instead of merely
|
||||||
|
preprocessing the module.
|
||||||
|
"""
|
||||||
|
runner_name = 'NewStyleRunner'
|
||||||
|
MARKER = re.compile(br'from ansible(?:_collections|\.module_utils)\.')
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def detect(cls, path, source):
|
||||||
|
return cls.MARKER.search(source) is not None
|
||||||
|
|
||||||
|
def _get_interpreter(self):
|
||||||
|
return None, None
|
||||||
|
|
||||||
|
def get_push_files(self):
|
||||||
|
return super(NewStylePlanner, self).get_push_files() + [
|
||||||
|
mitogen.core.to_text(path)
|
||||||
|
for fullname, path, is_pkg in self.get_module_map()['custom']
|
||||||
|
]
|
||||||
|
|
||||||
|
def get_module_deps(self):
|
||||||
|
return self.get_module_map()['builtin']
|
||||||
|
|
||||||
|
#: Module names appearing in this set always require forking, usually due
|
||||||
|
#: to some terminal leakage that cannot be worked around in any sane
|
||||||
|
#: manner.
|
||||||
|
ALWAYS_FORK_MODULES = frozenset([
|
||||||
|
'dnf', # issue #280; py-dnf/hawkey need therapy
|
||||||
|
'firewalld', # issue #570: ansible module_utils caches dbus conn
|
||||||
|
'ansible.legacy.dnf', # issue #776
|
||||||
|
'ansible.builtin.dnf', # issue #832
|
||||||
|
])
|
||||||
|
|
||||||
|
def should_fork(self):
|
||||||
|
"""
|
||||||
|
In addition to asynchronous tasks, new-style modules should be forked
|
||||||
|
if:
|
||||||
|
|
||||||
|
* the user specifies mitogen_task_isolation=fork, or
|
||||||
|
* the new-style module has a custom module search path, or
|
||||||
|
* the module is known to leak like a sieve.
|
||||||
|
"""
|
||||||
|
return (
|
||||||
|
super(NewStylePlanner, self).should_fork() or
|
||||||
|
(self._inv.task_vars.get('mitogen_task_isolation') == 'fork') or
|
||||||
|
(self._inv.module_name in self.ALWAYS_FORK_MODULES) or
|
||||||
|
(len(self.get_module_map()['custom']) > 0)
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_search_path(self):
|
||||||
|
return tuple(
|
||||||
|
path
|
||||||
|
for path in ansible_mitogen.loaders.module_utils_loader._get_paths(
|
||||||
|
subdirs=False
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
_module_map = None
|
||||||
|
|
||||||
|
def get_module_map(self):
|
||||||
|
if self._module_map is None:
|
||||||
|
binding = self._inv.connection.get_binding()
|
||||||
|
self._module_map = mitogen.service.call(
|
||||||
|
call_context=binding.get_service_context(),
|
||||||
|
service_name='ansible_mitogen.services.ModuleDepService',
|
||||||
|
method_name='scan',
|
||||||
|
|
||||||
|
module_name='ansible_module_%s' % (self._inv.module_name,),
|
||||||
|
module_path=self._inv.module_path,
|
||||||
|
search_path=self.get_search_path(),
|
||||||
|
builtin_path=ansible.executor.module_common._MODULE_UTILS_PATH,
|
||||||
|
context=self._inv.connection.context,
|
||||||
|
)
|
||||||
|
return self._module_map
|
||||||
|
|
||||||
|
def get_kwargs(self):
|
||||||
|
return super(NewStylePlanner, self).get_kwargs(
|
||||||
|
module_map=self.get_module_map(),
|
||||||
|
py_module_name=py_modname_from_path(
|
||||||
|
self._inv.module_name,
|
||||||
|
self._inv.module_path,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ReplacerPlanner(NewStylePlanner):
|
||||||
|
"""
|
||||||
|
The Module Replacer framework is the original framework implementing
|
||||||
|
new-style modules. It is essentially a preprocessor (like the C
|
||||||
|
Preprocessor for those familiar with that programming language). It does
|
||||||
|
straight substitutions of specific substring patterns in the module file.
|
||||||
|
There are two types of substitutions.
|
||||||
|
|
||||||
|
* Replacements that only happen in the module file. These are public
|
||||||
|
replacement strings that modules can utilize to get helpful boilerplate
|
||||||
|
or access to arguments.
|
||||||
|
|
||||||
|
"from ansible.module_utils.MOD_LIB_NAME import *" is replaced with the
|
||||||
|
contents of the ansible/module_utils/MOD_LIB_NAME.py. These should only
|
||||||
|
be used with new-style Python modules.
|
||||||
|
|
||||||
|
"#<<INCLUDE_ANSIBLE_MODULE_COMMON>>" is equivalent to
|
||||||
|
"from ansible.module_utils.basic import *" and should also only apply to
|
||||||
|
new-style Python modules.
|
||||||
|
|
||||||
|
"# POWERSHELL_COMMON" substitutes the contents of
|
||||||
|
"ansible/module_utils/powershell.ps1". It should only be used with
|
||||||
|
new-style Powershell modules.
|
||||||
|
"""
|
||||||
|
runner_name = 'ReplacerRunner'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def detect(cls, path, source):
|
||||||
|
return ansible.executor.module_common.REPLACER in source
|
||||||
|
|
||||||
|
|
||||||
|
class OldStylePlanner(ScriptPlanner):
|
||||||
|
runner_name = 'OldStyleRunner'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def detect(cls, path, source):
|
||||||
|
# Everything else.
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
_planners = [
|
||||||
|
BinaryPlanner,
|
||||||
|
# ReplacerPlanner,
|
||||||
|
NewStylePlanner,
|
||||||
|
JsonArgsPlanner,
|
||||||
|
WantJsonPlanner,
|
||||||
|
OldStylePlanner,
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def py_modname_from_path(name, path):
|
||||||
|
"""
|
||||||
|
Fetch the logical name of a new-style module as it might appear in
|
||||||
|
:data:`sys.modules` of the target's Python interpreter.
|
||||||
|
|
||||||
|
* Since Ansible 2.9, modules appearing within a package have the original
|
||||||
|
package hierarchy approximated on the target, enabling relative imports
|
||||||
|
to function correctly. For example, "ansible.modules.system.setup".
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return ansible.executor.module_common._get_ansible_module_fqn(path)
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return 'ansible.modules.' + name
|
||||||
|
|
||||||
|
|
||||||
|
def read_file(path):
|
||||||
|
fd = os.open(path, os.O_RDONLY)
|
||||||
|
try:
|
||||||
|
bits = []
|
||||||
|
chunk = True
|
||||||
|
while True:
|
||||||
|
chunk = os.read(fd, 65536)
|
||||||
|
if not chunk:
|
||||||
|
break
|
||||||
|
bits.append(chunk)
|
||||||
|
finally:
|
||||||
|
os.close(fd)
|
||||||
|
|
||||||
|
return mitogen.core.b('').join(bits)
|
||||||
|
|
||||||
|
|
||||||
|
def _propagate_deps(invocation, planner, context):
|
||||||
|
binding = invocation.connection.get_binding()
|
||||||
|
mitogen.service.call(
|
||||||
|
call_context=binding.get_service_context(),
|
||||||
|
service_name='mitogen.service.PushFileService',
|
||||||
|
method_name='propagate_paths_and_modules',
|
||||||
|
|
||||||
|
context=context,
|
||||||
|
paths=planner.get_push_files(),
|
||||||
|
# modules=planner.get_module_deps(), TODO
|
||||||
|
overridden_sources=invocation._overridden_sources,
|
||||||
|
# needs to be a list because can't unpickle() a set()
|
||||||
|
extra_sys_paths=list(invocation._extra_sys_paths),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _invoke_async_task(invocation, planner):
|
||||||
|
job_id = '%016x' % random.randint(0, 2**64)
|
||||||
|
context = invocation.connection.spawn_isolated_child()
|
||||||
|
_propagate_deps(invocation, planner, context)
|
||||||
|
|
||||||
|
with mitogen.core.Receiver(context.router) as started_recv:
|
||||||
|
call_recv = context.call_async(
|
||||||
|
ansible_mitogen.target.run_module_async,
|
||||||
|
job_id=job_id,
|
||||||
|
timeout_secs=invocation.timeout_secs,
|
||||||
|
started_sender=started_recv.to_sender(),
|
||||||
|
kwargs=planner.get_kwargs(),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Wait for run_module_async() to crash, or for AsyncRunner to indicate
|
||||||
|
# the job file has been written.
|
||||||
|
for msg in mitogen.select.Select([started_recv, call_recv]):
|
||||||
|
if msg.receiver is call_recv:
|
||||||
|
# It can only be an exception.
|
||||||
|
raise msg.unpickle()
|
||||||
|
break
|
||||||
|
|
||||||
|
return {
|
||||||
|
'stdout': json.dumps({
|
||||||
|
# modules/utilities/logic/async_wrapper.py::_run_module().
|
||||||
|
'changed': True,
|
||||||
|
'started': 1,
|
||||||
|
'finished': 0,
|
||||||
|
'ansible_job_id': job_id,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _invoke_isolated_task(invocation, planner):
|
||||||
|
context = invocation.connection.spawn_isolated_child()
|
||||||
|
_propagate_deps(invocation, planner, context)
|
||||||
|
try:
|
||||||
|
return context.call(
|
||||||
|
ansible_mitogen.target.run_module,
|
||||||
|
kwargs=planner.get_kwargs(),
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
context.shutdown()
|
||||||
|
|
||||||
|
|
||||||
|
def _get_planner(invocation, source):
|
||||||
|
for klass in _planners:
|
||||||
|
if klass.detect(invocation.module_path, source):
|
||||||
|
LOG.debug(
|
||||||
|
'%r accepted %r (filename %r)',
|
||||||
|
klass, invocation.module_name, invocation.module_path,
|
||||||
|
)
|
||||||
|
return klass
|
||||||
|
LOG.debug('%r rejected %r', klass, invocation.module_name)
|
||||||
|
raise ansible.errors.AnsibleError(NO_METHOD_MSG + repr(invocation))
|
||||||
|
|
||||||
|
|
||||||
|
def _fix_py35(invocation, module_source):
|
||||||
|
"""
|
||||||
|
super edge case with a relative import error in Python 3.5.1-3.5.3
|
||||||
|
in Ansible's setup module when using Mitogen
|
||||||
|
https://github.com/dw/mitogen/issues/672#issuecomment-636408833
|
||||||
|
We replace a relative import in the setup module with the actual full file path
|
||||||
|
This works in vanilla Ansible but not in Mitogen otherwise
|
||||||
|
"""
|
||||||
|
if invocation.module_name in {'ansible.builtin.setup', 'ansible.legacy.setup', 'setup'} and \
|
||||||
|
invocation.module_path not in invocation._overridden_sources:
|
||||||
|
# in-memory replacement of setup module's relative import
|
||||||
|
# would check for just python3.5 and run this then but we don't know the
|
||||||
|
# target python at this time yet
|
||||||
|
# NOTE: another ansible 2.10-specific fix: `from ..module_utils` used to be `from ...module_utils`
|
||||||
|
module_source = module_source.replace(
|
||||||
|
b"from ..module_utils.basic import AnsibleModule",
|
||||||
|
b"from ansible.module_utils.basic import AnsibleModule"
|
||||||
|
)
|
||||||
|
invocation._overridden_sources[invocation.module_path] = module_source
|
||||||
|
|
||||||
|
|
||||||
|
def _load_collections(invocation):
|
||||||
|
"""
|
||||||
|
Special loader that ensures that `ansible_collections` exist as a module path for import
|
||||||
|
Goes through all collection path possibilities and stores paths to installed collections
|
||||||
|
Stores them on the current invocation to later be passed to the master service
|
||||||
|
"""
|
||||||
|
for collection_path in ansible.collections.list.list_collection_dirs():
|
||||||
|
invocation._extra_sys_paths.add(collection_path.decode('utf-8'))
|
||||||
|
|
||||||
|
|
||||||
|
def invoke(invocation):
|
||||||
|
"""
|
||||||
|
Find a Planner subclass corresponding to `invocation` and use it to invoke
|
||||||
|
the module.
|
||||||
|
|
||||||
|
:param Invocation invocation:
|
||||||
|
:returns:
|
||||||
|
Module return dict.
|
||||||
|
:raises ansible.errors.AnsibleError:
|
||||||
|
Unrecognized/unsupported module type.
|
||||||
|
"""
|
||||||
|
path = ansible_mitogen.loaders.module_loader.find_plugin(
|
||||||
|
invocation.module_name,
|
||||||
|
'',
|
||||||
|
)
|
||||||
|
if path is None:
|
||||||
|
raise ansible.errors.AnsibleError(NO_MODULE_MSG % (
|
||||||
|
invocation.module_name,
|
||||||
|
))
|
||||||
|
|
||||||
|
invocation.module_path = mitogen.core.to_text(path)
|
||||||
|
if invocation.module_path not in _planner_by_path:
|
||||||
|
if 'ansible_collections' in invocation.module_path:
|
||||||
|
_load_collections(invocation)
|
||||||
|
|
||||||
|
module_source = invocation.get_module_source()
|
||||||
|
_fix_py35(invocation, module_source)
|
||||||
|
_planner_by_path[invocation.module_path] = _get_planner(
|
||||||
|
invocation,
|
||||||
|
module_source
|
||||||
|
)
|
||||||
|
|
||||||
|
planner = _planner_by_path[invocation.module_path](invocation)
|
||||||
|
if invocation.wrap_async:
|
||||||
|
response = _invoke_async_task(invocation, planner)
|
||||||
|
elif planner.should_fork():
|
||||||
|
response = _invoke_isolated_task(invocation, planner)
|
||||||
|
else:
|
||||||
|
_propagate_deps(invocation, planner, invocation.connection.context)
|
||||||
|
response = invocation.connection.get_chain().call(
|
||||||
|
ansible_mitogen.target.run_module,
|
||||||
|
kwargs=planner.get_kwargs(),
|
||||||
|
)
|
||||||
|
|
||||||
|
return invocation.action._postprocess_response(response)
|
0
mitogen-0.3.9/ansible_mitogen/plugins/__init__.py
Normal file
207
mitogen-0.3.9/ansible_mitogen/plugins/action/mitogen_fetch.py
Normal file
@ -0,0 +1,207 @@
|
|||||||
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
|
#
|
||||||
|
# This file is part of Ansible
|
||||||
|
#
|
||||||
|
# Ansible is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# Ansible is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import os
|
||||||
|
import base64
|
||||||
|
from ansible.errors import AnsibleError, AnsibleActionFail, AnsibleActionSkip
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
|
from ansible.module_utils.six import string_types
|
||||||
|
from ansible.module_utils.parsing.convert_bool import boolean
|
||||||
|
from ansible.plugins.action import ActionBase
|
||||||
|
from ansible.utils.display import Display
|
||||||
|
from ansible.utils.hashing import checksum, checksum_s, md5, secure_hash
|
||||||
|
from ansible.utils.path import makedirs_safe, is_subpath
|
||||||
|
|
||||||
|
display = Display()
|
||||||
|
|
||||||
|
|
||||||
|
class ActionModule(ActionBase):
|
||||||
|
|
||||||
|
def run(self, tmp=None, task_vars=None):
|
||||||
|
''' handler for fetch operations '''
|
||||||
|
if task_vars is None:
|
||||||
|
task_vars = dict()
|
||||||
|
|
||||||
|
result = super(ActionModule, self).run(tmp, task_vars)
|
||||||
|
del tmp # tmp no longer has any effect
|
||||||
|
|
||||||
|
try:
|
||||||
|
if self._play_context.check_mode:
|
||||||
|
raise AnsibleActionSkip('check mode not (yet) supported for this module')
|
||||||
|
|
||||||
|
source = self._task.args.get('src', None)
|
||||||
|
original_dest = dest = self._task.args.get('dest', None)
|
||||||
|
flat = boolean(self._task.args.get('flat'), strict=False)
|
||||||
|
fail_on_missing = boolean(self._task.args.get('fail_on_missing', True), strict=False)
|
||||||
|
validate_checksum = boolean(self._task.args.get('validate_checksum', True), strict=False)
|
||||||
|
|
||||||
|
msg = ''
|
||||||
|
# validate source and dest are strings FIXME: use basic.py and module specs
|
||||||
|
if not isinstance(source, string_types):
|
||||||
|
msg = "Invalid type supplied for source option, it must be a string"
|
||||||
|
|
||||||
|
if not isinstance(dest, string_types):
|
||||||
|
msg = "Invalid type supplied for dest option, it must be a string"
|
||||||
|
|
||||||
|
if source is None or dest is None:
|
||||||
|
msg = "src and dest are required"
|
||||||
|
|
||||||
|
if msg:
|
||||||
|
raise AnsibleActionFail(msg)
|
||||||
|
|
||||||
|
source = self._connection._shell.join_path(source)
|
||||||
|
source = self._remote_expand_user(source)
|
||||||
|
|
||||||
|
remote_stat = {}
|
||||||
|
remote_checksum = None
|
||||||
|
if True:
|
||||||
|
# Get checksum for the remote file even using become. Mitogen doesn't need slurp.
|
||||||
|
# Follow symlinks because fetch always follows symlinks
|
||||||
|
try:
|
||||||
|
remote_stat = self._execute_remote_stat(source, all_vars=task_vars, follow=True)
|
||||||
|
except AnsibleError as ae:
|
||||||
|
result['changed'] = False
|
||||||
|
result['file'] = source
|
||||||
|
if fail_on_missing:
|
||||||
|
result['failed'] = True
|
||||||
|
result['msg'] = to_text(ae)
|
||||||
|
else:
|
||||||
|
result['msg'] = "%s, ignored" % to_text(ae, errors='surrogate_or_replace')
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
remote_checksum = remote_stat.get('checksum')
|
||||||
|
if remote_stat.get('exists'):
|
||||||
|
if remote_stat.get('isdir'):
|
||||||
|
result['failed'] = True
|
||||||
|
result['changed'] = False
|
||||||
|
result['msg'] = "remote file is a directory, fetch cannot work on directories"
|
||||||
|
|
||||||
|
# Historically, these don't fail because you may want to transfer
|
||||||
|
# a log file that possibly MAY exist but keep going to fetch other
|
||||||
|
# log files. Today, this is better achieved by adding
|
||||||
|
# ignore_errors or failed_when to the task. Control the behaviour
|
||||||
|
# via fail_when_missing
|
||||||
|
if not fail_on_missing:
|
||||||
|
result['msg'] += ", not transferring, ignored"
|
||||||
|
del result['changed']
|
||||||
|
del result['failed']
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
# use slurp if permissions are lacking or privilege escalation is needed
|
||||||
|
remote_data = None
|
||||||
|
if remote_checksum in (None, '1', ''):
|
||||||
|
slurpres = self._execute_module(module_name='ansible.legacy.slurp', module_args=dict(src=source), task_vars=task_vars)
|
||||||
|
if slurpres.get('failed'):
|
||||||
|
if not fail_on_missing:
|
||||||
|
result['file'] = source
|
||||||
|
result['changed'] = False
|
||||||
|
else:
|
||||||
|
result.update(slurpres)
|
||||||
|
|
||||||
|
if 'not found' in slurpres.get('msg', ''):
|
||||||
|
result['msg'] = "the remote file does not exist, not transferring, ignored"
|
||||||
|
elif slurpres.get('msg', '').startswith('source is a directory'):
|
||||||
|
result['msg'] = "remote file is a directory, fetch cannot work on directories"
|
||||||
|
|
||||||
|
return result
|
||||||
|
else:
|
||||||
|
if slurpres['encoding'] == 'base64':
|
||||||
|
remote_data = base64.b64decode(slurpres['content'])
|
||||||
|
if remote_data is not None:
|
||||||
|
remote_checksum = checksum_s(remote_data)
|
||||||
|
|
||||||
|
# calculate the destination name
|
||||||
|
if os.path.sep not in self._connection._shell.join_path('a', ''):
|
||||||
|
source = self._connection._shell._unquote(source)
|
||||||
|
source_local = source.replace('\\', '/')
|
||||||
|
else:
|
||||||
|
source_local = source
|
||||||
|
|
||||||
|
# ensure we only use file name, avoid relative paths
|
||||||
|
if not is_subpath(dest, original_dest):
|
||||||
|
# TODO: ? dest = os.path.expanduser(dest.replace(('../','')))
|
||||||
|
raise AnsibleActionFail("Detected directory traversal, expected to be contained in '%s' but got '%s'" % (original_dest, dest))
|
||||||
|
|
||||||
|
if flat:
|
||||||
|
if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
|
||||||
|
raise AnsibleActionFail("dest is an existing directory, use a trailing slash if you want to fetch src into that directory")
|
||||||
|
if dest.endswith(os.sep):
|
||||||
|
# if the path ends with "/", we'll use the source filename as the
|
||||||
|
# destination filename
|
||||||
|
base = os.path.basename(source_local)
|
||||||
|
dest = os.path.join(dest, base)
|
||||||
|
if not dest.startswith("/"):
|
||||||
|
# if dest does not start with "/", we'll assume a relative path
|
||||||
|
dest = self._loader.path_dwim(dest)
|
||||||
|
else:
|
||||||
|
# files are saved in dest dir, with a subdir for each host, then the filename
|
||||||
|
if 'inventory_hostname' in task_vars:
|
||||||
|
target_name = task_vars['inventory_hostname']
|
||||||
|
else:
|
||||||
|
target_name = self._play_context.remote_addr
|
||||||
|
dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)
|
||||||
|
|
||||||
|
dest = os.path.normpath(dest)
|
||||||
|
|
||||||
|
# calculate checksum for the local file
|
||||||
|
local_checksum = checksum(dest)
|
||||||
|
|
||||||
|
if remote_checksum != local_checksum:
|
||||||
|
# create the containing directories, if needed
|
||||||
|
makedirs_safe(os.path.dirname(dest))
|
||||||
|
|
||||||
|
# fetch the file and check for changes
|
||||||
|
if remote_data is None:
|
||||||
|
self._connection.fetch_file(source, dest)
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb')
|
||||||
|
f.write(remote_data)
|
||||||
|
f.close()
|
||||||
|
except (IOError, OSError) as e:
|
||||||
|
raise AnsibleActionFail("Failed to fetch the file: %s" % e)
|
||||||
|
new_checksum = secure_hash(dest)
|
||||||
|
# For backwards compatibility. We'll return None on FIPS enabled systems
|
||||||
|
try:
|
||||||
|
new_md5 = md5(dest)
|
||||||
|
except ValueError:
|
||||||
|
new_md5 = None
|
||||||
|
|
||||||
|
if validate_checksum and new_checksum != remote_checksum:
|
||||||
|
result.update(dict(failed=True, md5sum=new_md5,
|
||||||
|
msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None,
|
||||||
|
checksum=new_checksum, remote_checksum=remote_checksum))
|
||||||
|
else:
|
||||||
|
result.update({'changed': True, 'md5sum': new_md5, 'dest': dest,
|
||||||
|
'remote_md5sum': None, 'checksum': new_checksum,
|
||||||
|
'remote_checksum': remote_checksum})
|
||||||
|
else:
|
||||||
|
# For backwards compatibility. We'll return None on FIPS enabled systems
|
||||||
|
try:
|
||||||
|
local_md5 = md5(dest)
|
||||||
|
except ValueError:
|
||||||
|
local_md5 = None
|
||||||
|
result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))
|
||||||
|
|
||||||
|
finally:
|
||||||
|
self._remove_tmp_path(self._connection._shell.tmpdir)
|
||||||
|
|
||||||
|
return result
|
@@ -0,0 +1,58 @@
# Copyright 2019, David Wilson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

"""
Fetch the connection configuration stack that would be used to connect to a
target, without actually connecting to it.
"""

from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
__metaclass__ = type

import ansible_mitogen.connection

from ansible.plugins.action import ActionBase


class ActionModule(ActionBase):
    def run(self, tmp=None, task_vars=None):
        if not isinstance(self._connection,
                          ansible_mitogen.connection.Connection):
            return {
                'skipped': True,
            }

        _, stack = self._connection._build_stack()
        return {
            'changed': True,
            'result': stack,
            '_ansible_verbose_always': True,
            # for ansible < 2.8, we'll default to /usr/bin/python like before
            'discovered_interpreter': self._connection._action._discovered_interpreter
        }
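Editor's note, not part of the commit: for orientation, the action above returns a dict shaped roughly like the sketch below. The entries inside 'result' come from Connection._build_stack(), so the keys and values shown there are illustrative assumptions rather than a guaranteed schema.

# Hedged sketch of what ActionModule.run() above might return for an
# ssh -> sudo connection; the 'result' entries are assumed, not authoritative.
example_return = {
    'changed': True,
    '_ansible_verbose_always': True,
    'discovered_interpreter': '/usr/bin/python3',  # assumed value
    'result': [
        # one entry per hop in the connection stack (illustrative only)
        {'method': 'ssh', 'kwargs': {'hostname': 'target01', 'username': 'root'}},
        {'method': 'sudo', 'kwargs': {'username': 'deploy'}},
    ],
}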
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,46 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full in the first new file above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

try:
    import ansible_mitogen
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection


class Connection(ansible_mitogen.connection.Connection):
    transport = 'buildah'
@@ -0,0 +1,46 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

try:
    import ansible_mitogen.connection
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection


class Connection(ansible_mitogen.connection.Connection):
    transport = 'mitogen_doas'
@@ -0,0 +1,53 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

try:
    import ansible_mitogen
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection


class Connection(ansible_mitogen.connection.Connection):
    transport = 'docker'

    @property
    def docker_cmd(self):
        """
        Ansible 2.3 synchronize module wants to know how we run Docker.
        """
        return 'docker'
@@ -0,0 +1,46 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

try:
    import ansible_mitogen
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection


class Connection(ansible_mitogen.connection.Connection):
    transport = 'jail'
@@ -0,0 +1,82 @@
# coding: utf-8
# Copyright 2018, Yannig Perré
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

import ansible.errors

try:
    import ansible_mitogen
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection
import ansible_mitogen.loaders


_get_result = ansible_mitogen.loaders.connection_loader__get(
    'kubectl',
    class_only=True,
)


class Connection(ansible_mitogen.connection.Connection):
    transport = 'kubectl'

    not_supported_msg = (
        'The "mitogen_kubectl" plug-in requires a version of Ansible '
        'that ships with the "kubectl" connection plug-in.'
    )

    def __init__(self, *args, **kwargs):
        if not _get_result:
            raise ansible.errors.AnsibleConnectionFailure(self.not_supported_msg)
        super(Connection, self).__init__(*args, **kwargs)

    def get_extra_args(self):
        try:
            # Ansible < 2.10, _get_result is the connection class
            connection_options = _get_result.connection_options
        except AttributeError:
            # Ansible >= 2.10, _get_result is a get_with_context_result
            connection_options = _get_result.object.connection_options
        parameters = []
        for key in connection_options:
            task_var_name = 'ansible_%s' % key
            task_var = self.get_task_var(task_var_name)
            if task_var is not None:
                parameters += [connection_options[key], task_var]

        return parameters
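Editor's note, not part of the commit: get_extra_args() above turns inventory or task variables named ansible_<option> into extra kubectl command-line parameters. The standalone sketch below mimics that loop with a made-up connection_options mapping and task variables; the option names and flag strings are hypothetical examples, not the real kubectl plugin's option table.

# Standalone illustration of the mapping performed by get_extra_args() above.
# 'connection_options' and 'task_vars' are invented for this example.
connection_options = {
    'kubectl_container': '--container',
    'kubectl_namespace': '--namespace',
}
task_vars = {
    'ansible_kubectl_namespace': 'staging',
}

parameters = []
for key, flag in connection_options.items():
    task_var = task_vars.get('ansible_%s' % key)
    if task_var is not None:
        parameters += [flag, task_var]

print(parameters)  # ['--namespace', 'staging']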
@@ -0,0 +1,88 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

try:
    import ansible_mitogen.connection
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection
import ansible_mitogen.process


if sys.version_info > (3,):
    viewkeys = dict.keys
elif sys.version_info > (2, 7):
    viewkeys = dict.viewkeys
else:
    viewkeys = lambda dct: set(dct)


def dict_diff(old, new):
    """
    Return a dict representing the differences between the dicts `old` and
    `new`. Deleted keys appear as a key with the value :data:`None`, added and
    changed keys appear as a key with the new value.
    """
    old_keys = viewkeys(old)
    new_keys = viewkeys(dict(new))
    out = {}
    for key in new_keys - old_keys:
        out[key] = new[key]
    for key in old_keys - new_keys:
        out[key] = None
    for key in old_keys & new_keys:
        if old[key] != new[key]:
            out[key] = new[key]
    return out


class Connection(ansible_mitogen.connection.Connection):
    transport = 'local'

    def get_default_cwd(self):
        # https://github.com/ansible/ansible/issues/14489
        return self.loader_basedir

    def get_default_env(self):
        """
        Vanilla Ansible local commands execute with an environment inherited
        from WorkerProcess, we must emulate that.
        """
        return dict_diff(
            old=ansible_mitogen.process.MuxProcess.cls_original_env,
            new=os.environ,
        )
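Editor's note, not part of the commit: a quick standalone illustration of the dict_diff() semantics documented above, using throwaway data. The helper here is a simplified local copy (plain set() instead of viewkeys) so the snippet runs on its own.

# Mirrors the behaviour described in dict_diff()'s docstring above.
def dict_diff(old, new):
    old_keys, new_keys = set(old), set(new)
    out = {}
    for key in new_keys - old_keys:
        out[key] = new[key]          # added keys carry the new value
    for key in old_keys - new_keys:
        out[key] = None              # deleted keys appear as None
    for key in old_keys & new_keys:
        if old[key] != new[key]:
            out[key] = new[key]      # changed keys carry the new value
    return out

old_env = {'PATH': '/usr/bin', 'LANG': 'C'}
new_env = {'PATH': '/usr/local/bin:/usr/bin', 'EDITOR': 'vim'}
print(dict_diff(old_env, new_env))
# {'PATH': '/usr/local/bin:/usr/bin', 'EDITOR': 'vim', 'LANG': None}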
@@ -0,0 +1,46 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

try:
    import ansible_mitogen
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection


class Connection(ansible_mitogen.connection.Connection):
    transport = 'lxc'
@@ -0,0 +1,46 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

try:
    import ansible_mitogen
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection


class Connection(ansible_mitogen.connection.Connection):
    transport = 'lxd'
@@ -0,0 +1,46 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

try:
    import ansible_mitogen.connection
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection


class Connection(ansible_mitogen.connection.Connection):
    transport = 'machinectl'
@@ -0,0 +1,46 @@
# Copyright 2022, Mitogen contributers
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

try:
    import ansible_mitogen
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection


class Connection(ansible_mitogen.connection.Connection):
    transport = 'podman'
@@ -0,0 +1,46 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

try:
    import ansible_mitogen.connection
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection


class Connection(ansible_mitogen.connection.Connection):
    transport = 'setns'
@@ -0,0 +1,87 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

DOCUMENTATION = """
    author: David Wilson <dw@botanicus.net>
    connection: mitogen_ssh
    short_description: Connect over SSH via Mitogen
    description:
        - This connects using an OpenSSH client controlled by the Mitogen for
          Ansible extension. It accepts every option the vanilla ssh plugin
          accepts.
    version_added: "2.5"
    options:
      ssh_args:
        type: str
        vars:
            - name: ssh_args
            - name: ansible_ssh_args
            - name: ansible_mitogen_ssh_args
      ssh_common_args:
        type: str
        vars:
            - name: ssh_args
            - name: ansible_ssh_common_args
            - name: ansible_mitogen_ssh_common_args
      ssh_extra_args:
        type: str
        vars:
            - name: ssh_args
            - name: ansible_ssh_extra_args
            - name: ansible_mitogen_ssh_extra_args
"""

try:
    import ansible_mitogen
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection
import ansible_mitogen.loaders


class Connection(ansible_mitogen.connection.Connection):
    transport = 'ssh'
    vanilla_class = ansible_mitogen.loaders.connection_loader__get(
        'ssh',
        class_only=True,
    )

    @staticmethod
    def _create_control_path(*args, **kwargs):
        """Forward _create_control_path() to the implementation in ssh.py."""
        # https://github.com/dw/mitogen/issues/342
        return Connection.vanilla_class._create_control_path(*args, **kwargs)
@@ -0,0 +1,46 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

try:
    import ansible_mitogen.connection
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection


class Connection(ansible_mitogen.connection.Connection):
    transport = 'mitogen_su'
@@ -0,0 +1,46 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

try:
    import ansible_mitogen.connection
except ImportError:
    base_dir = os.path.dirname(__file__)
    sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
    del base_dir

import ansible_mitogen.connection


class Connection(ansible_mitogen.connection.Connection):
    transport = 'mitogen_sudo'
Binary file not shown.
63  mitogen-0.3.9/ansible_mitogen/plugins/strategy/mitogen.py  Normal file
@@ -0,0 +1,63 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

#
# This is not the real Strategy implementation module, it simply exists as a
# proxy to the real module, which is loaded using Python's regular import
# mechanism, to prevent Ansible's PluginLoader from making up a fake name that
# results in ansible_mitogen plugin modules being loaded twice: once by
# PluginLoader with a name like "ansible.plugins.strategy.mitogen", which is
# stuffed into sys.modules even though attempting to import it will trigger an
# ImportError, and once under its canonical name, "ansible_mitogen.strategy".
#
# Therefore we have a proxy module that imports it under the real name, and
# sets up the duff PluginLoader-imported module to just contain objects from
# the real module, so duplicate types don't exist in memory, and things like
# debuggers and isinstance() work predictably.
#

BASE_DIR = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../../..')
)

if BASE_DIR not in sys.path:
    sys.path.insert(0, BASE_DIR)

import ansible_mitogen.strategy
import ansible.plugins.strategy.linear


class StrategyModule(ansible_mitogen.strategy.StrategyMixin,
                     ansible.plugins.strategy.linear.StrategyModule):
    pass
@@ -0,0 +1,64 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

#
# This is not the real Strategy implementation module, it simply exists as a
# proxy to the real module, which is loaded using Python's regular import
# mechanism, to prevent Ansible's PluginLoader from making up a fake name that
# results in ansible_mitogen plugin modules being loaded twice: once by
# PluginLoader with a name like "ansible.plugins.strategy.mitogen", which is
# stuffed into sys.modules even though attempting to import it will trigger an
# ImportError, and once under its canonical name, "ansible_mitogen.strategy".
#
# Therefore we have a proxy module that imports it under the real name, and
# sets up the duff PluginLoader-imported module to just contain objects from
# the real module, so duplicate types don't exist in memory, and things like
# debuggers and isinstance() work predictably.
#

BASE_DIR = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../../..')
)

if BASE_DIR not in sys.path:
    sys.path.insert(0, BASE_DIR)

import ansible_mitogen.loaders
import ansible_mitogen.strategy


Base = ansible_mitogen.loaders.strategy_loader.get('free', class_only=True)

class StrategyModule(ansible_mitogen.strategy.StrategyMixin, Base):
    pass
@@ -0,0 +1,69 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

#
# This is not the real Strategy implementation module, it simply exists as a
# proxy to the real module, which is loaded using Python's regular import
# mechanism, to prevent Ansible's PluginLoader from making up a fake name that
# results in ansible_mitogen plugin modules being loaded twice: once by
# PluginLoader with a name like "ansible.plugins.strategy.mitogen", which is
# stuffed into sys.modules even though attempting to import it will trigger an
# ImportError, and once under its canonical name, "ansible_mitogen.strategy".
#
# Therefore we have a proxy module that imports it under the real name, and
# sets up the duff PluginLoader-imported module to just contain objects from
# the real module, so duplicate types don't exist in memory, and things like
# debuggers and isinstance() work predictably.
#

BASE_DIR = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../../..')
)

if BASE_DIR not in sys.path:
    sys.path.insert(0, BASE_DIR)

import ansible_mitogen.loaders
import ansible_mitogen.strategy


Base = ansible_mitogen.loaders.strategy_loader.get('host_pinned', class_only=True)

if Base is None:
    raise ImportError(
        'The host_pinned strategy is only available in Ansible 2.7 or newer.'
    )

class StrategyModule(ansible_mitogen.strategy.StrategyMixin, Base):
    pass
@@ -0,0 +1,64 @@
# Copyright 2019, David Wilson
# [BSD 3-clause licence text omitted here; identical to the header reproduced in full above]

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os.path
import sys

#
# This is not the real Strategy implementation module, it simply exists as a
# proxy to the real module, which is loaded using Python's regular import
# mechanism, to prevent Ansible's PluginLoader from making up a fake name that
# results in ansible_mitogen plugin modules being loaded twice: once by
# PluginLoader with a name like "ansible.plugins.strategy.mitogen", which is
# stuffed into sys.modules even though attempting to import it will trigger an
# ImportError, and once under its canonical name, "ansible_mitogen.strategy".
#
# Therefore we have a proxy module that imports it under the real name, and
# sets up the duff PluginLoader-imported module to just contain objects from
# the real module, so duplicate types don't exist in memory, and things like
# debuggers and isinstance() work predictably.
#

BASE_DIR = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../../..')
)

if BASE_DIR not in sys.path:
    sys.path.insert(0, BASE_DIR)

import ansible_mitogen.loaders
import ansible_mitogen.strategy


Base = ansible_mitogen.loaders.strategy_loader.get('linear', class_only=True)

class StrategyModule(ansible_mitogen.strategy.StrategyMixin, Base):
    pass
710  mitogen-0.3.9/ansible_mitogen/process.py  Normal file
@ -0,0 +1,710 @@
|
|||||||
|
# Copyright 2019, David Wilson
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are met:
|
||||||
|
#
|
||||||
|
# 1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer.
|
||||||
|
#
|
||||||
|
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer in the documentation
|
||||||
|
# and/or other materials provided with the distribution.
|
||||||
|
#
|
||||||
|
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||||
|
# may be used to endorse or promote products derived from this software without
|
||||||
|
# specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||||
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import atexit
|
||||||
|
import logging
|
||||||
|
import multiprocessing
|
||||||
|
import os
|
||||||
|
import resource
|
||||||
|
import socket
|
||||||
|
import signal
|
||||||
|
import sys
|
||||||
|
|
||||||
|
try:
|
||||||
|
import faulthandler
|
||||||
|
except ImportError:
|
||||||
|
faulthandler = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
import setproctitle
|
||||||
|
except ImportError:
|
||||||
|
setproctitle = None
|
||||||
|
|
||||||
|
import mitogen
|
||||||
|
import mitogen.core
|
||||||
|
import mitogen.debug
|
||||||
|
import mitogen.fork
|
||||||
|
import mitogen.master
|
||||||
|
import mitogen.parent
|
||||||
|
import mitogen.service
|
||||||
|
import mitogen.unix
|
||||||
|
import mitogen.utils
|
||||||
|
|
||||||
|
import ansible
|
||||||
|
import ansible.constants as C
|
||||||
|
import ansible.errors
|
||||||
|
import ansible_mitogen.logging
|
||||||
|
import ansible_mitogen.services
|
||||||
|
|
||||||
|
from mitogen.core import b
|
||||||
|
import ansible_mitogen.affinity
|
||||||
|
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
ANSIBLE_PKG_OVERRIDE = (
|
||||||
|
u"__version__ = %r\n"
|
||||||
|
u"__author__ = %r\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
MAX_MESSAGE_SIZE = 4096 * 1048576
|
||||||
|
|
||||||
|
worker_model_msg = (
|
||||||
|
'Mitogen connection types may only be instantiated when one of the '
|
||||||
|
'"mitogen_*" or "operon_*" strategies are active.'
|
||||||
|
)
|
||||||
|
|
||||||
|
shutting_down_msg = (
|
||||||
|
'The task worker cannot connect. Ansible may be shutting down, or '
|
||||||
|
'the maximum open files limit may have been exceeded. If this occurs '
|
||||||
|
'midway through a run, please retry after increasing the open file '
|
||||||
|
'limit (ulimit -n). Original error: %s'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
#: The worker model as configured by the currently running strategy. This is
|
||||||
|
#: managed via :func:`get_worker_model` / :func:`set_worker_model` functions by
|
||||||
|
#: :class:`StrategyMixin`.
|
||||||
|
_worker_model = None
|
||||||
|
|
||||||
|
|
||||||
|
#: A copy of the sole :class:`ClassicWorkerModel` that ever exists during a
|
||||||
|
#: classic run, as returned by :func:`get_classic_worker_model`.
|
||||||
|
_classic_worker_model = None
|
||||||
|
|
||||||
|
|
||||||
|
def set_worker_model(model):
|
||||||
|
"""
|
||||||
|
To remove process model-wiring from
|
||||||
|
:class:`ansible_mitogen.connection.Connection`, it is necessary to track
|
||||||
|
some idea of the configured execution environment outside the connection
|
||||||
|
plug-in.
|
||||||
|
|
||||||
|
That is what :func:`set_worker_model` and :func:`get_worker_model` are for.
|
||||||
|
"""
|
||||||
|
global _worker_model
|
||||||
|
assert model is None or _worker_model is None
|
||||||
|
_worker_model = model
|
||||||
|
|
||||||
|
|
||||||
|
def get_worker_model():
|
||||||
|
"""
|
||||||
|
Return the :class:`WorkerModel` currently configured by the running
|
||||||
|
strategy.
|
||||||
|
"""
|
||||||
|
if _worker_model is None:
|
||||||
|
raise ansible.errors.AnsibleConnectionFailure(worker_model_msg)
|
||||||
|
return _worker_model
|
||||||
|
|
||||||
|
|
||||||
|
def get_classic_worker_model(**kwargs):
|
||||||
|
"""
|
||||||
|
Return the single :class:`ClassicWorkerModel` instance, constructing it if
|
||||||
|
necessary.
|
||||||
|
"""
|
||||||
|
global _classic_worker_model
|
||||||
|
assert _classic_worker_model is None or (not kwargs), \
|
||||||
|
"ClassicWorkerModel kwargs supplied but model already constructed"
|
||||||
|
|
||||||
|
if _classic_worker_model is None:
|
||||||
|
_classic_worker_model = ClassicWorkerModel(**kwargs)
|
||||||
|
return _classic_worker_model
|
||||||
|
|
||||||
|
|
||||||
|
def getenv_int(key, default=0):
|
||||||
|
"""
|
||||||
|
Get an integer-valued environment variable `key`, if it exists and parses
|
||||||
|
as an integer, otherwise return `default`.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return int(os.environ.get(key, str(default)))
|
||||||
|
except ValueError:
|
||||||
|
return default
|
||||||
|
|
||||||
|
|
||||||
|
def save_pid(name):
|
||||||
|
"""
|
||||||
|
When debugging and profiling, it is very annoying to poke through the
|
||||||
|
process list to discover the currently running Ansible and MuxProcess IDs,
|
||||||
|
especially when trying to catch an issue during early startup. So here, if
|
||||||
|
a magic environment variable is set, stash them in hidden files in the CWD::
|
||||||
|
|
||||||
|
alias muxpid="cat .ansible-mux.pid"
|
||||||
|
alias anspid="cat .ansible-controller.pid"
|
||||||
|
|
||||||
|
gdb -p $(muxpid)
|
||||||
|
perf top -p $(anspid)
|
||||||
|
"""
|
||||||
|
if os.environ.get('MITOGEN_SAVE_PIDS'):
|
||||||
|
with open('.ansible-%s.pid' % (name,), 'w') as fp:
|
||||||
|
fp.write(str(os.getpid()))
|
||||||
|
|
||||||
|
|
||||||
|
def setup_pool(pool):
|
||||||
|
"""
|
||||||
|
Configure a connection multiplexer's :class:`mitogen.service.Pool` with
|
||||||
|
services accessed by clients and WorkerProcesses.
|
||||||
|
"""
|
||||||
|
pool.add(mitogen.service.FileService(router=pool.router))
|
||||||
|
pool.add(mitogen.service.PushFileService(router=pool.router))
|
||||||
|
pool.add(ansible_mitogen.services.ContextService(router=pool.router))
|
||||||
|
pool.add(ansible_mitogen.services.ModuleDepService(pool.router))
|
||||||
|
LOG.debug('Service pool configured: size=%d', pool.size)
|
||||||
|
|
||||||
|
|
||||||
|
def _setup_responder(responder):
|
||||||
|
"""
|
||||||
|
Configure :class:`mitogen.master.ModuleResponder` to only permit
|
||||||
|
certain packages, and to generate custom responses for certain modules.
|
||||||
|
"""
|
||||||
|
responder.whitelist_prefix('ansible')
|
||||||
|
responder.whitelist_prefix('ansible_mitogen')
|
||||||
|
|
||||||
|
# Ansible 2.3 is compatible with Python 2.4 targets, however
|
||||||
|
# ansible/__init__.py is not. Instead, executor/module_common.py writes
|
||||||
|
# out a 2.4-compatible namespace package for unknown reasons. So we
|
||||||
|
# copy it here.
|
||||||
|
responder.add_source_override(
|
||||||
|
fullname='ansible',
|
||||||
|
path=ansible.__file__,
|
||||||
|
source=(ANSIBLE_PKG_OVERRIDE % (
|
||||||
|
ansible.__version__,
|
||||||
|
ansible.__author__,
|
||||||
|
)).encode(),
|
||||||
|
is_pkg=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def increase_open_file_limit():
|
||||||
|
"""
|
||||||
|
#549: in order to reduce the possibility of hitting an open files limit,
|
||||||
|
increase :data:`resource.RLIMIT_NOFILE` from its soft limit to its hard
|
||||||
|
limit, if they differ.
|
||||||
|
|
||||||
|
It is common that a low soft limit is configured by default, where the hard
|
||||||
|
limit is much higher.
|
||||||
|
"""
|
||||||
|
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
|
||||||
|
if hard == resource.RLIM_INFINITY:
|
||||||
|
hard_s = '(infinity)'
|
||||||
|
# cap in case of O(RLIMIT_NOFILE) algorithm in some subprocess.
|
||||||
|
hard = 524288
|
||||||
|
else:
|
||||||
|
hard_s = str(hard)
|
||||||
|
|
||||||
|
LOG.debug('inherited open file limits: soft=%d hard=%s', soft, hard_s)
|
||||||
|
if soft >= hard:
|
||||||
|
LOG.debug('max open files already set to hard limit: %d', hard)
|
||||||
|
return
|
||||||
|
|
||||||
|
# OS X is limited by kern.maxfilesperproc sysctl, rather than the
|
||||||
|
# advertised unlimited hard RLIMIT_NOFILE. Just hard-wire known defaults
|
||||||
|
# for that sysctl, to avoid the mess of querying it.
|
||||||
|
for value in (hard, 10240):
|
||||||
|
try:
|
||||||
|
resource.setrlimit(resource.RLIMIT_NOFILE, (value, hard))
|
||||||
|
LOG.debug('raised soft open file limit from %d to %d', soft, value)
|
||||||
|
break
|
||||||
|
except ValueError as e:
|
||||||
|
LOG.debug('could not raise soft open file limit from %d to %d: %s',
|
||||||
|
soft, value, e)
|
||||||
|
|
||||||
|
|
||||||
|
def common_setup(enable_affinity=True, _init_logging=True):
|
||||||
|
save_pid('controller')
|
||||||
|
ansible_mitogen.logging.set_process_name('top')
|
||||||
|
|
||||||
|
if _init_logging:
|
||||||
|
ansible_mitogen.logging.setup()
|
||||||
|
|
||||||
|
if enable_affinity:
|
||||||
|
ansible_mitogen.affinity.policy.assign_controller()
|
||||||
|
|
||||||
|
mitogen.utils.setup_gil()
|
||||||
|
if faulthandler is not None:
|
||||||
|
faulthandler.enable()
|
||||||
|
|
||||||
|
MuxProcess.profiling = getenv_int('MITOGEN_PROFILING') > 0
|
||||||
|
if MuxProcess.profiling:
|
||||||
|
mitogen.core.enable_profiling()
|
||||||
|
|
||||||
|
MuxProcess.cls_original_env = dict(os.environ)
|
||||||
|
increase_open_file_limit()
|
||||||
|
|
||||||
|
|
||||||
|
def get_cpu_count(default=None):
|
||||||
|
"""
|
||||||
|
Get the multiplexer CPU count from the MITOGEN_CPU_COUNT environment
|
||||||
|
variable, returning `default` if one isn't set, or is out of range.
|
||||||
|
|
||||||
|
:param int default:
|
||||||
|
Default CPU count, or :data:`None` to use all available CPUs.
|
||||||
|
"""
|
||||||
|
max_cpus = multiprocessing.cpu_count()
|
||||||
|
if default is None:
|
||||||
|
default = max_cpus
|
||||||
|
|
||||||
|
cpu_count = getenv_int('MITOGEN_CPU_COUNT', default=default)
|
||||||
|
if cpu_count < 1 or cpu_count > max_cpus:
|
||||||
|
cpu_count = default
|
||||||
|
|
||||||
|
return cpu_count
|
||||||
|
|
||||||
|
|
||||||
|
class Broker(mitogen.master.Broker):
|
||||||
|
"""
|
||||||
|
WorkerProcess maintains fewer file descriptors, therefore does not need
|
||||||
|
the exuberant syscall expense of EpollPoller, so override it and restore
|
||||||
|
the poll() poller.
|
||||||
|
"""
|
||||||
|
poller_class = mitogen.parent.POLLER_LIGHTWEIGHT
|
||||||
|
|
||||||
|
|
||||||
|
class Binding(object):
|
||||||
|
"""
|
||||||
|
Represent a bound connection for a particular inventory hostname. When
|
||||||
|
operating in sharded mode, the actual MuxProcess implementing a connection
|
||||||
|
varies according to the target machine. Depending on the particular
|
||||||
|
implementation, this class represents a binding to the correct MuxProcess.
|
||||||
|
"""
|
||||||
|
def get_child_service_context(self):
|
||||||
|
"""
|
||||||
|
Return the :class:`mitogen.core.Context` to which children should
|
||||||
|
direct requests for services such as FileService, or :data:`None` for
|
||||||
|
the local process.
|
||||||
|
|
||||||
|
This can be different from :meth:`get_service_context` where MuxProcess
|
||||||
|
and WorkerProcess are combined, and it is discovered a task is
|
||||||
|
delegated after being assigned to its initial worker for the original
|
||||||
|
un-delegated hostname. In that case, connection management and
|
||||||
|
expensive services like file transfer must be implemented by the
|
||||||
|
MuxProcess connected to the target, rather than routed to the
|
||||||
|
MuxProcess responsible for executing the task.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def get_service_context(self):
|
||||||
|
"""
|
||||||
|
Return the :class:`mitogen.core.Context` to which this process should
|
||||||
|
direct ContextService requests, or :data:`None` for the local process.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""
|
||||||
|
Finalize any associated resources.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
|
class WorkerModel(object):
|
||||||
|
"""
|
||||||
|
Interface used by StrategyMixin to manage various Mitogen services, by
|
||||||
|
default running in one or more connection multiplexer subprocesses spawned
|
||||||
|
off the top-level Ansible process.
|
||||||
|
"""
|
||||||
|
def on_strategy_start(self):
|
||||||
|
"""
|
||||||
|
Called prior to strategy start in the top-level process. Responsible
|
||||||
|
for preparing any worker/connection multiplexer state.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def on_strategy_complete(self):
|
||||||
|
"""
|
||||||
|
Called after strategy completion in the top-level process. Must place
|
||||||
|
Ansible back in a "compatible" state where any other strategy plug-in
|
||||||
|
may execute.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def get_binding(self, inventory_name):
|
||||||
|
"""
|
||||||
|
Return a :class:`Binding` to access Mitogen services for
|
||||||
|
`inventory_name`. Usually called from worker processes, but may also be
|
||||||
|
called from top-level process to handle "meta: reset_connection".
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
|
class ClassicBinding(Binding):
|
||||||
|
"""
|
||||||
|
Only one connection may be active at a time in a classic worker, so its
|
||||||
|
binding just provides forwarders back to :class:`ClassicWorkerModel`.
|
||||||
|
"""
|
||||||
|
def __init__(self, model):
|
||||||
|
self.model = model
|
||||||
|
|
||||||
|
def get_service_context(self):
|
||||||
|
"""
|
||||||
|
See Binding.get_service_context().
|
||||||
|
"""
|
||||||
|
return self.model.parent
|
||||||
|
|
||||||
|
def get_child_service_context(self):
|
||||||
|
"""
|
||||||
|
See Binding.get_child_service_context().
|
||||||
|
"""
|
||||||
|
return self.model.parent
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""
|
||||||
|
See Binding.close().
|
||||||
|
"""
|
||||||
|
self.model.on_binding_close()
|
||||||
|
|
||||||
|
|
||||||
|
class ClassicWorkerModel(WorkerModel):
|
||||||
|
#: In the top-level process, this references one end of a socketpair(),
|
||||||
|
#: whose other end child MuxProcesses block reading from to determine when
|
||||||
|
#: the master process dies. When the top-level exits abnormally, or
|
||||||
|
#: normally but where :func:`_on_process_exit` has been called, this socket
|
||||||
|
#: will be closed, causing all the children to wake.
|
||||||
|
parent_sock = None
|
||||||
|
|
||||||
|
#: In the mux process, this is the other end of :attr:`cls_parent_sock`.
|
||||||
|
#: The main thread blocks on a read from it until :attr:`cls_parent_sock`
|
||||||
|
#: is closed.
|
||||||
|
child_sock = None
|
||||||
|
|
||||||
|
#: mitogen.master.Router for this worker.
|
||||||
|
router = None
|
||||||
|
|
||||||
|
#: mitogen.master.Broker for this worker.
|
||||||
|
broker = None
|
||||||
|
|
||||||
|
#: Name of multiplexer process socket we are currently connected to.
|
||||||
|
listener_path = None
|
||||||
|
|
||||||
|
#: mitogen.parent.Context representing the parent Context, which is the
|
||||||
|
#: connection multiplexer process when running in classic mode, or the
|
||||||
|
#: top-level process when running a new-style mode.
|
||||||
|
parent = None
|
||||||
|
|
||||||
|
def __init__(self, _init_logging=True):
|
||||||
|
"""
|
||||||
|
Arrange for classic model multiplexers to be started. The parent chooses
|
||||||
|
UNIX socket paths each child will use prior to fork, creates a
|
||||||
|
socketpair used essentially as a semaphore, then blocks waiting for the
|
||||||
|
child to indicate the UNIX socket is ready for use.
|
||||||
|
|
||||||
|
:param bool _init_logging:
|
||||||
|
For testing, if :data:`False`, don't initialize logging.
|
||||||
|
"""
|
||||||
|
# #573: The process ID that installed the :mod:`atexit` handler. If
|
||||||
|
# some unknown Ansible plug-in forks the Ansible top-level process and
|
||||||
|
# later performs a graceful Python exit, it may try to wait for child
|
||||||
|
# PIDs it never owned, causing a crash. We want to avoid that.
|
||||||
|
self._pid = os.getpid()
|
||||||
|
|
||||||
|
common_setup(_init_logging=_init_logging)
|
||||||
|
|
||||||
|
self.parent_sock, self.child_sock = socket.socketpair()
|
||||||
|
mitogen.core.set_cloexec(self.parent_sock.fileno())
|
||||||
|
mitogen.core.set_cloexec(self.child_sock.fileno())
|
||||||
|
|
||||||
|
self._muxes = [
|
||||||
|
MuxProcess(self, index)
|
||||||
|
for index in range(get_cpu_count(default=1))
|
||||||
|
]
|
||||||
|
for mux in self._muxes:
|
||||||
|
mux.start()
|
||||||
|
|
||||||
|
atexit.register(self._on_process_exit)
|
||||||
|
self.child_sock.close()
|
||||||
|
self.child_sock = None
|
||||||
|
|
||||||
|
def _listener_for_name(self, name):
|
||||||
|
"""
|
||||||
|
Given an inventory hostname, return the UNIX listener that should
|
||||||
|
communicate with it. This is a simple hash of the inventory name.
|
||||||
|
"""
|
||||||
|
mux = self._muxes[abs(hash(name)) % len(self._muxes)]
|
||||||
|
LOG.debug('will use multiplexer %d (%s) to connect to "%s"',
|
||||||
|
mux.index, mux.path, name)
|
||||||
|
return mux.path
|
||||||
|
|
||||||
|
def _reconnect(self, path):
|
||||||
|
if self.router is not None:
|
||||||
|
# Router can just be overwritten, but the previous parent
|
||||||
|
# connection must explicitly be removed from the broker first.
|
||||||
|
self.router.disconnect(self.parent)
|
||||||
|
self.parent = None
|
||||||
|
self.router = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.router, self.parent = mitogen.unix.connect(
|
||||||
|
path=path,
|
||||||
|
broker=self.broker,
|
||||||
|
)
|
||||||
|
except mitogen.unix.ConnectError as e:
|
||||||
|
# This is not AnsibleConnectionFailure since we want to break
|
||||||
|
# with_items loops.
|
||||||
|
raise ansible.errors.AnsibleError(shutting_down_msg % (e,))
|
||||||
|
|
||||||
|
self.router.max_message_size = MAX_MESSAGE_SIZE
|
||||||
|
self.listener_path = path
|
||||||
|
|
||||||
|
def _on_process_exit(self):
|
||||||
|
"""
|
||||||
|
This is an :mod:`atexit` handler installed in the top-level process.
|
||||||
|
|
||||||
|
Shut the write end of `sock`, causing the receive side of the socket in
|
||||||
|
every :class:`MuxProcess` to return 0-byte reads, and causing their
|
||||||
|
main threads to wake and initiate shutdown. After shutting the socket
|
||||||
|
down, wait on each child to finish exiting.
|
||||||
|
|
||||||
|
This is done using :mod:`atexit` since Ansible lacks any better hook to
|
||||||
|
run code during exit, and unless some synchronization exists with
|
||||||
|
MuxProcess, debug logs may appear on the user's terminal *after* the
|
||||||
|
prompt has been printed.
|
||||||
|
"""
|
||||||
|
if self._pid != os.getpid():
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.parent_sock.shutdown(socket.SHUT_WR)
|
||||||
|
except socket.error:
|
||||||
|
# Already closed. This is possible when tests are running.
|
||||||
|
LOG.debug('_on_process_exit: ignoring duplicate call')
|
||||||
|
return
|
||||||
|
|
||||||
|
mitogen.core.io_op(self.parent_sock.recv, 1)
|
||||||
|
self.parent_sock.close()
|
||||||
|
|
||||||
|
for mux in self._muxes:
|
||||||
|
_, status = os.waitpid(mux.pid, 0)
|
||||||
|
status = mitogen.fork._convert_exit_status(status)
|
||||||
|
LOG.debug('multiplexer %d PID %d %s', mux.index, mux.pid,
|
||||||
|
mitogen.parent.returncode_to_str(status))
|
||||||
|
|
||||||
|
def _test_reset(self):
|
||||||
|
"""
|
||||||
|
Used to clean up in unit tests.
|
||||||
|
"""
|
||||||
|
self.on_binding_close()
|
||||||
|
self._on_process_exit()
|
||||||
|
set_worker_model(None)
|
||||||
|
|
||||||
|
global _classic_worker_model
|
||||||
|
_classic_worker_model = None
|
||||||
|
|
||||||
|
def on_strategy_start(self):
|
||||||
|
"""
|
||||||
|
See WorkerModel.on_strategy_start().
|
||||||
|
"""
|
||||||
|
|
||||||
|
def on_strategy_complete(self):
|
||||||
|
"""
|
||||||
|
See WorkerModel.on_strategy_complete().
|
||||||
|
"""
|
||||||
|
|
||||||
|
def get_binding(self, inventory_name):
|
||||||
|
"""
|
||||||
|
See WorkerModel.get_binding().
|
||||||
|
"""
|
||||||
|
if self.broker is None:
|
||||||
|
self.broker = Broker()
|
||||||
|
|
||||||
|
path = self._listener_for_name(inventory_name)
|
||||||
|
if path != self.listener_path:
|
||||||
|
self._reconnect(path)
|
||||||
|
|
||||||
|
return ClassicBinding(self)
|
||||||
|
|
||||||
|
def on_binding_close(self):
|
||||||
|
if not self.broker:
|
||||||
|
return
|
||||||
|
|
||||||
|
self.broker.shutdown()
|
||||||
|
self.broker.join()
|
||||||
|
self.router = None
|
||||||
|
self.broker = None
|
||||||
|
self.parent = None
|
||||||
|
self.listener_path = None
|
||||||
|
|
||||||
|
# #420: Ansible executes "meta" actions in the top-level process,
|
||||||
|
# meaning "reset_connection" will cause :class:`mitogen.core.Latch` FDs
|
||||||
|
# to be cached and erroneously shared by children on subsequent
|
||||||
|
# WorkerProcess forks. To handle that, call on_fork() to ensure any
|
||||||
|
# shared state is discarded.
|
||||||
|
# #490: only attempt to clean up when it's known that some resources
|
||||||
|
# exist to cleanup, otherwise later __del__ double-call to close() due
|
||||||
|
# to GC at random moment may obliterate an unrelated Connection's
|
||||||
|
# related resources.
|
||||||
|
mitogen.fork.on_fork()
|
||||||
|
|
||||||
|
|
||||||
|
class MuxProcess(object):
|
||||||
|
"""
|
||||||
|
Implement a subprocess forked from the Ansible top-level, as a safe place
|
||||||
|
to contain the Mitogen IO multiplexer thread, keeping its use of the
|
||||||
|
logging package (and the logging package's heavy use of locks) far away
|
||||||
|
from os.fork(), which is used continuously by the multiprocessing package
|
||||||
|
in the top-level process.
|
||||||
|
|
||||||
|
The problem with running the multiplexer in that process is that should the
|
||||||
|
multiplexer thread be in the process of emitting a log entry (and holding
|
||||||
|
its lock) at the point of fork, in the child, the first attempt to log any
|
||||||
|
log entry using the same handler will deadlock the child, as in the memory
|
||||||
|
image the child received, the lock will always be marked held.
|
||||||
|
|
||||||
|
See https://bugs.python.org/issue6721 for a thorough description of the
|
||||||
|
class of problems this worker is intended to avoid.
|
||||||
|
"""
|
||||||
|
#: A copy of :data:`os.environ` at the time the multiplexer process was
|
||||||
|
#: started. It's used by mitogen_local.py to find changes made to the
|
||||||
|
#: top-level environment (e.g. vars plugins -- issue #297) that must be
|
||||||
|
#: applied to locally executed commands and modules.
|
||||||
|
cls_original_env = None
|
||||||
|
|
||||||
|
def __init__(self, model, index):
|
||||||
|
#: :class:`ClassicWorkerModel` instance we were created by.
|
||||||
|
self.model = model
|
||||||
|
#: MuxProcess CPU index.
|
||||||
|
self.index = index
|
||||||
|
#: Individual path of this process.
|
||||||
|
self.path = mitogen.unix.make_socket_path()
|
||||||
|
|
||||||
|
def start(self):
|
||||||
|
self.pid = os.fork()
|
||||||
|
if self.pid:
|
||||||
|
# Wait for child to boot before continuing.
|
||||||
|
mitogen.core.io_op(self.model.parent_sock.recv, 1)
|
||||||
|
return
|
||||||
|
|
||||||
|
ansible_mitogen.logging.set_process_name('mux:' + str(self.index))
|
||||||
|
if setproctitle:
|
||||||
|
setproctitle.setproctitle('mitogen mux:%s (%s)' % (
|
||||||
|
self.index,
|
||||||
|
os.path.basename(self.path),
|
||||||
|
))
|
||||||
|
|
||||||
|
self.model.parent_sock.close()
|
||||||
|
self.model.parent_sock = None
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
self.worker_main()
|
||||||
|
except Exception:
|
||||||
|
LOG.exception('worker_main() crashed')
|
||||||
|
finally:
|
||||||
|
sys.exit()
|
||||||
|
|
||||||
|
def worker_main(self):
|
||||||
|
"""
|
||||||
|
The main function of the mux process: setup the Mitogen broker thread
|
||||||
|
and ansible_mitogen services, then sleep waiting for the socket
|
||||||
|
connected to the parent to be closed (indicating the parent has died).
|
||||||
|
"""
|
||||||
|
save_pid('mux')
|
||||||
|
|
||||||
|
# #623: MuxProcess ignores SIGINT because it wants to live until every
|
||||||
|
# Ansible worker process has been cleaned up by
|
||||||
|
# TaskQueueManager.cleanup(), otherwise harmless yet scary warnings
|
||||||
|
# about being unable to connect to MuxProcess could be printed.
|
||||||
|
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||||
|
ansible_mitogen.logging.set_process_name('mux')
|
||||||
|
ansible_mitogen.affinity.policy.assign_muxprocess(self.index)
|
||||||
|
|
||||||
|
self._setup_master()
|
||||||
|
self._setup_services()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Let the parent know our listening socket is ready.
|
||||||
|
mitogen.core.io_op(self.model.child_sock.send, b('1'))
|
||||||
|
# Block until the socket is closed, which happens on parent exit.
|
||||||
|
mitogen.core.io_op(self.model.child_sock.recv, 1)
|
||||||
|
finally:
|
||||||
|
self.broker.shutdown()
|
||||||
|
self.broker.join()
|
||||||
|
|
||||||
|
# Test frameworks living somewhere higher on the stack of the
|
||||||
|
# original parent process may try to catch sys.exit(), so do a C
|
||||||
|
# level exit instead.
|
||||||
|
os._exit(0)
|
||||||
|
|
||||||
|
def _enable_router_debug(self):
|
||||||
|
if 'MITOGEN_ROUTER_DEBUG' in os.environ:
|
||||||
|
self.router.enable_debug()
|
||||||
|
|
||||||
|
def _enable_stack_dumps(self):
|
||||||
|
secs = getenv_int('MITOGEN_DUMP_THREAD_STACKS', default=0)
|
||||||
|
if secs:
|
||||||
|
mitogen.debug.dump_to_logger(secs=secs)
|
||||||
|
|
||||||
|
def _setup_master(self):
|
||||||
|
"""
|
||||||
|
Construct a Router, Broker, and mitogen.unix listener
|
||||||
|
"""
|
||||||
|
self.broker = mitogen.master.Broker(install_watcher=False)
|
||||||
|
self.router = mitogen.master.Router(
|
||||||
|
broker=self.broker,
|
||||||
|
max_message_size=MAX_MESSAGE_SIZE,
|
||||||
|
)
|
||||||
|
_setup_responder(self.router.responder)
|
||||||
|
mitogen.core.listen(self.broker, 'shutdown', self._on_broker_shutdown)
|
||||||
|
mitogen.core.listen(self.broker, 'exit', self._on_broker_exit)
|
||||||
|
self.listener = mitogen.unix.Listener.build_stream(
|
||||||
|
router=self.router,
|
||||||
|
path=self.path,
|
||||||
|
backlog=C.DEFAULT_FORKS,
|
||||||
|
)
|
||||||
|
self._enable_router_debug()
|
||||||
|
self._enable_stack_dumps()
|
||||||
|
|
||||||
|
def _setup_services(self):
|
||||||
|
"""
|
||||||
|
Construct a ContextService and a thread to service requests for it
|
||||||
|
arriving from worker processes.
|
||||||
|
"""
|
||||||
|
self.pool = mitogen.service.Pool(
|
||||||
|
router=self.router,
|
||||||
|
size=getenv_int('MITOGEN_POOL_SIZE', default=32),
|
||||||
|
)
|
||||||
|
setup_pool(self.pool)
|
||||||
|
|
||||||
|
def _on_broker_shutdown(self):
|
||||||
|
"""
|
||||||
|
Respond to broker shutdown by shutting down the pool. Do not join on it
|
||||||
|
yet, since that would block the broker thread which then cannot clean
|
||||||
|
up pending handlers and connections, which is required for the threads
|
||||||
|
to exit gracefully.
|
||||||
|
"""
|
||||||
|
self.pool.stop(join=False)
|
||||||
|
|
||||||
|
def _on_broker_exit(self):
|
||||||
|
"""
|
||||||
|
Respond to the broker thread about to exit by finally joining on the
|
||||||
|
pool. This is safe since pools only block in connection attempts, and
|
||||||
|
connection attempts fail with CancelledError when broker shutdown
|
||||||
|
begins.
|
||||||
|
"""
|
||||||
|
self.pool.join()
|
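Two small pieces of process.py above decide how much parallelism the classic worker model gets and which multiplexer a given host talks to: get_cpu_count() clamps MITOGEN_CPU_COUNT to the machine's CPU count, and _listener_for_name() hashes the inventory hostname over the list of MuxProcesses. The following standalone sketch reproduces that selection logic; the hostnames and socket paths are made-up examples, not values from this repository.

# Sketch of the multiplexer sizing and host-to-mux sharding used above.
import multiprocessing
import os

def getenv_int(key, default=0):
    try:
        return int(os.environ.get(key, str(default)))
    except ValueError:
        return default

def get_cpu_count(default=None):
    max_cpus = multiprocessing.cpu_count()
    if default is None:
        default = max_cpus
    cpu_count = getenv_int('MITOGEN_CPU_COUNT', default=default)
    if cpu_count < 1 or cpu_count > max_cpus:
        cpu_count = default
    return cpu_count

# ClassicWorkerModel defaults to a single mux unless MITOGEN_CPU_COUNT is set.
mux_paths = ['/tmp/mitogen_mux_%d.sock' % i
             for i in range(get_cpu_count(default=1))]

def listener_for_name(name):
    # Same scheme as ClassicWorkerModel._listener_for_name().
    return mux_paths[abs(hash(name)) % len(mux_paths)]

for host in ('web1.example.com', 'db1.example.com'):
    print(host, '->', listener_for_name(host))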
mitogen-0.3.9/ansible_mitogen/runner.py (new file, 1101 lines; diff suppressed because it is too large)
mitogen-0.3.9/ansible_mitogen/services.py (new file, 565 lines)
@@ -0,0 +1,565 @@
|
|||||||
|
# Copyright 2019, David Wilson
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are met:
|
||||||
|
#
|
||||||
|
# 1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer.
|
||||||
|
#
|
||||||
|
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer in the documentation
|
||||||
|
# and/or other materials provided with the distribution.
|
||||||
|
#
|
||||||
|
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||||
|
# may be used to endorse or promote products derived from this software without
|
||||||
|
# specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||||
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
# !mitogen: minify_safe
|
||||||
|
|
||||||
|
"""
|
||||||
|
Classes in this file define Mitogen 'services' that run (initially) within the
|
||||||
|
connection multiplexer process that is forked off the top-level controller
|
||||||
|
process.
|
||||||
|
|
||||||
|
Once a worker process connects to a multiplexer process
|
||||||
|
(Connection._connect()), it communicates with these services to establish new
|
||||||
|
connections, grant access to files by children, and register for notification
|
||||||
|
when a child has completed a job.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
|
||||||
|
import ansible.constants
|
||||||
|
|
||||||
|
import mitogen.core
|
||||||
|
import mitogen.service
|
||||||
|
import ansible_mitogen.loaders
|
||||||
|
import ansible_mitogen.module_finder
|
||||||
|
import ansible_mitogen.target
|
||||||
|
import ansible_mitogen.utils.unsafe
|
||||||
|
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Force load of plugin to ensure ConfigManager has definitions loaded. Done
|
||||||
|
# during module import to ensure a single-threaded environment; PluginLoader
|
||||||
|
# is not thread-safe.
|
||||||
|
ansible_mitogen.loaders.shell_loader.get('sh')
|
||||||
|
|
||||||
|
|
||||||
|
if sys.version_info[0] == 3:
|
||||||
|
def reraise(tp, value, tb):
|
||||||
|
if value is None:
|
||||||
|
value = tp()
|
||||||
|
if value.__traceback__ is not tb:
|
||||||
|
raise value.with_traceback(tb)
|
||||||
|
raise value
|
||||||
|
else:
|
||||||
|
exec(
|
||||||
|
"def reraise(tp, value, tb=None):\n"
|
||||||
|
" raise tp, value, tb\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_candidate_temp_dirs():
|
||||||
|
try:
|
||||||
|
# >=2.5
|
||||||
|
options = ansible.constants.config.get_plugin_options('shell', 'sh')
|
||||||
|
remote_tmp = options.get('remote_tmp') or ansible.constants.DEFAULT_REMOTE_TMP
|
||||||
|
system_tmpdirs = options.get('system_tmpdirs', ('/var/tmp', '/tmp'))
|
||||||
|
except AttributeError:
|
||||||
|
# 2.3
|
||||||
|
remote_tmp = ansible.constants.DEFAULT_REMOTE_TMP
|
||||||
|
system_tmpdirs = ('/var/tmp', '/tmp')
|
||||||
|
|
||||||
|
return ansible_mitogen.utils.unsafe.cast([remote_tmp] + list(system_tmpdirs))
|
||||||
|
|
||||||
|
|
||||||
|
def key_from_dict(**kwargs):
|
||||||
|
"""
|
||||||
|
Return a unique string representation of a dict as quickly as possible.
|
||||||
|
Used to generate deduplication keys from a request.
|
||||||
|
"""
|
||||||
|
out = []
|
||||||
|
stack = [kwargs]
|
||||||
|
while stack:
|
||||||
|
obj = stack.pop()
|
||||||
|
if isinstance(obj, dict):
|
||||||
|
stack.extend(sorted(obj.items()))
|
||||||
|
elif isinstance(obj, (list, tuple)):
|
||||||
|
stack.extend(obj)
|
||||||
|
else:
|
||||||
|
out.append(str(obj))
|
||||||
|
return ''.join(out)
|
||||||
|
|
||||||
|
|
||||||
|
class Error(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class ContextService(mitogen.service.Service):
|
||||||
|
"""
|
||||||
|
Used by workers to fetch the single Context instance corresponding to a
|
||||||
|
connection configuration, creating the matching connection if it does not
|
||||||
|
exist.
|
||||||
|
|
||||||
|
For connection methods and their parameters, see:
|
||||||
|
https://mitogen.readthedocs.io/en/latest/api.html#context-factories
|
||||||
|
|
||||||
|
This concentrates connections in the top-level process, which may become a
|
||||||
|
bottleneck. The bottleneck can be removed using per-CPU connection
|
||||||
|
processes and arranging for the worker to select one according to a hash of
|
||||||
|
the connection parameters (sharding).
|
||||||
|
"""
|
||||||
|
max_interpreters = int(os.getenv('MITOGEN_MAX_INTERPRETERS', '20'))
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super(ContextService, self).__init__(*args, **kwargs)
|
||||||
|
self._lock = threading.Lock()
|
||||||
|
#: Records the :meth:`get` result dict for successful calls, returned
|
||||||
|
#: for identical subsequent calls. Keyed by :meth:`key_from_dict`.
|
||||||
|
self._response_by_key = {}
|
||||||
|
#: List of :class:`mitogen.core.Latch` awaiting the result for a
|
||||||
|
#: particular key.
|
||||||
|
self._latches_by_key = {}
|
||||||
|
#: Mapping of :class:`mitogen.core.Context` -> reference count. Each
|
||||||
|
#: call to :meth:`get` increases this by one. Calls to :meth:`put`
|
||||||
|
#: decrease it by one.
|
||||||
|
self._refs_by_context = {}
|
||||||
|
#: List of contexts in creation order by via= parameter. When
|
||||||
|
#: :attr:`max_interpreters` is reached, the most recently used context
|
||||||
|
#: is destroyed to make room for any additional context.
|
||||||
|
self._lru_by_via = {}
|
||||||
|
#: :func:`key_from_dict` result by Context.
|
||||||
|
self._key_by_context = {}
|
||||||
|
#: Mapping of Context -> parent Context
|
||||||
|
self._via_by_context = {}
|
||||||
|
|
||||||
|
@mitogen.service.expose(mitogen.service.AllowParents())
|
||||||
|
@mitogen.service.arg_spec({
|
||||||
|
'stack': list,
|
||||||
|
})
|
||||||
|
def reset(self, stack):
|
||||||
|
"""
|
||||||
|
Return a reference, forcing close and discard of the underlying
|
||||||
|
connection. Used for 'meta: reset_connection' or when some other error
|
||||||
|
is detected.
|
||||||
|
|
||||||
|
:returns:
|
||||||
|
:data:`True` if a connection was found to discard, otherwise
|
||||||
|
:data:`False`.
|
||||||
|
"""
|
||||||
|
LOG.debug('%r.reset(%r)', self, stack)
|
||||||
|
|
||||||
|
# this could happen if we have a `shutdown -r` shell command
|
||||||
|
# and then a `wait_for_connection` right afterwards
|
||||||
|
# in this case, we have no stack to disconnect from
|
||||||
|
if not stack:
|
||||||
|
return False
|
||||||
|
|
||||||
|
l = mitogen.core.Latch()
|
||||||
|
context = None
|
||||||
|
with self._lock:
|
||||||
|
for i, spec in enumerate(stack):
|
||||||
|
key = key_from_dict(via=context, **spec)
|
||||||
|
response = self._response_by_key.get(key)
|
||||||
|
if response is None:
|
||||||
|
LOG.debug('%r: could not find connection to shut down; '
|
||||||
|
'failed at hop %d', self, i)
|
||||||
|
return False
|
||||||
|
|
||||||
|
context = response['context']
|
||||||
|
|
||||||
|
mitogen.core.listen(context, 'disconnect', l.put)
|
||||||
|
self._shutdown_unlocked(context)
|
||||||
|
|
||||||
|
# The timeout below is to turn a hang into a crash in case there is any
|
||||||
|
# possible race between 'disconnect' signal subscription, and the child
|
||||||
|
# abruptly disconnecting.
|
||||||
|
l.get(timeout=30.0)
|
||||||
|
return True
|
||||||
|
|
||||||
|
@mitogen.service.expose(mitogen.service.AllowParents())
|
||||||
|
@mitogen.service.arg_spec({
|
||||||
|
'context': mitogen.core.Context
|
||||||
|
})
|
||||||
|
def put(self, context):
|
||||||
|
"""
|
||||||
|
Return a reference, making it eligible for recycling once its reference
|
||||||
|
count reaches zero.
|
||||||
|
"""
|
||||||
|
LOG.debug('decrementing reference count for %r', context)
|
||||||
|
self._lock.acquire()
|
||||||
|
try:
|
||||||
|
if self._refs_by_context.get(context, 0) == 0:
|
||||||
|
LOG.warning('%r.put(%r): refcount was 0. shutdown_all called?',
|
||||||
|
self, context)
|
||||||
|
return
|
||||||
|
self._refs_by_context[context] -= 1
|
||||||
|
finally:
|
||||||
|
self._lock.release()
|
||||||
|
|
||||||
|
def _produce_response(self, key, response):
|
||||||
|
"""
|
||||||
|
Reply to every waiting request matching a configuration key with a
|
||||||
|
response dictionary, deleting the list of waiters when done.
|
||||||
|
|
||||||
|
:param str key:
|
||||||
|
Result of :meth:`key_from_dict`
|
||||||
|
:param dict response:
|
||||||
|
Response dictionary
|
||||||
|
:returns:
|
||||||
|
Number of waiters that were replied to.
|
||||||
|
"""
|
||||||
|
self._lock.acquire()
|
||||||
|
try:
|
||||||
|
latches = self._latches_by_key.pop(key)
|
||||||
|
count = len(latches)
|
||||||
|
for latch in latches:
|
||||||
|
latch.put(response)
|
||||||
|
finally:
|
||||||
|
self._lock.release()
|
||||||
|
return count
|
||||||
|
|
||||||
|
def _forget_context_unlocked(self, context):
|
||||||
|
key = self._key_by_context.get(context)
|
||||||
|
if key is None:
|
||||||
|
LOG.debug('%r: attempt to forget unknown %r', self, context)
|
||||||
|
return
|
||||||
|
|
||||||
|
self._response_by_key.pop(key, None)
|
||||||
|
self._latches_by_key.pop(key, None)
|
||||||
|
self._key_by_context.pop(context, None)
|
||||||
|
self._refs_by_context.pop(context, None)
|
||||||
|
self._via_by_context.pop(context, None)
|
||||||
|
self._lru_by_via.pop(context, None)
|
||||||
|
|
||||||
|
def _shutdown_unlocked(self, context, lru=None, new_context=None):
|
||||||
|
"""
|
||||||
|
Arrange for `context` to be shut down, and optionally add `new_context`
|
||||||
|
to the LRU list while holding the lock.
|
||||||
|
"""
|
||||||
|
LOG.info('%r._shutdown_unlocked(): shutting down %r', self, context)
|
||||||
|
context.shutdown()
|
||||||
|
via = self._via_by_context.get(context)
|
||||||
|
if via:
|
||||||
|
lru = self._lru_by_via.get(via)
|
||||||
|
if lru:
|
||||||
|
if context in lru:
|
||||||
|
lru.remove(context)
|
||||||
|
if new_context:
|
||||||
|
lru.append(new_context)
|
||||||
|
self._forget_context_unlocked(context)
|
||||||
|
|
||||||
|
def _update_lru_unlocked(self, new_context, spec, via):
|
||||||
|
"""
|
||||||
|
Update the LRU ("MRU"?) list associated with the connection described
|
||||||
|
by `kwargs`, destroying the most recently created context if the list
|
||||||
|
is full. Finally add `new_context` to the list.
|
||||||
|
"""
|
||||||
|
self._via_by_context[new_context] = via
|
||||||
|
|
||||||
|
lru = self._lru_by_via.setdefault(via, [])
|
||||||
|
if len(lru) < self.max_interpreters:
|
||||||
|
lru.append(new_context)
|
||||||
|
return
|
||||||
|
|
||||||
|
for context in reversed(lru):
|
||||||
|
if self._refs_by_context[context] == 0:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
LOG.warning('via=%r reached maximum number of interpreters, '
|
||||||
|
'but they are all marked as in-use.', via)
|
||||||
|
return
|
||||||
|
|
||||||
|
self._shutdown_unlocked(context, lru=lru, new_context=new_context)
|
||||||
|
|
||||||
|
def _update_lru(self, new_context, spec, via):
|
||||||
|
self._lock.acquire()
|
||||||
|
try:
|
||||||
|
self._update_lru_unlocked(new_context, spec, via)
|
||||||
|
finally:
|
||||||
|
self._lock.release()
|
||||||
|
|
||||||
|
@mitogen.service.expose(mitogen.service.AllowParents())
|
||||||
|
def dump(self):
|
||||||
|
"""
|
||||||
|
For testing, return a list of dicts describing every currently
|
||||||
|
connected context.
|
||||||
|
"""
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
'context_name': context.name,
|
||||||
|
'via': getattr(self._via_by_context.get(context),
|
||||||
|
'name', None),
|
||||||
|
'refs': self._refs_by_context.get(context),
|
||||||
|
}
|
||||||
|
for context, key in sorted(self._key_by_context.items(),
|
||||||
|
key=lambda c_k: c_k[0].context_id)
|
||||||
|
]
|
||||||
|
|
||||||
|
@mitogen.service.expose(mitogen.service.AllowParents())
|
||||||
|
def shutdown_all(self):
|
||||||
|
"""
|
||||||
|
For testing use, arrange for all connections to be shut down.
|
||||||
|
"""
|
||||||
|
self._lock.acquire()
|
||||||
|
try:
|
||||||
|
for context in list(self._key_by_context):
|
||||||
|
self._shutdown_unlocked(context)
|
||||||
|
finally:
|
||||||
|
self._lock.release()
|
||||||
|
|
||||||
|
def _on_context_disconnect(self, context):
|
||||||
|
"""
|
||||||
|
Respond to Context disconnect event by deleting any record of the no
|
||||||
|
longer reachable context. This method runs in the Broker thread and
|
||||||
|
must not block.
|
||||||
|
"""
|
||||||
|
self._lock.acquire()
|
||||||
|
try:
|
||||||
|
LOG.info('%r: Forgetting %r due to stream disconnect', self, context)
|
||||||
|
self._forget_context_unlocked(context)
|
||||||
|
finally:
|
||||||
|
self._lock.release()
|
||||||
|
|
||||||
|
ALWAYS_PRELOAD = (
|
||||||
|
'ansible.module_utils.basic',
|
||||||
|
'ansible.module_utils.json_utils',
|
||||||
|
'ansible.release',
|
||||||
|
'ansible_mitogen.runner',
|
||||||
|
'ansible_mitogen.target',
|
||||||
|
'mitogen.fork',
|
||||||
|
'mitogen.service',
|
||||||
|
)
|
||||||
|
|
||||||
|
def _send_module_forwards(self, context):
|
||||||
|
if hasattr(self.router.responder, 'forward_modules'):
|
||||||
|
self.router.responder.forward_modules(context, self.ALWAYS_PRELOAD)
|
||||||
|
|
||||||
|
_candidate_temp_dirs = None
|
||||||
|
|
||||||
|
def _get_candidate_temp_dirs(self):
|
||||||
|
"""
|
||||||
|
Return a list of locations to try to create the single temporary
|
||||||
|
directory used by the run. This simply caches the (expensive) plugin
|
||||||
|
load of :func:`_get_candidate_temp_dirs`.
|
||||||
|
"""
|
||||||
|
if self._candidate_temp_dirs is None:
|
||||||
|
self._candidate_temp_dirs = _get_candidate_temp_dirs()
|
||||||
|
return self._candidate_temp_dirs
|
||||||
|
|
||||||
|
def _connect(self, key, spec, via=None):
|
||||||
|
"""
|
||||||
|
Actual connect implementation. Arranges for the Mitogen connection to
|
||||||
|
be created and enqueues an asynchronous call to start the forked task
|
||||||
|
parent in the remote context.
|
||||||
|
|
||||||
|
:param key:
|
||||||
|
Deduplication key representing the connection configuration.
|
||||||
|
:param spec:
|
||||||
|
Connection specification.
|
||||||
|
:returns:
|
||||||
|
Dict like::
|
||||||
|
|
||||||
|
{
|
||||||
|
'context': mitogen.core.Context or None,
|
||||||
|
'via': mitogen.core.Context or None,
|
||||||
|
'init_child_result': {
|
||||||
|
'fork_context': mitogen.core.Context,
|
||||||
|
'home_dir': str or None,
|
||||||
|
},
|
||||||
|
'msg': str or None
|
||||||
|
}
|
||||||
|
|
||||||
|
Where `context` is a reference to the newly constructed context,
|
||||||
|
`init_child_result` is the result of executing
|
||||||
|
:func:`ansible_mitogen.target.init_child` in that context, `msg` is
|
||||||
|
an error message and the remaining fields are :data:`None`, or
|
||||||
|
`msg` is :data:`None` and the remaining fields are set.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
method = getattr(self.router, spec['method'])
|
||||||
|
except AttributeError:
|
||||||
|
raise Error('unsupported method: %(method)s' % spec)
|
||||||
|
|
||||||
|
context = method(via=via, unidirectional=True, **spec['kwargs'])
|
||||||
|
if via and spec.get('enable_lru'):
|
||||||
|
self._update_lru(context, spec, via)
|
||||||
|
|
||||||
|
# Forget the context when its disconnect event fires.
|
||||||
|
mitogen.core.listen(context, 'disconnect',
|
||||||
|
lambda: self._on_context_disconnect(context))
|
||||||
|
|
||||||
|
self._send_module_forwards(context)
|
||||||
|
init_child_result = context.call(
|
||||||
|
ansible_mitogen.target.init_child,
|
||||||
|
log_level=LOG.getEffectiveLevel(),
|
||||||
|
candidate_temp_dirs=self._get_candidate_temp_dirs(),
|
||||||
|
)
|
||||||
|
|
||||||
|
if os.environ.get('MITOGEN_DUMP_THREAD_STACKS'):
|
||||||
|
from mitogen import debug
|
||||||
|
context.call(debug.dump_to_logger)
|
||||||
|
|
||||||
|
self._key_by_context[context] = key
|
||||||
|
self._refs_by_context[context] = 0
|
||||||
|
return {
|
||||||
|
'context': context,
|
||||||
|
'via': via,
|
||||||
|
'init_child_result': init_child_result,
|
||||||
|
'msg': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
def _wait_or_start(self, spec, via=None):
|
||||||
|
latch = mitogen.core.Latch()
|
||||||
|
key = key_from_dict(via=via, **spec)
|
||||||
|
self._lock.acquire()
|
||||||
|
try:
|
||||||
|
response = self._response_by_key.get(key)
|
||||||
|
if response is not None:
|
||||||
|
self._refs_by_context[response['context']] += 1
|
||||||
|
latch.put(response)
|
||||||
|
return latch
|
||||||
|
|
||||||
|
latches = self._latches_by_key.setdefault(key, [])
|
||||||
|
first = len(latches) == 0
|
||||||
|
latches.append(latch)
|
||||||
|
finally:
|
||||||
|
self._lock.release()
|
||||||
|
|
||||||
|
if first:
|
||||||
|
# I'm the first requester, so I will create the connection.
|
||||||
|
try:
|
||||||
|
response = self._connect(key, spec, via=via)
|
||||||
|
count = self._produce_response(key, response)
|
||||||
|
# Only record the response for non-error results.
|
||||||
|
self._response_by_key[key] = response
|
||||||
|
# Set the reference count to the number of waiters.
|
||||||
|
self._refs_by_context[response['context']] += count
|
||||||
|
except Exception:
|
||||||
|
self._produce_response(key, sys.exc_info())
|
||||||
|
|
||||||
|
return latch
|
||||||
|
|
||||||
|
disconnect_msg = (
|
||||||
|
'Channel was disconnected while connection attempt was in progress; '
|
||||||
|
'this may be caused by an abnormal Ansible exit, or due to an '
|
||||||
|
'unreliable target.'
|
||||||
|
)
|
||||||
|
|
||||||
|
@mitogen.service.expose(mitogen.service.AllowParents())
|
||||||
|
@mitogen.service.arg_spec({
|
||||||
|
'stack': list
|
||||||
|
})
|
||||||
|
def get(self, stack):
|
||||||
|
"""
|
||||||
|
Return a Context referring to an established connection with the given
|
||||||
|
configuration, establishing new connections as necessary.
|
||||||
|
|
||||||
|
:param list stack:
|
||||||
|
Connection descriptions. Each element is a dict containing 'method'
|
||||||
|
and 'kwargs' keys describing the Router method and arguments.
|
||||||
|
Subsequent elements are proxied via the previous.
|
||||||
|
|
||||||
|
:returns dict:
|
||||||
|
* context: mitogen.parent.Context or None.
|
||||||
|
* init_child_result: Result of :func:`init_child`.
|
||||||
|
* msg: StreamError exception text or None.
|
||||||
|
* method_name: string failing method name.
|
||||||
|
"""
|
||||||
|
via = None
|
||||||
|
for spec in stack:
|
||||||
|
try:
|
||||||
|
result = self._wait_or_start(spec, via=via).get()
|
||||||
|
if isinstance(result, tuple): # exc_info()
|
||||||
|
reraise(*result)
|
||||||
|
via = result['context']
|
||||||
|
except mitogen.core.ChannelError:
|
||||||
|
return {
|
||||||
|
'context': None,
|
||||||
|
'init_child_result': None,
|
||||||
|
'method_name': spec['method'],
|
||||||
|
'msg': self.disconnect_msg,
|
||||||
|
}
|
||||||
|
except mitogen.core.StreamError as e:
|
||||||
|
return {
|
||||||
|
'context': None,
|
||||||
|
'init_child_result': None,
|
||||||
|
'method_name': spec['method'],
|
||||||
|
'msg': str(e),
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class ModuleDepService(mitogen.service.Service):
|
||||||
|
"""
|
||||||
|
Scan a new-style module and produce a cached mapping of module_utils names
|
||||||
|
to their resolved filesystem paths.
|
||||||
|
"""
|
||||||
|
invoker_class = mitogen.service.SerializedInvoker
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super(ModuleDepService, self).__init__(*args, **kwargs)
|
||||||
|
self._cache = {}
|
||||||
|
|
||||||
|
def _get_builtin_names(self, builtin_path, resolved):
|
||||||
|
return [
|
||||||
|
mitogen.core.to_text(fullname)
|
||||||
|
for fullname, path, is_pkg in resolved
|
||||||
|
if os.path.abspath(path).startswith(builtin_path)
|
||||||
|
]
|
||||||
|
|
||||||
|
def _get_custom_tups(self, builtin_path, resolved):
|
||||||
|
return [
|
||||||
|
(mitogen.core.to_text(fullname),
|
||||||
|
mitogen.core.to_text(path),
|
||||||
|
is_pkg)
|
||||||
|
for fullname, path, is_pkg in resolved
|
||||||
|
if not os.path.abspath(path).startswith(builtin_path)
|
||||||
|
]
|
||||||
|
|
||||||
|
@mitogen.service.expose(policy=mitogen.service.AllowParents())
|
||||||
|
@mitogen.service.arg_spec({
|
||||||
|
'module_name': mitogen.core.UnicodeType,
|
||||||
|
'module_path': mitogen.core.FsPathTypes,
|
||||||
|
'search_path': tuple,
|
||||||
|
'builtin_path': mitogen.core.FsPathTypes,
|
||||||
|
'context': mitogen.core.Context,
|
||||||
|
})
|
||||||
|
def scan(self, module_name, module_path, search_path, builtin_path, context):
|
||||||
|
key = (module_name, search_path)
|
||||||
|
if key not in self._cache:
|
||||||
|
resolved = ansible_mitogen.module_finder.scan(
|
||||||
|
module_name=module_name,
|
||||||
|
module_path=module_path,
|
||||||
|
search_path=tuple(search_path) + (builtin_path,),
|
||||||
|
)
|
||||||
|
builtin_path = os.path.abspath(builtin_path)
|
||||||
|
builtin = self._get_builtin_names(builtin_path, resolved)
|
||||||
|
custom = self._get_custom_tups(builtin_path, resolved)
|
||||||
|
self._cache[key] = {
|
||||||
|
'builtin': builtin,
|
||||||
|
'custom': custom,
|
||||||
|
}
|
||||||
|
return self._cache[key]
|
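ContextService deduplicates connection requests purely on the string produced by key_from_dict(), so two workers asking for the same connection specification end up sharing one Context. The sketch below reproduces that key derivation outside the service, using a made-up connection spec, to show that key order in the spec does not matter.

# Sketch of the deduplication key used by ContextService._wait_or_start();
# the connection specs below are made-up examples.
def key_from_dict(**kwargs):
    out = []
    stack = [kwargs]
    while stack:
        obj = stack.pop()
        if isinstance(obj, dict):
            stack.extend(sorted(obj.items()))
        elif isinstance(obj, (list, tuple)):
            stack.extend(obj)
        else:
            out.append(str(obj))
    return ''.join(out)

spec_a = {'method': 'ssh', 'kwargs': {'hostname': 'web1', 'username': 'deploy'}}
spec_b = {'kwargs': {'username': 'deploy', 'hostname': 'web1'}, 'method': 'ssh'}

# Identical specs yield identical keys, so only one connection is created.
print(key_from_dict(via=None, **spec_a) == key_from_dict(via=None, **spec_b))  # True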
mitogen-0.3.9/ansible_mitogen/strategy.py (new file, 328 lines)
@@ -0,0 +1,328 @@
|
|||||||
|
# Copyright 2019, David Wilson
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are met:
|
||||||
|
#
|
||||||
|
# 1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer.
|
||||||
|
#
|
||||||
|
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer in the documentation
|
||||||
|
# and/or other materials provided with the distribution.
|
||||||
|
#
|
||||||
|
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||||
|
# may be used to endorse or promote products derived from this software without
|
||||||
|
# specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||||
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import os
|
||||||
|
import signal
|
||||||
|
import threading
|
||||||
|
|
||||||
|
try:
|
||||||
|
import setproctitle
|
||||||
|
except ImportError:
|
||||||
|
setproctitle = None
|
||||||
|
|
||||||
|
import mitogen.core
|
||||||
|
import ansible_mitogen.affinity
|
||||||
|
import ansible_mitogen.loaders
|
||||||
|
import ansible_mitogen.mixins
|
||||||
|
import ansible_mitogen.process
|
||||||
|
|
||||||
|
import ansible.executor.process.worker
|
||||||
|
import ansible.utils.sentinel
|
||||||
|
|
||||||
|
|
||||||
|
def _patch_awx_callback():
|
||||||
|
"""
|
||||||
|
issue #400: AWX loads a display callback that suffers from thread-safety
|
||||||
|
issues. Detect the presence of older AWX versions and patch the bug.
|
||||||
|
"""
|
||||||
|
# AWX uses sitecustomize.py to force-load this package. If it exists, we're
|
||||||
|
# running under AWX.
|
||||||
|
try:
|
||||||
|
import awx_display_callback.events
|
||||||
|
except ImportError:
|
||||||
|
return
|
||||||
|
|
||||||
|
if hasattr(awx_display_callback.events.EventContext(), '_local'):
|
||||||
|
# Patched version.
|
||||||
|
return
|
||||||
|
|
||||||
|
def patch_add_local(self, **kwargs):
|
||||||
|
tls = vars(self._local)
|
||||||
|
ctx = tls.setdefault('_ctx', {})
|
||||||
|
ctx.update(kwargs)
|
||||||
|
|
||||||
|
awx_display_callback.events.EventContext._local = threading.local()
|
||||||
|
awx_display_callback.events.EventContext.add_local = patch_add_local
|
||||||
|
|
||||||
|
_patch_awx_callback()
|
||||||
|
|
||||||
|
|
||||||
|
def wrap_action_loader__get(name, *args, **kwargs):
|
||||||
|
"""
|
||||||
|
While the mitogen strategy is active, trap action_loader.get() calls,
|
||||||
|
augmenting any fetched class with ActionModuleMixin, which replaces various
|
||||||
|
helper methods inherited from ActionBase with implementations that avoid
|
||||||
|
the use of shell fragments wherever possible.
|
||||||
|
|
||||||
|
This is used instead of static subclassing as it generalizes to third party
|
||||||
|
action plugins outside the Ansible tree.
|
||||||
|
"""
|
||||||
|
get_kwargs = {'class_only': True}
|
||||||
|
if name in ('fetch',):
|
||||||
|
name = 'mitogen_' + name
|
||||||
|
get_kwargs['collection_list'] = kwargs.pop('collection_list', None)
|
||||||
|
|
||||||
|
klass = ansible_mitogen.loaders.action_loader__get(name, **get_kwargs)
|
||||||
|
if klass:
|
||||||
|
bases = (ansible_mitogen.mixins.ActionModuleMixin, klass)
|
||||||
|
adorned_klass = type(str(name), bases, {})
|
||||||
|
if kwargs.get('class_only'):
|
||||||
|
return adorned_klass
|
||||||
|
return adorned_klass(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
REDIRECTED_CONNECTION_PLUGINS = (
|
||||||
|
'buildah',
|
||||||
|
'docker',
|
||||||
|
'kubectl',
|
||||||
|
'jail',
|
||||||
|
'local',
|
||||||
|
'lxc',
|
||||||
|
'lxd',
|
||||||
|
'machinectl',
|
||||||
|
'podman',
|
||||||
|
'setns',
|
||||||
|
'ssh',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def wrap_connection_loader__get(name, *args, **kwargs):
|
||||||
|
"""
|
||||||
|
While a Mitogen strategy is active, rewrite connection_loader.get() calls
|
||||||
|
for some transports into requests for a compatible Mitogen transport.
|
||||||
|
"""
|
||||||
|
if name in REDIRECTED_CONNECTION_PLUGINS:
|
||||||
|
name = 'mitogen_' + name
|
||||||
|
|
||||||
|
return ansible_mitogen.loaders.connection_loader__get(name, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def wrap_worker__run(self):
|
||||||
|
"""
|
||||||
|
While a Mitogen strategy is active, trap WorkerProcess.run() calls and use
|
||||||
|
the opportunity to set the worker's name in the process list and log
|
||||||
|
output, activate profiling if requested, and bind the worker to a specific
|
||||||
|
CPU.
|
||||||
|
"""
|
||||||
|
if setproctitle:
|
||||||
|
setproctitle.setproctitle('worker:%s task:%s' % (
|
||||||
|
self._host.name,
|
||||||
|
self._task.action,
|
||||||
|
))
|
||||||
|
|
||||||
|
# Ignore parent's attempts to murder us when we still need to write
|
||||||
|
# profiling output.
|
||||||
|
if mitogen.core._profile_hook.__name__ != '_profile_hook':
|
||||||
|
signal.signal(signal.SIGTERM, signal.SIG_IGN)
|
||||||
|
|
||||||
|
ansible_mitogen.logging.set_process_name('task')
|
||||||
|
ansible_mitogen.affinity.policy.assign_worker()
|
||||||
|
return mitogen.core._profile_hook('WorkerProcess',
|
||||||
|
lambda: worker__run(self)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class AnsibleWrappers(object):
|
||||||
|
"""
|
||||||
|
Manage add/removal of various Ansible runtime hooks.
|
||||||
|
"""
|
||||||
|
def _add_plugin_paths(self):
|
||||||
|
"""
|
||||||
|
Add the Mitogen plug-in directories to the ModuleLoader path, avoiding
|
||||||
|
the need for manual configuration.
|
||||||
|
"""
|
||||||
|
base_dir = os.path.join(os.path.dirname(__file__), 'plugins')
|
||||||
|
ansible_mitogen.loaders.connection_loader.add_directory(
|
||||||
|
os.path.join(base_dir, 'connection')
|
||||||
|
)
|
||||||
|
ansible_mitogen.loaders.action_loader.add_directory(
|
||||||
|
os.path.join(base_dir, 'action')
|
||||||
|
)
|
||||||
|
|
||||||
|
def _install_wrappers(self):
|
||||||
|
"""
|
||||||
|
Install our PluginLoader monkey patches and update global variables
|
||||||
|
with references to the real functions.
|
||||||
|
"""
|
||||||
|
ansible_mitogen.loaders.action_loader.get = wrap_action_loader__get
|
||||||
|
ansible_mitogen.loaders.connection_loader.get_with_context = wrap_connection_loader__get
|
||||||
|
|
||||||
|
global worker__run
|
||||||
|
worker__run = ansible.executor.process.worker.WorkerProcess.run
|
||||||
|
ansible.executor.process.worker.WorkerProcess.run = wrap_worker__run
|
||||||
|
|
||||||
|
def _remove_wrappers(self):
|
||||||
|
"""
|
||||||
|
Uninstall the PluginLoader monkey patches.
|
||||||
|
"""
|
||||||
|
ansible_mitogen.loaders.action_loader.get = (
|
||||||
|
ansible_mitogen.loaders.action_loader__get
|
||||||
|
)
|
||||||
|
ansible_mitogen.loaders.connection_loader.get_with_context = (
|
||||||
|
ansible_mitogen.loaders.connection_loader__get
|
||||||
|
)
|
||||||
|
ansible.executor.process.worker.WorkerProcess.run = worker__run
|
||||||
|
|
||||||
|
def install(self):
|
||||||
|
self._add_plugin_paths()
|
||||||
|
self._install_wrappers()
|
||||||
|
|
||||||
|
def remove(self):
|
||||||
|
self._remove_wrappers()
|
||||||
|
|
||||||
|
|
||||||
|
class StrategyMixin(object):
|
||||||
|
"""
|
||||||
|
This mix-in enhances any built-in strategy by arranging for an appropriate
|
||||||
|
WorkerModel instance to be constructed as necessary, or for the existing
|
||||||
|
one to be reused.
|
||||||
|
|
||||||
|
The WorkerModel in turn arranges for a connection multiplexer to be started
|
||||||
|
somewhere (by default in an external process), and for WorkerProcesses to
|
||||||
|
grow support for using those top-level services to communicate with remote
|
||||||
|
hosts.
|
||||||
|
|
||||||
|
Mitogen:
|
||||||
|
|
||||||
|
A private Broker IO multiplexer thread is created to dispatch IO
|
||||||
|
between the local Router and any connected streams, including streams
|
||||||
|
connected to Ansible WorkerProcesses, and SSH commands implementing
|
||||||
|
connections to remote machines.
|
||||||
|
|
||||||
|
A Router is created that implements message dispatch to any locally
|
||||||
|
registered handlers, and message routing for remote streams. Router is
|
||||||
|
the junction point through which WorkerProcesses and remote SSH contexts
|
||||||
|
can communicate.
|
||||||
|
|
||||||
|
Router additionally adds message handlers for a variety of base
|
||||||
|
services, review the Standard Handles section of the How It Works guide
|
||||||
|
in the documentation.
|
||||||
|
|
||||||
|
A ContextService is installed as a message handler in the connection
|
||||||
|
multiplexer subprocess and run on a private thread. It is responsible
|
||||||
|
for accepting requests to establish new SSH connections from worker
|
||||||
|
processes, and ensuring precisely one connection exists and is reused
|
||||||
|
for subsequent playbook steps. The service presently runs in a single
|
||||||
|
thread, so to begin with, new SSH connections are serialized.
|
||||||
|
|
||||||
|
Finally a mitogen.unix listener is created through which WorkerProcess
|
||||||
|
can establish a connection back into the connection multiplexer, in
|
||||||
|
order to avail of ContextService. A UNIX listener socket is necessary
|
||||||
|
as there is no more sane mechanism to arrange for IPC between the
|
||||||
|
Router in the connection multiplexer, and the corresponding Router in
|
||||||
|
the worker process.
|
||||||
|
|
||||||
|
Ansible:
|
||||||
|
|
||||||
|
PluginLoader monkey patches are installed to catch attempts to create
|
||||||
|
connection and action plug-ins.
|
||||||
|
|
||||||
|
For connection plug-ins, if the desired method is "local" or "ssh", it
|
||||||
|
is redirected to one of the "mitogen_*" connection plug-ins. That
|
||||||
|
plug-in implements communication via a UNIX socket connection to the
|
||||||
|
connection multiplexer process, and uses ContextService running there
|
||||||
|
to establish a persistent connection to the target.
|
||||||
|
|
||||||
|
For action plug-ins, the original class is looked up as usual, but a
|
||||||
|
new subclass is created dynamically in order to mix-in
|
||||||
|
ansible_mitogen.target.ActionModuleMixin, which overrides many of the
|
||||||
|
methods usually inherited from ActionBase in order to replace them with
|
||||||
|
pure-Python equivalents that avoid the use of shell.
|
||||||
|
|
||||||
|
In particular, _execute_module() is overridden with an implementation
|
||||||
|
that uses ansible_mitogen.target.run_module() executed in the target
|
||||||
|
Context. run_module() implements module execution by importing the
|
||||||
|
module as if it were a normal Python module, and capturing its output
|
||||||
|
in the remote process. Since the Mitogen module loader is active in the
|
||||||
|
remote process, all the heavy lifting of transferring the action module
|
||||||
|
and its dependencies are automatically handled by Mitogen.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _queue_task(self, host, task, task_vars, play_context):
|
||||||
|
"""
|
||||||
|
Many PluginLoader caches are defective as they are only populated in
|
||||||
|
the ephemeral WorkerProcess. Touch each plug-in path before forking to
|
||||||
|
ensure all workers receive a hot cache.
|
||||||
|
"""
|
||||||
|
ansible_mitogen.loaders.module_loader.find_plugin(
|
||||||
|
name=task.action,
|
||||||
|
mod_type='',
|
||||||
|
)
|
||||||
|
ansible_mitogen.loaders.action_loader.get(
|
||||||
|
name=task.action,
|
||||||
|
class_only=True,
|
||||||
|
)
|
||||||
|
if play_context.connection is not ansible.utils.sentinel.Sentinel:
|
||||||
|
# 2.8 appears to defer computing this until inside the worker.
|
||||||
|
# TODO: figure out where it has moved.
|
||||||
|
ansible_mitogen.loaders.connection_loader.get(
|
||||||
|
name=play_context.connection,
|
||||||
|
class_only=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
return super(StrategyMixin, self)._queue_task(
|
||||||
|
host=host,
|
||||||
|
task=task,
|
||||||
|
task_vars=task_vars,
|
||||||
|
play_context=play_context,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_worker_model(self):
|
||||||
|
"""
|
||||||
|
In classic mode a single :class:`WorkerModel` exists, which manages
|
||||||
|
references and configuration of the associated connection multiplexer
|
||||||
|
process.
|
||||||
|
"""
|
||||||
|
return ansible_mitogen.process.get_classic_worker_model()
|
||||||
|
|
||||||
|
def run(self, iterator, play_context, result=0):
|
||||||
|
"""
|
||||||
|
Wrap :meth:`run` to ensure requisite infrastructure and modifications
|
||||||
|
are configured for the duration of the call.
|
||||||
|
"""
|
||||||
|
wrappers = AnsibleWrappers()
|
||||||
|
self._worker_model = self._get_worker_model()
|
||||||
|
ansible_mitogen.process.set_worker_model(self._worker_model)
|
||||||
|
try:
|
||||||
|
self._worker_model.on_strategy_start()
|
||||||
|
try:
|
||||||
|
wrappers.install()
|
||||||
|
try:
|
||||||
|
run = super(StrategyMixin, self).run
|
||||||
|
return mitogen.core._profile_hook('Strategy',
|
||||||
|
lambda: run(iterator, play_context)
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
wrappers.remove()
|
||||||
|
finally:
|
||||||
|
self._worker_model.on_strategy_complete()
|
||||||
|
finally:
|
||||||
|
ansible_mitogen.process.set_worker_model(None)
|
771
mitogen-0.3.9/ansible_mitogen/target.py
Normal file
@ -0,0 +1,771 @@
# Copyright 2019, David Wilson
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are met:
|
||||||
|
#
|
||||||
|
# 1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer.
|
||||||
|
#
|
||||||
|
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer in the documentation
|
||||||
|
# and/or other materials provided with the distribution.
|
||||||
|
#
|
||||||
|
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||||
|
# may be used to endorse or promote products derived from this software without
|
||||||
|
# specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||||
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
# !mitogen: minify_safe
|
||||||
|
|
||||||
|
"""
|
||||||
|
Helper functions intended to be executed on the target. These are entrypoints
|
||||||
|
for file transfer, module execution and sundry bits like changing file modes.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type

import errno
import grp
import json
import operator
import os
import pwd
import re
import signal
import stat
import subprocess
import sys
import tempfile
import traceback
import types
|
||||||
|
|
||||||
|
# Absolute imports for <2.5.
|
||||||
|
logging = __import__('logging')
|
||||||
|
|
||||||
|
import mitogen.core
|
||||||
|
import mitogen.parent
|
||||||
|
import mitogen.service
|
||||||
|
from mitogen.core import b
|
||||||
|
|
||||||
|
try:
|
||||||
|
reduce
|
||||||
|
except NameError:
|
||||||
|
# Python 3.x.
|
||||||
|
from functools import reduce
|
||||||
|
|
||||||
|
try:
|
||||||
|
BaseException
|
||||||
|
except NameError:
|
||||||
|
# Python 2.4
|
||||||
|
BaseException = Exception
|
||||||
|
|
||||||
|
|
||||||
|
# Ansible since PR #41749 inserts "import __main__" into
|
||||||
|
# ansible.module_utils.basic. Mitogen's importer will refuse such an import, so
|
||||||
|
# we must set up a fake "__main__" before that module is ever imported. The
|
||||||
|
# str() is to cast Unicode to bytes on Python 2.6.
|
||||||
|
if not sys.modules.get(str('__main__')):
|
||||||
|
sys.modules[str('__main__')] = types.ModuleType(str('__main__'))
|
||||||
|
|
||||||
|
import ansible.module_utils.json_utils
|
||||||
|
import ansible_mitogen.runner
|
||||||
|
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
MAKE_TEMP_FAILED_MSG = (
|
||||||
|
u"Unable to find a useable temporary directory. This likely means no\n"
|
||||||
|
u"system-supplied TMP directory can be written to, or all directories\n"
|
||||||
|
u"were mounted on 'noexec' filesystems.\n"
|
||||||
|
u"\n"
|
||||||
|
u"The following paths were tried:\n"
|
||||||
|
u" %(paths)s\n"
|
||||||
|
u"\n"
|
||||||
|
u"Please check '-vvv' output for a log of individual path errors."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Python 2.4/2.5 cannot support fork+threads whatsoever, it doesn't even fix up
|
||||||
|
# interpreter state. So 2.4/2.5 interpreters start .local() contexts for
|
||||||
|
# isolation instead. Since we don't have any crazy memory sharing problems to
|
||||||
|
# avoid, there is no virginal fork parent either. The child is started directly
|
||||||
|
# from the login/become process. In future this will be default everywhere,
|
||||||
|
# fork is brainwrong from the stone age.
|
||||||
|
FORK_SUPPORTED = sys.version_info >= (2, 6)
|
||||||
|
|
||||||
|
#: Initialized to an econtext.parent.Context pointing at a pristine fork of
|
||||||
|
#: the target Python interpreter before it executes any code or imports.
|
||||||
|
_fork_parent = None
|
||||||
|
|
||||||
|
#: Set by :func:`init_child` to the name of a writeable and executable
|
||||||
|
#: temporary directory accessible by the active user account.
|
||||||
|
good_temp_dir = None
|
||||||
|
|
||||||
|
|
||||||
|
def subprocess__Popen__close_fds(self, but):
|
||||||
|
"""
|
||||||
|
issue #362, #435: subprocess.Popen(close_fds=True) aka.
|
||||||
|
AnsibleModule.run_command() loops the entire FD space on Python<3.2.
|
||||||
|
CentOS>5 ships with 1,048,576 FDs by default, resulting in huge (>500ms)
|
||||||
|
latency starting children. Therefore replace Popen._close_fds on Linux with
|
||||||
|
a version that is O(fds) rather than O(_SC_OPEN_MAX).
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
names = os.listdir(u'/proc/self/fd')
|
||||||
|
except OSError:
|
||||||
|
# May fail if acting on a container that does not have /proc mounted.
|
||||||
|
self._original_close_fds(but)
|
||||||
|
return
|
||||||
|
|
||||||
|
for name in names:
|
||||||
|
if not name.isdigit():
|
||||||
|
continue
|
||||||
|
|
||||||
|
fd = int(name, 10)
|
||||||
|
if fd > 2 and fd != but:
|
||||||
|
try:
|
||||||
|
os.close(fd)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
if (
|
||||||
|
sys.platform.startswith(u'linux') and
|
||||||
|
sys.version_info < (3,) and
|
||||||
|
hasattr(subprocess.Popen, u'_close_fds') and
|
||||||
|
not mitogen.is_master
|
||||||
|
):
|
||||||
|
subprocess.Popen._original_close_fds = subprocess.Popen._close_fds
|
||||||
|
subprocess.Popen._close_fds = subprocess__Popen__close_fds
|
||||||
|
|
||||||
|
|
||||||
|
def get_small_file(context, path):
|
||||||
|
"""
|
||||||
|
Basic in-memory caching module fetcher. This generates one roundtrip for
|
||||||
|
every previously unseen file, so it is only a temporary solution.
|
||||||
|
|
||||||
|
:param context:
|
||||||
|
Context we should direct FileService requests to. For now (and probably
|
||||||
|
forever) this is just the top-level Mitogen connection manager process.
|
||||||
|
:param path:
|
||||||
|
Path to fetch from FileService, must previously have been registered by
|
||||||
|
a privileged context using the `register` command.
|
||||||
|
:returns:
|
||||||
|
Bytestring file data.
|
||||||
|
"""
|
||||||
|
pool = mitogen.service.get_or_create_pool(router=context.router)
|
||||||
|
service = pool.get_service(u'mitogen.service.PushFileService')
|
||||||
|
return service.get(path)
|
||||||
|
|
||||||
|
|
||||||
|
def transfer_file(context, in_path, out_path, sync=False, set_owner=False):
|
||||||
|
"""
|
||||||
|
Streamily download a file from the connection multiplexer process in the
|
||||||
|
controller.
|
||||||
|
|
||||||
|
:param mitogen.core.Context context:
|
||||||
|
Reference to the context hosting the FileService that will transmit the
|
||||||
|
file.
|
||||||
|
:param bytes in_path:
|
||||||
|
FileService registered name of the input file.
|
||||||
|
:param bytes out_path:
|
||||||
|
Name of the output path on the local disk.
|
||||||
|
:param bool sync:
|
||||||
|
If :data:`True`, ensure the file content and metadata are fully on disk
|
||||||
|
before renaming the temporary file over the existing file. This should
|
||||||
|
ensure in the case of system crash, either the entire old or new file
|
||||||
|
is visible post-reboot.
|
||||||
|
:param bool set_owner:
|
||||||
|
If :data:`True`, look up the metadata username and group on the local
|
||||||
|
system and set the file owner using :func:`os.fchown`.
|
||||||
|
"""
|
||||||
|
out_path = os.path.abspath(out_path)
|
||||||
|
fd, tmp_path = tempfile.mkstemp(suffix='.tmp',
|
||||||
|
prefix='.ansible_mitogen_transfer-',
|
||||||
|
dir=os.path.dirname(out_path))
|
||||||
|
fp = os.fdopen(fd, 'wb', mitogen.core.CHUNK_SIZE)
|
||||||
|
LOG.debug('transfer_file(%r) temporary file: %s', out_path, tmp_path)
|
||||||
|
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
ok, metadata = mitogen.service.FileService.get(
|
||||||
|
context=context,
|
||||||
|
path=in_path,
|
||||||
|
out_fp=fp,
|
||||||
|
)
|
||||||
|
if not ok:
|
||||||
|
raise IOError('transfer of %r was interrupted.' % (in_path,))
|
||||||
|
|
||||||
|
set_file_mode(tmp_path, metadata['mode'], fd=fp.fileno())
|
||||||
|
if set_owner:
|
||||||
|
set_file_owner(tmp_path, metadata['owner'], metadata['group'],
|
||||||
|
fd=fp.fileno())
|
||||||
|
finally:
|
||||||
|
fp.close()
|
||||||
|
|
||||||
|
if sync:
|
||||||
|
os.fsync(fp.fileno())
|
||||||
|
os.rename(tmp_path, out_path)
|
||||||
|
except BaseException:
|
||||||
|
os.unlink(tmp_path)
|
||||||
|
raise
|
||||||
|
|
||||||
|
os.utime(out_path, (metadata['atime'], metadata['mtime']))
|
||||||
|
|
||||||
|
|
||||||
|
def prune_tree(path):
|
||||||
|
"""
|
||||||
|
Like shutil.rmtree(), but log errors rather than discard them, and do not
|
||||||
|
waste multiple os.stat() calls discovering whether the object can be
|
||||||
|
deleted, just try deleting it instead.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
os.unlink(path)
|
||||||
|
return
|
||||||
|
except OSError:
|
||||||
|
e = sys.exc_info()[1]
|
||||||
|
if not (os.path.isdir(path) and
|
||||||
|
e.args[0] in (errno.EPERM, errno.EISDIR)):
|
||||||
|
LOG.error('prune_tree(%r): %s', path, e)
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Ensure write access for readonly directories. Ignore error in case
|
||||||
|
# path is on a weird filesystem (e.g. vfat).
|
||||||
|
os.chmod(path, int('0700', 8))
|
||||||
|
except OSError:
|
||||||
|
e = sys.exc_info()[1]
|
||||||
|
LOG.warning('prune_tree(%r): %s', path, e)
|
||||||
|
|
||||||
|
try:
|
||||||
|
for name in os.listdir(path):
|
||||||
|
if name not in ('.', '..'):
|
||||||
|
prune_tree(os.path.join(path, name))
|
||||||
|
os.rmdir(path)
|
||||||
|
except OSError:
|
||||||
|
e = sys.exc_info()[1]
|
||||||
|
LOG.error('prune_tree(%r): %s', path, e)
|
||||||
|
|
||||||
|
|
||||||
|
def is_good_temp_dir(path):
|
||||||
|
"""
|
||||||
|
Return :data:`True` if `path` can be used as a temporary directory, logging
|
||||||
|
any failures that may cause it to be unsuitable. If the directory doesn't
|
||||||
|
exist, we attempt to create it using :func:`os.makedirs`.
|
||||||
|
"""
|
||||||
|
if not os.path.exists(path):
|
||||||
|
try:
|
||||||
|
os.makedirs(path, mode=int('0700', 8))
|
||||||
|
except OSError:
|
||||||
|
e = sys.exc_info()[1]
|
||||||
|
LOG.debug('temp dir %r unusable: did not exist and attempting '
|
||||||
|
'to create it failed: %s', path, e)
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
tmp = tempfile.NamedTemporaryFile(
|
||||||
|
prefix='ansible_mitogen_is_good_temp_dir',
|
||||||
|
dir=path,
|
||||||
|
)
|
||||||
|
except (OSError, IOError):
|
||||||
|
e = sys.exc_info()[1]
|
||||||
|
LOG.debug('temp dir %r unusable: %s', path, e)
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
os.chmod(tmp.name, int('0700', 8))
|
||||||
|
except OSError:
|
||||||
|
e = sys.exc_info()[1]
|
||||||
|
LOG.debug('temp dir %r unusable: chmod failed: %s', path, e)
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
# access(.., X_OK) is sufficient to detect noexec.
|
||||||
|
if not os.access(tmp.name, os.X_OK):
|
||||||
|
raise OSError('filesystem appears to be mounted noexec')
|
||||||
|
except OSError:
|
||||||
|
e = sys.exc_info()[1]
|
||||||
|
LOG.debug('temp dir %r unusable: %s', path, e)
|
||||||
|
return False
|
||||||
|
finally:
|
||||||
|
tmp.close()
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def find_good_temp_dir(candidate_temp_dirs):
|
||||||
|
"""
|
||||||
|
Given a list of candidate temp directories extracted from ``ansible.cfg``,
|
||||||
|
combine it with the Python-builtin list of candidate directories used by
|
||||||
|
:mod:`tempfile`, then iteratively try each until one is found that is both
|
||||||
|
writeable and executable.
|
||||||
|
|
||||||
|
:param list candidate_temp_dirs:
|
||||||
|
List of candidate $variable-expanded and tilde-expanded directory paths
|
||||||
|
that may be usable as a temporary directory.
|
||||||
|
"""
|
||||||
|
paths = [os.path.expandvars(os.path.expanduser(p))
|
||||||
|
for p in candidate_temp_dirs]
|
||||||
|
paths.extend(tempfile._candidate_tempdir_list())
|
||||||
|
|
||||||
|
for path in paths:
|
||||||
|
if is_good_temp_dir(path):
|
||||||
|
LOG.debug('Selected temp directory: %r (from %r)', path, paths)
|
||||||
|
return path
|
||||||
|
|
||||||
|
raise IOError(MAKE_TEMP_FAILED_MSG % {
|
||||||
|
'paths': '\n '.join(paths),
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@mitogen.core.takes_econtext
|
||||||
|
def init_child(econtext, log_level, candidate_temp_dirs):
|
||||||
|
"""
|
||||||
|
Called by ContextService immediately after connection; arranges for the
|
||||||
|
(presently) spotless Python interpreter to be forked, where the newly
|
||||||
|
forked interpreter becomes the parent of any newly forked future
|
||||||
|
interpreters.
|
||||||
|
|
||||||
|
This is necessary to prevent modules that are executed in-process from
|
||||||
|
polluting the global interpreter state in a way that affects explicitly
|
||||||
|
isolated modules.
|
||||||
|
|
||||||
|
:param int log_level:
|
||||||
|
Logging package level active in the master.
|
||||||
|
:param list[str] candidate_temp_dirs:
|
||||||
|
List of $variable-expanded and tilde-expanded directory names to add to
|
||||||
|
candidate list of temporary directories.
|
||||||
|
|
||||||
|
:returns:
|
||||||
|
Dict like::
|
||||||
|
|
||||||
|
{
|
||||||
|
'fork_context': mitogen.core.Context or None,
|
||||||
|
'good_temp_dir': ...
|
||||||
|
'home_dir': str
|
||||||
|
}
|
||||||
|
|
||||||
|
Where `fork_context` refers to the newly forked 'fork parent' context
|
||||||
|
the controller will use to start forked jobs, and `home_dir` is the
|
||||||
|
home directory for the active user account.
|
||||||
|
"""
|
||||||
|
# Copying the master's log level causes log messages to be filtered before
|
||||||
|
# they reach LogForwarder, thus reducing an influx of tiny messages waking
|
||||||
|
# the connection multiplexer process in the master.
|
||||||
|
LOG.setLevel(log_level)
|
||||||
|
logging.getLogger('ansible_mitogen').setLevel(log_level)
|
||||||
|
|
||||||
|
global _fork_parent
|
||||||
|
if FORK_SUPPORTED:
|
||||||
|
mitogen.parent.upgrade_router(econtext)
|
||||||
|
_fork_parent = econtext.router.fork()
|
||||||
|
|
||||||
|
global good_temp_dir
|
||||||
|
good_temp_dir = find_good_temp_dir(candidate_temp_dirs)
|
||||||
|
|
||||||
|
return {
|
||||||
|
u'fork_context': _fork_parent,
|
||||||
|
u'home_dir': mitogen.core.to_text(os.path.expanduser('~')),
|
||||||
|
u'good_temp_dir': good_temp_dir,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@mitogen.core.takes_econtext
|
||||||
|
def spawn_isolated_child(econtext):
|
||||||
|
"""
|
||||||
|
For helper functions executed in the fork parent context, arrange for
|
||||||
|
the context's router to be upgraded as necessary and for a new child to be
|
||||||
|
prepared.
|
||||||
|
|
||||||
|
The actual fork occurs from the 'virginal fork parent', which does not have
|
||||||
|
any Ansible modules loaded prior to fork, to avoid conflicts resulting from
|
||||||
|
custom module_utils paths.
|
||||||
|
"""
|
||||||
|
mitogen.parent.upgrade_router(econtext)
|
||||||
|
if FORK_SUPPORTED:
|
||||||
|
context = econtext.router.fork()
|
||||||
|
else:
|
||||||
|
context = econtext.router.local()
|
||||||
|
LOG.debug('create_fork_child() -> %r', context)
|
||||||
|
return context
|
||||||
|
|
||||||
|
|
||||||
|
def run_module(kwargs):
|
||||||
|
"""
|
||||||
|
Set up the process environment in preparation for running an Ansible
|
||||||
|
module. This monkey-patches the Ansible libraries in various places to
|
||||||
|
prevent it from trying to kill the process on completion, and to prevent it
|
||||||
|
from reading sys.stdin.
|
||||||
|
"""
|
||||||
|
runner_name = kwargs.pop('runner_name')
|
||||||
|
klass = getattr(ansible_mitogen.runner, runner_name)
|
||||||
|
impl = klass(**mitogen.core.Kwargs(kwargs))
|
||||||
|
return impl.run()
|
||||||
|
|
||||||
|
|
||||||
|
def _get_async_dir():
|
||||||
|
return os.path.expanduser(
|
||||||
|
os.environ.get('ANSIBLE_ASYNC_DIR', '~/.ansible_async')
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncRunner(object):
|
||||||
|
def __init__(self, job_id, timeout_secs, started_sender, econtext, kwargs):
|
||||||
|
self.job_id = job_id
|
||||||
|
self.timeout_secs = timeout_secs
|
||||||
|
self.started_sender = started_sender
|
||||||
|
self.econtext = econtext
|
||||||
|
self.kwargs = kwargs
|
||||||
|
self._timed_out = False
|
||||||
|
self._init_path()
|
||||||
|
|
||||||
|
def _init_path(self):
|
||||||
|
async_dir = _get_async_dir()
|
||||||
|
if not os.path.exists(async_dir):
|
||||||
|
os.makedirs(async_dir)
|
||||||
|
self.path = os.path.join(async_dir, self.job_id)
|
||||||
|
|
||||||
|
def _update(self, dct):
|
||||||
|
"""
|
||||||
|
Update an async job status file.
|
||||||
|
"""
|
||||||
|
LOG.info('%r._update(%r, %r)', self, self.job_id, dct)
|
||||||
|
dct.setdefault('ansible_job_id', self.job_id)
|
||||||
|
dct.setdefault('data', '')
|
||||||
|
|
||||||
|
fp = open(self.path + '.tmp', 'w')
|
||||||
|
try:
|
||||||
|
fp.write(json.dumps(dct))
|
||||||
|
finally:
|
||||||
|
fp.close()
|
||||||
|
os.rename(self.path + '.tmp', self.path)
|
||||||
|
|
||||||
|
def _on_sigalrm(self, signum, frame):
|
||||||
|
"""
|
||||||
|
Respond to SIGALRM (job timeout) by updating the job file and killing
|
||||||
|
the process.
|
||||||
|
"""
|
||||||
|
msg = "Job reached maximum time limit of %d seconds." % (
|
||||||
|
self.timeout_secs,
|
||||||
|
)
|
||||||
|
self._update({
|
||||||
|
"failed": 1,
|
||||||
|
"finished": 1,
|
||||||
|
"msg": msg,
|
||||||
|
})
|
||||||
|
self._timed_out = True
|
||||||
|
self.econtext.broker.shutdown()
|
||||||
|
|
||||||
|
def _install_alarm(self):
|
||||||
|
signal.signal(signal.SIGALRM, self._on_sigalrm)
|
||||||
|
signal.alarm(self.timeout_secs)
|
||||||
|
|
||||||
|
def _run_module(self):
|
||||||
|
kwargs = dict(self.kwargs, **{
|
||||||
|
'detach': True,
|
||||||
|
'econtext': self.econtext,
|
||||||
|
'emulate_tty': False,
|
||||||
|
})
|
||||||
|
return run_module(kwargs)
|
||||||
|
|
||||||
|
def _parse_result(self, dct):
|
||||||
|
filtered, warnings = (
|
||||||
|
ansible.module_utils.json_utils.
|
||||||
|
_filter_non_json_lines(dct['stdout'])
|
||||||
|
)
|
||||||
|
result = json.loads(filtered)
|
||||||
|
result.setdefault('warnings', []).extend(warnings)
|
||||||
|
result['stderr'] = dct['stderr'] or result.get('stderr', '')
|
||||||
|
self._update(result)
|
||||||
|
|
||||||
|
def _run(self):
|
||||||
|
"""
|
||||||
|
1. Immediately updates the status file to mark the job as started.
|
||||||
|
2. Installs a timer/signal handler to implement the time limit.
|
||||||
|
3. Runs as with run_module(), writing the result to the status file.
|
||||||
|
|
||||||
|
:param dict kwargs:
|
||||||
|
Runner keyword arguments.
|
||||||
|
:param str job_id:
|
||||||
|
String job ID.
|
||||||
|
:param int timeout_secs:
|
||||||
|
If >0, limit the task's maximum run time.
|
||||||
|
"""
|
||||||
|
self._update({
|
||||||
|
'started': 1,
|
||||||
|
'finished': 0,
|
||||||
|
'pid': os.getpid()
|
||||||
|
})
|
||||||
|
self.started_sender.send(True)
|
||||||
|
|
||||||
|
if self.timeout_secs > 0:
|
||||||
|
self._install_alarm()
|
||||||
|
|
||||||
|
dct = self._run_module()
|
||||||
|
if not self._timed_out:
|
||||||
|
# After SIGALRM fires, there is a window between broker responding
|
||||||
|
# to shutdown() by killing the process, and work continuing on the
|
||||||
|
# main thread. If main thread was asleep in at least
|
||||||
|
# basic.py/select.select(), an EINTR will be raised. We want to
|
||||||
|
# discard that exception.
|
||||||
|
try:
|
||||||
|
self._parse_result(dct)
|
||||||
|
except Exception:
|
||||||
|
self._update({
|
||||||
|
"failed": 1,
|
||||||
|
"msg": traceback.format_exc(),
|
||||||
|
"data": dct['stdout'], # temporary notice only
|
||||||
|
"stderr": dct['stderr']
|
||||||
|
})
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
self._run()
|
||||||
|
except Exception:
|
||||||
|
self._update({
|
||||||
|
"failed": 1,
|
||||||
|
"msg": traceback.format_exc(),
|
||||||
|
})
|
||||||
|
finally:
|
||||||
|
self.econtext.broker.shutdown()
|
||||||
|
|
||||||
|
|
||||||
|
@mitogen.core.takes_econtext
|
||||||
|
def run_module_async(kwargs, job_id, timeout_secs, started_sender, econtext):
|
||||||
|
"""
|
||||||
|
Execute a module with its run status and result written to a file,
|
||||||
|
terminating the process on completion. This function must run in a child
|
||||||
|
forked using :func:`create_fork_child`.
|
||||||
|
|
||||||
|
@param mitogen.core.Sender started_sender:
|
||||||
|
A sender that will receive :data:`True` once the job has reached a
|
||||||
|
point where its initial job file has been written. This is required to
|
||||||
|
avoid a race where an overly eager controller can check for a task
|
||||||
|
before it has reached that point in execution, which is possible at
|
||||||
|
least on Python 2.4, where forking is not available for async tasks.
|
||||||
|
"""
|
||||||
|
arunner = AsyncRunner(
|
||||||
|
job_id,
|
||||||
|
timeout_secs,
|
||||||
|
started_sender,
|
||||||
|
econtext,
|
||||||
|
kwargs
|
||||||
|
)
|
||||||
|
arunner.run()
|
||||||
|
|
||||||
|
|
||||||
|
def get_user_shell():
|
||||||
|
"""
|
||||||
|
For commands executed directly via an SSH command-line, SSH looks up the
|
||||||
|
user's shell via getpwuid() and only defaults to /bin/sh if that field is
|
||||||
|
missing or empty.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pw_shell = pwd.getpwuid(os.geteuid()).pw_shell
|
||||||
|
except KeyError:
|
||||||
|
pw_shell = None
|
||||||
|
|
||||||
|
return pw_shell or '/bin/sh'
|
||||||
|
|
||||||
|
|
||||||
|
def exec_args(args, in_data='', chdir=None, shell=None, emulate_tty=False):
|
||||||
|
"""
|
||||||
|
Run a command in a subprocess, emulating the argument handling behaviour of
|
||||||
|
SSH.
|
||||||
|
|
||||||
|
:param list[str]:
|
||||||
|
Argument vector.
|
||||||
|
:param bytes in_data:
|
||||||
|
Optional standard input for the command.
|
||||||
|
:param bool emulate_tty:
|
||||||
|
If :data:`True`, arrange for stdout and stderr to be merged into the
|
||||||
|
stdout pipe and for LF to be translated into CRLF, emulating the
|
||||||
|
behaviour of a TTY.
|
||||||
|
:return:
|
||||||
|
(return code, stdout bytes, stderr bytes)
|
||||||
|
"""
|
||||||
|
LOG.debug('exec_args(%r, ..., chdir=%r)', args, chdir)
|
||||||
|
assert isinstance(args, list)
|
||||||
|
|
||||||
|
if emulate_tty:
|
||||||
|
stderr = subprocess.STDOUT
|
||||||
|
else:
|
||||||
|
stderr = subprocess.PIPE
|
||||||
|
|
||||||
|
proc = subprocess.Popen(
|
||||||
|
args=args,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=stderr,
|
||||||
|
stdin=subprocess.PIPE,
|
||||||
|
cwd=chdir,
|
||||||
|
)
|
||||||
|
stdout, stderr = proc.communicate(in_data)
|
||||||
|
|
||||||
|
if emulate_tty:
|
||||||
|
stdout = stdout.replace(b('\n'), b('\r\n'))
|
||||||
|
return proc.returncode, stdout, stderr or b('')
|
||||||
|
|
||||||
|
|
||||||
|
def exec_command(cmd, in_data='', chdir=None, shell=None, emulate_tty=False):
|
||||||
|
"""
|
||||||
|
Run a command in a subprocess, emulating the argument handling behaviour of
|
||||||
|
SSH.
|
||||||
|
|
||||||
|
:param bytes cmd:
|
||||||
|
String command line, passed to user's shell.
|
||||||
|
:param bytes in_data:
|
||||||
|
Optional standard input for the command.
|
||||||
|
:return:
|
||||||
|
(return code, stdout bytes, stderr bytes)
|
||||||
|
"""
|
||||||
|
assert isinstance(cmd, mitogen.core.UnicodeType)
|
||||||
|
return exec_args(
|
||||||
|
args=[get_user_shell(), '-c', cmd],
|
||||||
|
in_data=in_data,
|
||||||
|
chdir=chdir,
|
||||||
|
shell=shell,
|
||||||
|
emulate_tty=emulate_tty,
|
||||||
|
)
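# Illustrative usage (an assumption, not taken from the upstream file):
#
#   rc, out, err = exec_command(u'echo hello')
#   # roughly: rc == 0, out == b'hello\n', err == b''
#
# With emulate_tty=True, stderr is merged into stdout and LF becomes CRLF.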
|
||||||
|
|
||||||
|
|
||||||
|
def read_path(path):
|
||||||
|
"""
|
||||||
|
Fetch the contents of a filesystem `path` as bytes.
|
||||||
|
"""
|
||||||
|
with open(path, 'rb') as f:
|
||||||
|
return f.read()
|
||||||
|
|
||||||
|
|
||||||
|
def set_file_owner(path, owner, group=None, fd=None):
|
||||||
|
if owner:
|
||||||
|
uid = pwd.getpwnam(owner).pw_uid
|
||||||
|
else:
|
||||||
|
uid = os.geteuid()
|
||||||
|
|
||||||
|
if group:
|
||||||
|
gid = grp.getgrnam(group).gr_gid
|
||||||
|
else:
|
||||||
|
gid = os.getegid()
|
||||||
|
|
||||||
|
if fd is not None and hasattr(os, 'fchown'):
|
||||||
|
os.fchown(fd, uid, gid)
|
||||||
|
else:
|
||||||
|
# Python<2.6
|
||||||
|
os.chown(path, uid, gid)
|
||||||
|
|
||||||
|
|
||||||
|
def write_path(path, s, owner=None, group=None, mode=None,
|
||||||
|
utimes=None, sync=False):
|
||||||
|
"""
|
||||||
|
Writes bytes `s` to a filesystem `path`.
|
||||||
|
"""
|
||||||
|
path = os.path.abspath(path)
|
||||||
|
fd, tmp_path = tempfile.mkstemp(suffix='.tmp',
|
||||||
|
prefix='.ansible_mitogen_transfer-',
|
||||||
|
dir=os.path.dirname(path))
|
||||||
|
fp = os.fdopen(fd, 'wb', mitogen.core.CHUNK_SIZE)
|
||||||
|
LOG.debug('write_path(path=%r) temporary file: %s', path, tmp_path)
|
||||||
|
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
if mode:
|
||||||
|
set_file_mode(tmp_path, mode, fd=fp.fileno())
|
||||||
|
if owner or group:
|
||||||
|
set_file_owner(tmp_path, owner, group, fd=fp.fileno())
|
||||||
|
fp.write(s)
|
||||||
|
finally:
|
||||||
|
fp.close()
|
||||||
|
|
||||||
|
if sync:
|
||||||
|
os.fsync(fp.fileno())
|
||||||
|
os.rename(tmp_path, path)
|
||||||
|
except BaseException:
|
||||||
|
os.unlink(tmp_path)
|
||||||
|
raise
|
||||||
|
|
||||||
|
if utimes:
|
||||||
|
os.utime(path, utimes)
|
||||||
|
|
||||||
|
|
||||||
|
CHMOD_CLAUSE_PAT = re.compile(r'([uoga]*)([+\-=])([ugo]|[rwx]*)')
CHMOD_MASKS = {
    'u': stat.S_IRWXU,
    'g': stat.S_IRWXG,
    'o': stat.S_IRWXO,
    'a': (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO),
}
CHMOD_BITS = {
    'u': {'r': stat.S_IRUSR, 'w': stat.S_IWUSR, 'x': stat.S_IXUSR},
    'g': {'r': stat.S_IRGRP, 'w': stat.S_IWGRP, 'x': stat.S_IXGRP},
    'o': {'r': stat.S_IROTH, 'w': stat.S_IWOTH, 'x': stat.S_IXOTH},
    'a': {
        'r': (stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH),
        'w': (stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH),
        'x': (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
    }
}
|
||||||
|
|
||||||
|
|
||||||
|
def apply_mode_spec(spec, mode):
|
||||||
|
"""
|
||||||
|
Given a symbolic file mode change specification in the style of chmod(1)
|
||||||
|
`spec`, apply changes in the specification to the numeric file mode `mode`.
|
||||||
|
"""
|
||||||
|
for clause in mitogen.core.to_text(spec).split(','):
|
||||||
|
match = CHMOD_CLAUSE_PAT.match(clause)
|
||||||
|
who, op, perms = match.groups()
|
||||||
|
for ch in who or 'a':
|
||||||
|
mask = CHMOD_MASKS[ch]
|
||||||
|
bits = CHMOD_BITS[ch]
|
||||||
|
cur_perm_bits = mode & mask
|
||||||
|
new_perm_bits = reduce(operator.or_, (bits[p] for p in perms), 0)
|
||||||
|
mode &= ~mask
|
||||||
|
if op == '=':
|
||||||
|
mode |= new_perm_bits
|
||||||
|
elif op == '+':
|
||||||
|
mode |= new_perm_bits | cur_perm_bits
|
||||||
|
else:
|
||||||
|
mode |= cur_perm_bits & ~new_perm_bits
|
||||||
|
return mode
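# Worked examples (illustrative only, not part of the upstream file):
#   apply_mode_spec(u'u+x', 0o644)        -> 0o744
#   apply_mode_spec(u'g-w,o-rwx', 0o777)  -> 0o750
# Each comma-separated clause is applied in turn to the numeric mode.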
|
||||||
|
|
||||||
|
|
||||||
|
def set_file_mode(path, spec, fd=None):
|
||||||
|
"""
|
||||||
|
Update the permissions of a file using the same syntax as chmod(1).
|
||||||
|
"""
|
||||||
|
if isinstance(spec, int):
|
||||||
|
new_mode = spec
|
||||||
|
elif not mitogen.core.PY3 and isinstance(spec, long):
|
||||||
|
new_mode = spec
|
||||||
|
elif spec.isdigit():
|
||||||
|
new_mode = int(spec, 8)
|
||||||
|
else:
|
||||||
|
mode = os.stat(path).st_mode
|
||||||
|
new_mode = apply_mode_spec(spec, mode)
|
||||||
|
|
||||||
|
if fd is not None and hasattr(os, 'fchmod'):
|
||||||
|
os.fchmod(fd, new_mode)
|
||||||
|
else:
|
||||||
|
os.chmod(path, new_mode)
|
||||||
|
|
||||||
|
|
||||||
|
def file_exists(path):
|
||||||
|
"""
|
||||||
|
Return :data:`True` if `path` exists. This is a wrapper function over
|
||||||
|
:func:`os.path.exists`, since its implementation module varies across
|
||||||
|
Python versions.
|
||||||
|
"""
|
||||||
|
return os.path.exists(path)
|
820
mitogen-0.3.9/ansible_mitogen/transport_config.py
Normal file
@ -0,0 +1,820 @@
# Copyright 2019, David Wilson
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are met:
|
||||||
|
#
|
||||||
|
# 1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer.
|
||||||
|
#
|
||||||
|
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer in the documentation
|
||||||
|
# and/or other materials provided with the distribution.
|
||||||
|
#
|
||||||
|
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||||
|
# may be used to endorse or promote products derived from this software without
|
||||||
|
# specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||||
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
"""
|
||||||
|
Mitogen extends Ansible's target configuration mechanism in several ways that
|
||||||
|
require some care:
|
||||||
|
|
||||||
|
* Per-task configurables in Ansible like ansible_python_interpreter are
|
||||||
|
connection-layer configurables in Mitogen. They must be extracted during each
|
||||||
|
task execution to form the complete connection-layer configuration.
|
||||||
|
|
||||||
|
* Mitogen has extra configurables not supported by Ansible at all, such as
|
||||||
|
mitogen_ssh_debug_level. These are extracted the same way as
|
||||||
|
ansible_python_interpreter.
|
||||||
|
|
||||||
|
* Mitogen allows connections to be delegated to other machines. Ansible has no
|
||||||
|
internal framework for this, and so Mitogen must figure out a delegated
|
||||||
|
connection configuration all on its own. It cannot reuse much of the Ansible
|
||||||
|
machinery for building a connection configuration, as that machinery is
|
||||||
|
deeply spread out and hard-wired to expect Ansible's usual mode of operation.
|
||||||
|
|
||||||
|
For normal and delegate_to connections, Ansible's PlayContext is reused where
|
||||||
|
possible to maximize compatibility, but for proxy hops, configurations are
|
||||||
|
built up using the HostVars magic class to call VariableManager.get_vars()
|
||||||
|
behind the scenes on our behalf. Where Ansible has multiple sources of a
|
||||||
|
configuration item, for example, ansible_ssh_extra_args, Mitogen must (ideally
|
||||||
|
perfectly) reproduce how Ansible arrives at its value, without using mechanisms
|
||||||
|
that are hard-wired or change across Ansible versions.
|
||||||
|
|
||||||
|
That is what this file is for. It exports two spec classes, one that takes all
|
||||||
|
information from PlayContext, and another that takes (almost) all information
|
||||||
|
from HostVars.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import os
|
||||||
|
import ansible.utils.shlex
|
||||||
|
import ansible.constants as C
|
||||||
|
|
||||||
|
from ansible.module_utils.six import with_metaclass
|
||||||
|
from ansible.module_utils.parsing.convert_bool import boolean
|
||||||
|
|
||||||
|
# this was added in Ansible >= 2.8.0; fallback to the default interpreter if necessary
|
||||||
|
try:
|
||||||
|
from ansible.executor.interpreter_discovery import discover_interpreter
|
||||||
|
except ImportError:
|
||||||
|
discover_interpreter = lambda action,interpreter_name,discovery_mode,task_vars: '/usr/bin/python'
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ansible.utils.unsafe_proxy import AnsibleUnsafeText
|
||||||
|
except ImportError:
|
||||||
|
from ansible.vars.unsafe_proxy import AnsibleUnsafeText
|
||||||
|
|
||||||
|
import mitogen.core
|
||||||
|
|
||||||
|
|
||||||
|
def run_interpreter_discovery_if_necessary(s, task_vars, action, rediscover_python):
|
||||||
|
"""
|
||||||
|
Triggers ansible python interpreter discovery if requested.
|
||||||
|
Caches this value the same way Ansible does it.
|
||||||
|
For connections like `docker`, we want to rediscover the python interpreter because
|
||||||
|
it could be different from what runs on the host
|
||||||
|
"""
|
||||||
|
# keep trying different interpreters until we don't error
|
||||||
|
if action._finding_python_interpreter:
|
||||||
|
return action._possible_python_interpreter
|
||||||
|
|
||||||
|
if s in ['auto', 'auto_legacy', 'auto_silent', 'auto_legacy_silent']:
|
||||||
|
# python is the only supported interpreter_name as of Ansible 2.8.8
|
||||||
|
interpreter_name = 'python'
|
||||||
|
discovered_interpreter_config = u'discovered_interpreter_%s' % interpreter_name
|
||||||
|
|
||||||
|
if task_vars.get('ansible_facts') is None:
|
||||||
|
task_vars['ansible_facts'] = {}
|
||||||
|
|
||||||
|
if rediscover_python and task_vars.get('ansible_facts', {}).get(discovered_interpreter_config):
|
||||||
|
# if we're rediscovering python then chances are we're running something like a docker connection
|
||||||
|
# this will handle scenarios like running a playbook that does stuff + then dynamically creates a docker container,
|
||||||
|
# then runs the rest of the playbook inside that container, and then rerunning the playbook again
|
||||||
|
action._rediscovered_python = True
|
||||||
|
|
||||||
|
# blow away the discovered_interpreter_config cache and rediscover
|
||||||
|
del task_vars['ansible_facts'][discovered_interpreter_config]
|
||||||
|
|
||||||
|
if discovered_interpreter_config not in task_vars['ansible_facts']:
|
||||||
|
action._finding_python_interpreter = True
|
||||||
|
# fake pipelining so discover_interpreter can be happy
|
||||||
|
action._connection.has_pipelining = True
|
||||||
|
s = AnsibleUnsafeText(discover_interpreter(
|
||||||
|
action=action,
|
||||||
|
interpreter_name=interpreter_name,
|
||||||
|
discovery_mode=s,
|
||||||
|
task_vars=task_vars))
|
||||||
|
|
||||||
|
# cache discovered interpreter
|
||||||
|
task_vars['ansible_facts'][discovered_interpreter_config] = s
|
||||||
|
action._connection.has_pipelining = False
|
||||||
|
else:
|
||||||
|
s = task_vars['ansible_facts'][discovered_interpreter_config]
|
||||||
|
|
||||||
|
# propagate discovered interpreter as fact
|
||||||
|
action._discovered_interpreter_key = discovered_interpreter_config
|
||||||
|
action._discovered_interpreter = s
|
||||||
|
|
||||||
|
action._finding_python_interpreter = False
|
||||||
|
return s
|
||||||
|
|
||||||
|
|
||||||
|
def parse_python_path(s, task_vars, action, rediscover_python):
|
||||||
|
"""
|
||||||
|
Given the string set for ansible_python_interpreter, parse it using shell
|
||||||
|
syntax and return an appropriate argument vector. If the value detected is
|
||||||
|
one of interpreter discovery then run that first. Caches python interpreter
|
||||||
|
discovery value in `facts_from_task_vars` like how Ansible handles this.
|
||||||
|
"""
|
||||||
|
if not s:
|
||||||
|
# if python_path doesn't exist, default to `auto` and attempt to discover it
|
||||||
|
s = 'auto'
|
||||||
|
|
||||||
|
s = run_interpreter_discovery_if_necessary(s, task_vars, action, rediscover_python)
|
||||||
|
# if unable to determine python_path, fallback to '/usr/bin/python'
|
||||||
|
if not s:
|
||||||
|
s = '/usr/bin/python'
|
||||||
|
|
||||||
|
return ansible.utils.shlex.shlex_split(s)
|
||||||
|
|
||||||
|
|
||||||
|
def optional_secret(value):
|
||||||
|
"""
|
||||||
|
Wrap `value` in :class:`mitogen.core.Secret` if it is not :data:`None`,
|
||||||
|
otherwise return :data:`None`.
|
||||||
|
"""
|
||||||
|
if value is not None:
|
||||||
|
return mitogen.core.Secret(value)
|
||||||
|
|
||||||
|
|
||||||
|
def first_true(it, default=None):
|
||||||
|
"""
|
||||||
|
Return the first truthy element from `it`.
|
||||||
|
"""
|
||||||
|
for elem in it:
|
||||||
|
if elem:
|
||||||
|
return elem
|
||||||
|
return default
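# For example (illustrative): first_true(['', None, 'sudo']) returns 'sudo',
# and first_true([None, ''], default='') returns ''.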
|
||||||
|
|
||||||
|
|
||||||
|
class Spec(with_metaclass(abc.ABCMeta, object)):
|
||||||
|
"""
|
||||||
|
A source for variables that comprise a connection configuration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def transport(self):
|
||||||
|
"""
|
||||||
|
The name of the Ansible plug-in implementing the connection.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def inventory_name(self):
|
||||||
|
"""
|
||||||
|
The name of the target being connected to as it appears in Ansible's
|
||||||
|
inventory.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def remote_addr(self):
|
||||||
|
"""
|
||||||
|
The network address of the target, or for container and other special
|
||||||
|
targets, some other unique identifier.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def remote_user(self):
|
||||||
|
"""
|
||||||
|
The username of the login account on the target.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def password(self):
|
||||||
|
"""
|
||||||
|
The password of the login account on the target.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def become(self):
|
||||||
|
"""
|
||||||
|
:data:`True` if privilege escalation should be active.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def become_method(self):
|
||||||
|
"""
|
||||||
|
The name of the Ansible become method to use.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def become_user(self):
|
||||||
|
"""
|
||||||
|
The username of the target account for become.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def become_pass(self):
|
||||||
|
"""
|
||||||
|
The password of the target account for become.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def port(self):
|
||||||
|
"""
|
||||||
|
The port of the login service on the target machine.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def python_path(self):
|
||||||
|
"""
|
||||||
|
Path to the Python interpreter on the target machine.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def host_key_checking(self):
|
||||||
|
"""
|
||||||
|
Whether or not to check the host keys of the target machine.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def private_key_file(self):
|
||||||
|
"""
|
||||||
|
Path to the SSH private key file to use to login.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def ssh_executable(self):
|
||||||
|
"""
|
||||||
|
Path to the SSH executable.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def timeout(self):
|
||||||
|
"""
|
||||||
|
The generic timeout for all connections.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def ansible_ssh_timeout(self):
|
||||||
|
"""
|
||||||
|
The SSH-specific timeout for a connection.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def ssh_args(self):
|
||||||
|
"""
|
||||||
|
The list of additional arguments that should be included in an SSH
|
||||||
|
invocation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def become_exe(self):
|
||||||
|
"""
|
||||||
|
The path to the executable implementing the become method on the remote
|
||||||
|
machine.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def sudo_args(self):
|
||||||
|
"""
|
||||||
|
The list of additional arguments that should be included in a become
|
||||||
|
invocation.
|
||||||
|
"""
|
||||||
|
# TODO: split out into sudo_args/become_args.
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_via(self):
|
||||||
|
"""
|
||||||
|
The value of the mitogen_via= variable for this connection. Indicates
|
||||||
|
the connection should be established via an intermediary.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_kind(self):
|
||||||
|
"""
|
||||||
|
The type of container to use with the "setns" transport.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_mask_remote_name(self):
|
||||||
|
"""
|
||||||
|
Specifies whether to set a fixed "remote_name" field. The remote_name
|
||||||
|
is the suffix of `argv[0]` for remote interpreters. By default it
|
||||||
|
includes identifying information from the local process, which may be
|
||||||
|
undesirable in some circumstances.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_buildah_path(self):
|
||||||
|
"""
|
||||||
|
The path to the "buildah" program for the 'buildah' transport.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_docker_path(self):
|
||||||
|
"""
|
||||||
|
The path to the "docker" program for the 'docker' transport.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_kubectl_path(self):
|
||||||
|
"""
|
||||||
|
The path to the "kubectl" program for the 'docker' transport.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_lxc_path(self):
|
||||||
|
"""
|
||||||
|
The path to the "lxc" program for the 'lxd' transport.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_lxc_attach_path(self):
|
||||||
|
"""
|
||||||
|
The path to the "lxc-attach" program for the 'lxc' transport.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_lxc_info_path(self):
|
||||||
|
"""
|
||||||
|
The path to the "lxc-info" program for the 'lxc' transport.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_machinectl_path(self):
|
||||||
|
"""
|
||||||
|
The path to the "machinectl" program for the 'setns' transport.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_podman_path(self):
|
||||||
|
"""
|
||||||
|
The path to the "podman" program for the 'podman' transport.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_ssh_keepalive_interval(self):
|
||||||
|
"""
|
||||||
|
The SSH ServerAliveInterval.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_ssh_keepalive_count(self):
|
||||||
|
"""
|
||||||
|
The SSH ServerAliveCountMax value.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_ssh_debug_level(self):
|
||||||
|
"""
|
||||||
|
The SSH debug level.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def mitogen_ssh_compression(self):
|
||||||
|
"""
|
||||||
|
Whether SSH compression is enabled.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def extra_args(self):
|
||||||
|
"""
|
||||||
|
Connection-specific arguments.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def ansible_doas_exe(self):
|
||||||
|
"""
|
||||||
|
Value of "ansible_doas_exe" variable.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class PlayContextSpec(Spec):
|
||||||
|
"""
|
||||||
|
PlayContextSpec takes almost all its information as-is from Ansible's
|
||||||
|
PlayContext. It is used for normal connections and delegate_to connections,
|
||||||
|
and should always be accurate.
|
||||||
|
"""
|
||||||
|
def __init__(self, connection, play_context, transport, inventory_name):
|
||||||
|
self._connection = connection
|
||||||
|
self._play_context = play_context
|
||||||
|
self._transport = transport
|
||||||
|
self._inventory_name = inventory_name
|
||||||
|
self._task_vars = self._connection._get_task_vars()
|
||||||
|
# used to run interpreter discovery
|
||||||
|
self._action = connection._action
|
||||||
|
|
||||||
|
def transport(self):
|
||||||
|
return self._transport
|
||||||
|
|
||||||
|
def inventory_name(self):
|
||||||
|
return self._inventory_name
|
||||||
|
|
||||||
|
def remote_addr(self):
|
||||||
|
return self._play_context.remote_addr
|
||||||
|
|
||||||
|
def remote_user(self):
|
||||||
|
return self._play_context.remote_user
|
||||||
|
|
||||||
|
def become(self):
|
||||||
|
return self._play_context.become
|
||||||
|
|
||||||
|
def become_method(self):
|
||||||
|
return self._play_context.become_method
|
||||||
|
|
||||||
|
def become_user(self):
|
||||||
|
return self._play_context.become_user
|
||||||
|
|
||||||
|
def become_pass(self):
|
||||||
|
# become_pass is owned/provided by the active become plugin. However
|
||||||
|
# PlayContext is intertwined with it. Known complications
|
||||||
|
# - ansible_become_password is higher priority than ansible_become_pass,
|
||||||
|
# `play_context.become_pass` doesn't obey this (at least with Mitogen).
|
||||||
|
# - `meta: reset_connection` runs `connection.reset()` but
|
||||||
|
# `ansible_mitogen.connection.Connection.reset()` recreates the
|
||||||
|
# connection object, setting `connection.become = None`.
|
||||||
|
become_plugin = self._connection.become
|
||||||
|
try:
|
||||||
|
become_pass = become_plugin.get_option('become_pass', playcontext=self._play_context)
|
||||||
|
except AttributeError:
|
||||||
|
become_pass = self._play_context.become_pass
|
||||||
|
return optional_secret(become_pass)
|
||||||
|
|
||||||
|
def password(self):
|
||||||
|
return optional_secret(self._play_context.password)
|
||||||
|
|
||||||
|
def port(self):
|
||||||
|
return self._play_context.port
|
||||||
|
|
||||||
|
def python_path(self, rediscover_python=False):
|
||||||
|
s = self._connection.get_task_var('ansible_python_interpreter')
|
||||||
|
# #511, #536: executor/module_common.py::_get_shebang() hard-wires
|
||||||
|
# "/usr/bin/python" as the default interpreter path if no other
|
||||||
|
# interpreter is specified.
|
||||||
|
return parse_python_path(
|
||||||
|
s,
|
||||||
|
task_vars=self._task_vars,
|
||||||
|
action=self._action,
|
||||||
|
rediscover_python=rediscover_python)
|
||||||
|
|
||||||
|
def host_key_checking(self):
|
||||||
|
def candidates():
|
||||||
|
yield self._connection.get_task_var('ansible_ssh_host_key_checking')
|
||||||
|
yield self._connection.get_task_var('ansible_host_key_checking')
|
||||||
|
yield C.HOST_KEY_CHECKING
|
||||||
|
val = next((v for v in candidates() if v is not None), True)
|
||||||
|
return boolean(val)
|
||||||
|
|
||||||
|
def private_key_file(self):
|
||||||
|
return self._play_context.private_key_file
|
||||||
|
|
||||||
|
def ssh_executable(self):
|
||||||
|
return C.config.get_config_value("ssh_executable", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {}))
|
||||||
|
|
||||||
|
def timeout(self):
|
||||||
|
return self._play_context.timeout
|
||||||
|
|
||||||
|
def ansible_ssh_timeout(self):
|
||||||
|
return (
|
||||||
|
self._connection.get_task_var('ansible_timeout') or
|
||||||
|
self._connection.get_task_var('ansible_ssh_timeout') or
|
||||||
|
self.timeout()
|
||||||
|
)
|
||||||
|
|
||||||
|
def ssh_args(self):
|
||||||
|
return [
|
||||||
|
mitogen.core.to_text(term)
|
||||||
|
for s in (
|
||||||
|
C.config.get_config_value("ssh_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {})),
|
||||||
|
C.config.get_config_value("ssh_common_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {})),
|
||||||
|
C.config.get_config_value("ssh_extra_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {}))
|
||||||
|
)
|
||||||
|
for term in ansible.utils.shlex.shlex_split(s or '')
|
||||||
|
]
|
||||||
|
|
||||||
|
def become_exe(self):
|
||||||
|
# In Ansible 2.8, PlayContext.become_exe always has a default value due
|
||||||
|
# to the new options mechanism. Previously it was only set if a value
|
||||||
|
# ("somewhere") had been specified for the task.
|
||||||
|
# For consistency in the tests, here we make older Ansibles behave like
|
||||||
|
# newer Ansibles.
|
||||||
|
exe = self._play_context.become_exe
|
||||||
|
if exe is None and self._play_context.become_method == 'sudo':
|
||||||
|
exe = 'sudo'
|
||||||
|
return exe
|
||||||
|
|
||||||
|
def sudo_args(self):
|
||||||
|
return [
|
||||||
|
mitogen.core.to_text(term)
|
||||||
|
for term in ansible.utils.shlex.shlex_split(
|
||||||
|
first_true((
|
||||||
|
self._play_context.become_flags,
|
||||||
|
# Ansible <=2.7.
|
||||||
|
getattr(self._play_context, 'sudo_flags', ''),
|
||||||
|
# Ansible <=2.3.
|
||||||
|
getattr(C, 'DEFAULT_BECOME_FLAGS', ''),
|
||||||
|
getattr(C, 'DEFAULT_SUDO_FLAGS', '')
|
||||||
|
), default='')
|
||||||
|
)
|
||||||
|
]
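first_true() simply picks the first truthy candidate from the tuple above, so older sudo_flags settings only apply when become_flags is empty; illustrative values:

    # first_true(('', None, '-H -S -n', ''), default='')  ->  '-H -S -n'
    # shlex_split('-H -S -n')                              ->  ['-H', '-S', '-n']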
|
||||||
|
|
||||||
|
def mitogen_via(self):
|
||||||
|
return self._connection.get_task_var('mitogen_via')
|
||||||
|
|
||||||
|
def mitogen_kind(self):
|
||||||
|
return self._connection.get_task_var('mitogen_kind')
|
||||||
|
|
||||||
|
def mitogen_mask_remote_name(self):
|
||||||
|
return self._connection.get_task_var('mitogen_mask_remote_name')
|
||||||
|
|
||||||
|
def mitogen_buildah_path(self):
|
||||||
|
return self._connection.get_task_var('mitogen_buildah_path')
|
||||||
|
|
||||||
|
def mitogen_docker_path(self):
|
||||||
|
return self._connection.get_task_var('mitogen_docker_path')
|
||||||
|
|
||||||
|
def mitogen_kubectl_path(self):
|
||||||
|
return self._connection.get_task_var('mitogen_kubectl_path')
|
||||||
|
|
||||||
|
def mitogen_lxc_path(self):
|
||||||
|
return self._connection.get_task_var('mitogen_lxc_path')
|
||||||
|
|
||||||
|
def mitogen_lxc_attach_path(self):
|
||||||
|
return self._connection.get_task_var('mitogen_lxc_attach_path')
|
||||||
|
|
||||||
|
def mitogen_lxc_info_path(self):
|
||||||
|
return self._connection.get_task_var('mitogen_lxc_info_path')
|
||||||
|
|
||||||
|
def mitogen_podman_path(self):
|
||||||
|
return self._connection.get_task_var('mitogen_podman_path')
|
||||||
|
|
||||||
|
def mitogen_ssh_keepalive_interval(self):
|
||||||
|
return self._connection.get_task_var('mitogen_ssh_keepalive_interval')
|
||||||
|
|
||||||
|
def mitogen_ssh_keepalive_count(self):
|
||||||
|
return self._connection.get_task_var('mitogen_ssh_keepalive_count')
|
||||||
|
|
||||||
|
def mitogen_machinectl_path(self):
|
||||||
|
return self._connection.get_task_var('mitogen_machinectl_path')
|
||||||
|
|
||||||
|
def mitogen_ssh_debug_level(self):
|
||||||
|
return self._connection.get_task_var('mitogen_ssh_debug_level')
|
||||||
|
|
||||||
|
def mitogen_ssh_compression(self):
|
||||||
|
return self._connection.get_task_var('mitogen_ssh_compression')
|
||||||
|
|
||||||
|
def extra_args(self):
|
||||||
|
return self._connection.get_extra_args()
|
||||||
|
|
||||||
|
def ansible_doas_exe(self):
|
||||||
|
return (
|
||||||
|
self._connection.get_task_var('ansible_doas_exe') or
|
||||||
|
os.environ.get('ANSIBLE_DOAS_EXE')
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MitogenViaSpec(Spec):
|
||||||
|
"""
|
||||||
|
MitogenViaSpec takes most of its information from the HostVars of the
|
||||||
|
running task. HostVars is a lightweight wrapper around VariableManager, so
|
||||||
|
it is better to say that VariableManager.get_vars() is the ultimate source
|
||||||
|
of MitogenViaSpec's information.
|
||||||
|
|
||||||
|
Due to this, mitogen_via= hosts must have all their configuration
|
||||||
|
information represented as host and group variables. We cannot use any
|
||||||
|
per-task configuration, as all that data belongs to the real target host.
|
||||||
|
|
||||||
|
Ansible uses all kinds of strange historical logic for calculating
|
||||||
|
variables, including making their precedence configurable. MitogenViaSpec
|
||||||
|
must ultimately reimplement all of that logic. It is likely that if you are
|
||||||
|
having a configuration problem with connection delegation, the answer to
|
||||||
|
your problem lies in the method implementations below!
|
||||||
|
"""
|
||||||
|
def __init__(self, inventory_name, host_vars, task_vars, become_method, become_user,
|
||||||
|
play_context, action):
|
||||||
|
"""
|
||||||
|
:param str inventory_name:
|
||||||
|
The inventory name of the intermediary machine, i.e. not the target
|
||||||
|
machine.
|
||||||
|
:param dict host_vars:
|
||||||
|
The HostVars magic dictionary provided by Ansible in task_vars.
|
||||||
|
:param dict task_vars:
|
||||||
|
Task vars provided by Ansible.
|
||||||
|
:param str become_method:
|
||||||
|
If the mitogen_via= spec included a become method, the method it
|
||||||
|
specifies.
|
||||||
|
:param str become_user:
|
||||||
|
If the mitogen_via= spec included a become user, the user it
|
||||||
|
specifies.
|
||||||
|
:param PlayContext play_context:
|
||||||
|
For some global values **only**, the PlayContext used to describe
|
||||||
|
the real target machine. Values from this object are **strictly
|
||||||
|
restricted** to values that are Ansible-global, e.g. the passwords
|
||||||
|
specified interactively.
|
||||||
|
:param ActionModuleMixin action:
|
||||||
|
Backref to the ActionModuleMixin required for ansible interpreter discovery
|
||||||
|
"""
|
||||||
|
self._inventory_name = inventory_name
|
||||||
|
self._host_vars = host_vars
|
||||||
|
self._task_vars = task_vars
|
||||||
|
self._become_method = become_method
|
||||||
|
self._become_user = become_user
|
||||||
|
# Dangerous! You may find a variable you want in this object, but it's
|
||||||
|
# almost certainly for the wrong machine!
|
||||||
|
self._dangerous_play_context = play_context
|
||||||
|
self._action = action
|
||||||
|
|
||||||
|
def transport(self):
|
||||||
|
return (
|
||||||
|
self._host_vars.get('ansible_connection') or
|
||||||
|
C.DEFAULT_TRANSPORT
|
||||||
|
)
|
||||||
|
|
||||||
|
def inventory_name(self):
|
||||||
|
return self._inventory_name
|
||||||
|
|
||||||
|
def remote_addr(self):
|
||||||
|
# play_context.py::MAGIC_VARIABLE_MAPPING
|
||||||
|
return (
|
||||||
|
self._host_vars.get('ansible_ssh_host') or
|
||||||
|
self._host_vars.get('ansible_host') or
|
||||||
|
self._inventory_name
|
||||||
|
)
|
||||||
|
|
||||||
|
def remote_user(self):
|
||||||
|
return (
|
||||||
|
self._host_vars.get('ansible_ssh_user') or
|
||||||
|
self._host_vars.get('ansible_user') or
|
||||||
|
C.DEFAULT_REMOTE_USER
|
||||||
|
)
|
||||||
|
|
||||||
|
def become(self):
|
||||||
|
return bool(self._become_user)
|
||||||
|
|
||||||
|
def become_method(self):
|
||||||
|
return (
|
||||||
|
self._become_method or
|
||||||
|
self._host_vars.get('ansible_become_method') or
|
||||||
|
C.DEFAULT_BECOME_METHOD
|
||||||
|
)
|
||||||
|
|
||||||
|
def become_user(self):
|
||||||
|
return self._become_user
|
||||||
|
|
||||||
|
def become_pass(self):
|
||||||
|
return optional_secret(
|
||||||
|
self._host_vars.get('ansible_become_pass') or
|
||||||
|
self._host_vars.get('ansible_become_password')
|
||||||
|
)
|
||||||
|
|
||||||
|
def password(self):
|
||||||
|
return optional_secret(
|
||||||
|
self._host_vars.get('ansible_ssh_pass') or
|
||||||
|
self._host_vars.get('ansible_password')
|
||||||
|
)
|
||||||
|
|
||||||
|
def port(self):
|
||||||
|
return (
|
||||||
|
self._host_vars.get('ansible_ssh_port') or
|
||||||
|
self._host_vars.get('ansible_port') or
|
||||||
|
C.DEFAULT_REMOTE_PORT
|
||||||
|
)
|
||||||
|
|
||||||
|
def python_path(self, rediscover_python=False):
|
||||||
|
s = self._host_vars.get('ansible_python_interpreter')
|
||||||
|
# #511, #536: executor/module_common.py::_get_shebang() hard-wires
|
||||||
|
# "/usr/bin/python" as the default interpreter path if no other
|
||||||
|
# interpreter is specified.
|
||||||
|
return parse_python_path(
|
||||||
|
s,
|
||||||
|
task_vars=self._task_vars,
|
||||||
|
action=self._action,
|
||||||
|
rediscover_python=rediscover_python)
|
||||||
|
|
||||||
|
def host_key_checking(self):
|
||||||
|
def candidates():
|
||||||
|
yield self._host_vars.get('ansible_ssh_host_key_checking')
|
||||||
|
yield self._host_vars.get('ansible_host_key_checking')
|
||||||
|
yield C.HOST_KEY_CHECKING
|
||||||
|
val = next((v for v in candidates() if v is not None), True)
|
||||||
|
return boolean(val)
|
||||||
|
|
||||||
|
def private_key_file(self):
|
||||||
|
# TODO: must come from PlayContext too.
|
||||||
|
return (
|
||||||
|
self._host_vars.get('ansible_ssh_private_key_file') or
|
||||||
|
self._host_vars.get('ansible_private_key_file') or
|
||||||
|
C.DEFAULT_PRIVATE_KEY_FILE
|
||||||
|
)
|
||||||
|
|
||||||
|
def ssh_executable(self):
|
||||||
|
return C.config.get_config_value("ssh_executable", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {}))
|
||||||
|
|
||||||
|
def timeout(self):
|
||||||
|
# TODO: must come from PlayContext too.
|
||||||
|
return C.DEFAULT_TIMEOUT
|
||||||
|
|
||||||
|
def ansible_ssh_timeout(self):
|
||||||
|
return (
|
||||||
|
self._host_vars.get('ansible_timeout') or
|
||||||
|
self._host_vars.get('ansible_ssh_timeout') or
|
||||||
|
self.timeout()
|
||||||
|
)
|
||||||
|
|
||||||
|
def ssh_args(self):
|
||||||
|
return [
|
||||||
|
mitogen.core.to_text(term)
|
||||||
|
for s in (
|
||||||
|
C.config.get_config_value("ssh_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {})),
|
||||||
|
C.config.get_config_value("ssh_common_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {})),
|
||||||
|
C.config.get_config_value("ssh_extra_args", plugin_type="connection", plugin_name="ssh", variables=self._task_vars.get("vars", {}))
|
||||||
|
)
|
||||||
|
for term in ansible.utils.shlex.shlex_split(s)
|
||||||
|
if s
|
||||||
|
]
|
||||||
|
|
||||||
|
def become_exe(self):
|
||||||
|
return (
|
||||||
|
self._host_vars.get('ansible_become_exe') or
|
||||||
|
C.DEFAULT_BECOME_EXE
|
||||||
|
)
|
||||||
|
|
||||||
|
def sudo_args(self):
|
||||||
|
return [
|
||||||
|
mitogen.core.to_text(term)
|
||||||
|
for s in (
|
||||||
|
self._host_vars.get('ansible_sudo_flags') or '',
|
||||||
|
self._host_vars.get('ansible_become_flags') or '',
|
||||||
|
)
|
||||||
|
for term in ansible.utils.shlex.shlex_split(s)
|
||||||
|
]
|
||||||
|
|
||||||
|
def mitogen_via(self):
|
||||||
|
return self._host_vars.get('mitogen_via')
|
||||||
|
|
||||||
|
def mitogen_kind(self):
|
||||||
|
return self._host_vars.get('mitogen_kind')
|
||||||
|
|
||||||
|
def mitogen_mask_remote_name(self):
|
||||||
|
return self._host_vars.get('mitogen_mask_remote_name')
|
||||||
|
|
||||||
|
def mitogen_buildah_path(self):
|
||||||
|
return self._host_vars.get('mitogen_buildah_path')
|
||||||
|
|
||||||
|
def mitogen_docker_path(self):
|
||||||
|
return self._host_vars.get('mitogen_docker_path')
|
||||||
|
|
||||||
|
def mitogen_kubectl_path(self):
|
||||||
|
return self._host_vars.get('mitogen_kubectl_path')
|
||||||
|
|
||||||
|
def mitogen_lxc_path(self):
|
||||||
|
return self._host_vars.get('mitogen_lxc_path')
|
||||||
|
|
||||||
|
def mitogen_lxc_attach_path(self):
|
||||||
|
return self._host_vars.get('mitogen_lxc_attach_path')
|
||||||
|
|
||||||
|
def mitogen_lxc_info_path(self):
|
||||||
|
return self._host_vars.get('mitogen_lxc_info_path')
|
||||||
|
|
||||||
|
def mitogen_podman_path(self):
|
||||||
|
return self._host_vars.get('mitogen_podman_path')
|
||||||
|
|
||||||
|
def mitogen_ssh_keepalive_interval(self):
|
||||||
|
return self._host_vars.get('mitogen_ssh_keepalive_interval')
|
||||||
|
|
||||||
|
def mitogen_ssh_keepalive_count(self):
|
||||||
|
return self._host_vars.get('mitogen_ssh_keepalive_count')
|
||||||
|
|
||||||
|
def mitogen_machinectl_path(self):
|
||||||
|
return self._host_vars.get('mitogen_machinectl_path')
|
||||||
|
|
||||||
|
def mitogen_ssh_debug_level(self):
|
||||||
|
return self._host_vars.get('mitogen_ssh_debug_level')
|
||||||
|
|
||||||
|
def mitogen_ssh_compression(self):
|
||||||
|
return self._host_vars.get('mitogen_ssh_compression')
|
||||||
|
|
||||||
|
def extra_args(self):
|
||||||
|
return [] # TODO
|
||||||
|
|
||||||
|
def ansible_doas_exe(self):
|
||||||
|
return (
|
||||||
|
self._host_vars.get('ansible_doas_exe') or
|
||||||
|
os.environ.get('ANSIBLE_DOAS_EXE')
|
||||||
|
)
|
mitogen-0.3.9/ansible_mitogen/utils/__init__.py (new file, 29 lines)
@@ -0,0 +1,29 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type

import re

import ansible

__all__ = [
    'ansible_version',
]


def _parse(v_string):
    # Adapted from distutils.version.LooseVersion.parse()
    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
    for component in component_re.split(v_string):
        if not component or component == '.':
            continue
        try:
            yield int(component)
        except ValueError:
            yield component


ansible_version = tuple(_parse(ansible.__version__))

del _parse
del re
del ansible
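Illustrative values (not taken from this commit) of the tuple this module exposes:

    # ansible.__version__ == '2.16.3'    ->  ansible_version == (2, 16, 3)
    # ansible.__version__ == '2.10.0b1'  ->  ansible_version == (2, 10, 0, 'b', 1)
    # Tuples compare element-wise, e.g. ansible_version[:2] <= (2, 16).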
Binary file not shown.
Binary file not shown.
mitogen-0.3.9/ansible_mitogen/utils/unsafe.py (new file, 79 lines)
@@ -0,0 +1,79 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type

import ansible
import ansible.utils.unsafe_proxy

import ansible_mitogen.utils

import mitogen
import mitogen.core
import mitogen.utils

__all__ = [
    'cast',
]

def _cast_to_dict(obj): return {cast(k): cast(v) for k, v in obj.items()}
def _cast_to_list(obj): return [cast(v) for v in obj]
def _cast_unsafe(obj): return obj._strip_unsafe()
def _passthrough(obj): return obj


# A dispatch table to cast objects based on their exact type.
# This is an optimisation; reliable fallbacks are required (e.g. isinstance()).
_CAST_DISPATCH = {
    bytes: bytes,
    dict: _cast_to_dict,
    list: _cast_to_list,
    tuple: _cast_to_list,
    mitogen.core.UnicodeType: mitogen.core.UnicodeType,
}
_CAST_DISPATCH.update({t: _passthrough for t in mitogen.utils.PASSTHROUGH})

if hasattr(ansible.utils.unsafe_proxy.AnsibleUnsafeText, '_strip_unsafe'):
    _CAST_DISPATCH.update({
        ansible.utils.unsafe_proxy.AnsibleUnsafeBytes: _cast_unsafe,
        ansible.utils.unsafe_proxy.AnsibleUnsafeText: _cast_unsafe,
        ansible.utils.unsafe_proxy.NativeJinjaUnsafeText: _cast_unsafe,
    })
elif ansible_mitogen.utils.ansible_version[:2] <= (2, 16):
    _CAST_DISPATCH.update({
        ansible.utils.unsafe_proxy.AnsibleUnsafeBytes: bytes,
        ansible.utils.unsafe_proxy.AnsibleUnsafeText: mitogen.core.UnicodeType,
    })
else:
    mitogen_ver = '.'.join(str(v) for v in mitogen.__version__)
    raise ImportError("Mitogen %s can't unwrap Ansible %s AnsibleUnsafe objects"
                      % (mitogen_ver, ansible.__version__))


def cast(obj):
    """
    Return obj (or a copy) with subtypes of builtins cast to their supertype.

    This is an enhanced version of :func:`mitogen.utils.cast`. In addition it
    handles ``ansible.utils.unsafe_proxy.AnsibleUnsafeText`` and variants.

    There are types handled by :func:`ansible.utils.unsafe_proxy.wrap_var()`
    that this function currently does not handle (e.g. `set()`) or preserve
    (e.g. `tuple()`). Future enhancements may change this.

    :param obj:
        Object to undecorate.
    :returns:
        Undecorated object.
    """
    # Fast path: obj is a known type, dispatch directly.
    try:
        unwrapper = _CAST_DISPATCH[type(obj)]
    except KeyError:
        pass
    else:
        return unwrapper(obj)

    # Slow path: obj is some unknown subclass.
    if isinstance(obj, dict): return _cast_to_dict(obj)
    if isinstance(obj, (list, tuple)): return _cast_to_list(obj)

    return mitogen.utils.cast(obj)
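A hedged usage sketch of cast(): strip Ansible's AnsibleUnsafe wrappers from a structure before it is serialised. The values are illustrative; wrap_var() is Ansible's own tainting helper.

    from ansible.utils.unsafe_proxy import wrap_var
    from ansible_mitogen.utils.unsafe import cast

    tainted = wrap_var({'cmd': 'uptime', 'args': ['-p']})
    clean = cast(tainted)
    assert type(clean['cmd']) is not type(tainted['cmd'])  # wrapper removed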
mitogen-0.3.9/mitogen.egg-info/PKG-INFO (new file, 39 lines)
@@ -0,0 +1,39 @@
Metadata-Version: 2.1
Name: mitogen
Version: 0.3.9
Summary: Library for writing distributed self-replicating programs.
Home-page: https://github.com/mitogen-hq/mitogen/
Author: David Wilson
License: New BSD
Classifier: Environment :: Console
Classifier: Framework :: Ansible
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: POSIX
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Topic :: System :: Distributed Computing
Classifier: Topic :: System :: Systems Administration
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*
Description-Content-Type: text/markdown
License-File: LICENSE

# Mitogen

<a href="https://mitogen.networkgenomics.com/">Please see the documentation</a>.

![](https://i.imgur.com/eBM6LhJ.gif)

[![Total alerts](https://img.shields.io/lgtm/alerts/g/mitogen-hq/mitogen.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mitogen-hq/mitogen/alerts/)

[![Build Status](https://dev.azure.com/mitogen-hq/mitogen/_apis/build/status/mitogen-hq.mitogen?branchName=master)](https://dev.azure.com/mitogen-hq/mitogen/_build/latest?definitionId=1&branchName=master)
mitogen-0.3.9/mitogen.egg-info/SOURCES.txt (new file, 82 lines)
@@ -0,0 +1,82 @@
|
|||||||
|
LICENSE
|
||||||
|
MANIFEST.in
|
||||||
|
README.md
|
||||||
|
setup.cfg
|
||||||
|
setup.py
|
||||||
|
ansible_mitogen/__init__.py
|
||||||
|
ansible_mitogen/affinity.py
|
||||||
|
ansible_mitogen/connection.py
|
||||||
|
ansible_mitogen/loaders.py
|
||||||
|
ansible_mitogen/logging.py
|
||||||
|
ansible_mitogen/mixins.py
|
||||||
|
ansible_mitogen/module_finder.py
|
||||||
|
ansible_mitogen/parsing.py
|
||||||
|
ansible_mitogen/planner.py
|
||||||
|
ansible_mitogen/process.py
|
||||||
|
ansible_mitogen/runner.py
|
||||||
|
ansible_mitogen/services.py
|
||||||
|
ansible_mitogen/strategy.py
|
||||||
|
ansible_mitogen/target.py
|
||||||
|
ansible_mitogen/transport_config.py
|
||||||
|
ansible_mitogen/compat/__init__.py
|
||||||
|
ansible_mitogen/plugins/__init__.py
|
||||||
|
ansible_mitogen/plugins/action/__init__.py
|
||||||
|
ansible_mitogen/plugins/action/mitogen_fetch.py
|
||||||
|
ansible_mitogen/plugins/action/mitogen_get_stack.py
|
||||||
|
ansible_mitogen/plugins/connection/__init__.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_buildah.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_doas.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_docker.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_jail.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_kubectl.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_local.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_lxc.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_lxd.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_machinectl.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_podman.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_setns.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_ssh.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_su.py
|
||||||
|
ansible_mitogen/plugins/connection/mitogen_sudo.py
|
||||||
|
ansible_mitogen/plugins/strategy/__init__.py
|
||||||
|
ansible_mitogen/plugins/strategy/mitogen.py
|
||||||
|
ansible_mitogen/plugins/strategy/mitogen_free.py
|
||||||
|
ansible_mitogen/plugins/strategy/mitogen_host_pinned.py
|
||||||
|
ansible_mitogen/plugins/strategy/mitogen_linear.py
|
||||||
|
ansible_mitogen/utils/__init__.py
|
||||||
|
ansible_mitogen/utils/unsafe.py
|
||||||
|
mitogen/__init__.py
|
||||||
|
mitogen/buildah.py
|
||||||
|
mitogen/core.py
|
||||||
|
mitogen/debug.py
|
||||||
|
mitogen/doas.py
|
||||||
|
mitogen/docker.py
|
||||||
|
mitogen/fakessh.py
|
||||||
|
mitogen/fork.py
|
||||||
|
mitogen/jail.py
|
||||||
|
mitogen/kubectl.py
|
||||||
|
mitogen/lxc.py
|
||||||
|
mitogen/lxd.py
|
||||||
|
mitogen/master.py
|
||||||
|
mitogen/minify.py
|
||||||
|
mitogen/os_fork.py
|
||||||
|
mitogen/parent.py
|
||||||
|
mitogen/podman.py
|
||||||
|
mitogen/profiler.py
|
||||||
|
mitogen/select.py
|
||||||
|
mitogen/service.py
|
||||||
|
mitogen/setns.py
|
||||||
|
mitogen/ssh.py
|
||||||
|
mitogen/su.py
|
||||||
|
mitogen/sudo.py
|
||||||
|
mitogen/unix.py
|
||||||
|
mitogen/utils.py
|
||||||
|
mitogen.egg-info/PKG-INFO
|
||||||
|
mitogen.egg-info/SOURCES.txt
|
||||||
|
mitogen.egg-info/dependency_links.txt
|
||||||
|
mitogen.egg-info/not-zip-safe
|
||||||
|
mitogen.egg-info/top_level.txt
|
||||||
|
mitogen/compat/__init__.py
|
||||||
|
mitogen/compat/pkgutil.py
|
||||||
|
mitogen/compat/tokenize.py
|
||||||
|
tests/testlib.py
|
mitogen-0.3.9/mitogen.egg-info/dependency_links.txt (new file, 1 line)
@@ -0,0 +1 @@

mitogen-0.3.9/mitogen.egg-info/not-zip-safe (new file, 1 line)
@@ -0,0 +1 @@

mitogen-0.3.9/mitogen.egg-info/top_level.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
ansible_mitogen
mitogen
mitogen-0.3.9/mitogen/__init__.py (new file, 120 lines)
@@ -0,0 +1,120 @@
|
|||||||
|
# Copyright 2019, David Wilson
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are met:
|
||||||
|
#
|
||||||
|
# 1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer.
|
||||||
|
#
|
||||||
|
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer in the documentation
|
||||||
|
# and/or other materials provided with the distribution.
|
||||||
|
#
|
||||||
|
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||||
|
# may be used to endorse or promote products derived from this software without
|
||||||
|
# specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||||
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
# !mitogen: minify_safe
|
||||||
|
|
||||||
|
"""
|
||||||
|
On the Mitogen master, this is imported from ``mitogen/__init__.py`` as would
|
||||||
|
be expected. On the slave, it is built dynamically during startup.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
#: Library version as a tuple.
|
||||||
|
__version__ = (0, 3, 9)
|
||||||
|
|
||||||
|
|
||||||
|
#: This is :data:`False` in slave contexts. Previously it was used to prevent
|
||||||
|
#: re-execution of :mod:`__main__` in single file programs, however that now
|
||||||
|
#: happens automatically.
|
||||||
|
is_master = True
|
||||||
|
|
||||||
|
|
||||||
|
#: This is `0` in a master, otherwise it is the master-assigned ID unique to
|
||||||
|
#: the slave context used for message routing.
|
||||||
|
context_id = 0
|
||||||
|
|
||||||
|
|
||||||
|
#: This is :data:`None` in a master, otherwise it is the master-assigned ID
|
||||||
|
#: unique to the slave's parent context.
|
||||||
|
parent_id = None
|
||||||
|
|
||||||
|
|
||||||
|
#: This is an empty list in a master, otherwise it is a list of parent context
|
||||||
|
#: IDs ordered from most direct to least direct.
|
||||||
|
parent_ids = []
|
||||||
|
|
||||||
|
|
||||||
|
import os
|
||||||
|
_default_profiling = os.environ.get('MITOGEN_PROFILING') is not None
|
||||||
|
del os
|
||||||
|
|
||||||
|
|
||||||
|
def main(log_level='INFO', profiling=_default_profiling):
|
||||||
|
"""
|
||||||
|
Convenience decorator primarily useful for writing discardable test
|
||||||
|
scripts.
|
||||||
|
|
||||||
|
In the master process, when `func` is defined in the :mod:`__main__`
|
||||||
|
module, arranges for `func(router)` to be invoked immediately, with
|
||||||
|
:py:class:`mitogen.master.Router` construction and destruction handled just
|
||||||
|
as in :py:func:`mitogen.utils.run_with_router`. In slaves, this function
|
||||||
|
does nothing.
|
||||||
|
|
||||||
|
:param str log_level:
|
||||||
|
Logging package level to configure via
|
||||||
|
:py:func:`mitogen.utils.log_to_file`.
|
||||||
|
|
||||||
|
:param bool profiling:
|
||||||
|
If :py:data:`True`, equivalent to setting
|
||||||
|
:py:attr:`mitogen.master.Router.profiling` prior to router
|
||||||
|
construction. This causes ``/tmp`` files to be created everywhere at
|
||||||
|
the end of a successful run with :py:mod:`cProfile` output for every
|
||||||
|
thread.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
import mitogen
|
||||||
|
import requests
|
||||||
|
|
||||||
|
def get_url(url):
|
||||||
|
return requests.get(url).text
|
||||||
|
|
||||||
|
@mitogen.main()
|
||||||
|
def main(router):
|
||||||
|
z = router.ssh(hostname='k3')
|
||||||
|
print(z.call(get_url, 'https://example.org/'))
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def wrapper(func):
|
||||||
|
if func.__module__ != '__main__':
|
||||||
|
return func
|
||||||
|
import mitogen.parent
|
||||||
|
import mitogen.utils
|
||||||
|
if profiling:
|
||||||
|
mitogen.core.enable_profiling()
|
||||||
|
mitogen.master.Router.profiling = profiling
|
||||||
|
mitogen.utils.log_to_file(level=log_level)
|
||||||
|
return mitogen.core._profile_hook(
|
||||||
|
'app.main',
|
||||||
|
mitogen.utils.run_with_router,
|
||||||
|
func,
|
||||||
|
)
|
||||||
|
return wrapper
|
Binary files not shown (new .pyc files under mitogen-0.3.9/mitogen/__pycache__/):
__init__.cpython-310.pyc, core.cpython-310.pyc, debug.cpython-310.pyc,
fork.cpython-310.pyc, master.cpython-310.pyc, minify.cpython-310.pyc,
parent.cpython-310.pyc, select.cpython-310.pyc, service.cpython-310.pyc,
ssh.cpython-310.pyc, sudo.cpython-310.pyc, unix.cpython-310.pyc,
utils.cpython-310.pyc
mitogen-0.3.9/mitogen/buildah.py (new file, 72 lines)
@@ -0,0 +1,72 @@
|
|||||||
|
# Copyright 2019, David Wilson
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are met:
|
||||||
|
#
|
||||||
|
# 1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer.
|
||||||
|
#
|
||||||
|
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer in the documentation
|
||||||
|
# and/or other materials provided with the distribution.
|
||||||
|
#
|
||||||
|
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||||
|
# may be used to endorse or promote products derived from this software without
|
||||||
|
# specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||||
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
# !mitogen: minify_safe
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import mitogen.parent
|
||||||
|
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Options(mitogen.parent.Options):
|
||||||
|
container = None
|
||||||
|
username = None
|
||||||
|
buildah_path = 'buildah'
|
||||||
|
|
||||||
|
def __init__(self, container=None, buildah_path=None, username=None,
|
||||||
|
**kwargs):
|
||||||
|
super(Options, self).__init__(**kwargs)
|
||||||
|
assert container is not None
|
||||||
|
self.container = container
|
||||||
|
if buildah_path:
|
||||||
|
self.buildah_path = buildah_path
|
||||||
|
if username:
|
||||||
|
self.username = username
|
||||||
|
|
||||||
|
|
||||||
|
class Connection(mitogen.parent.Connection):
|
||||||
|
options_class = Options
|
||||||
|
child_is_immediate_subprocess = False
|
||||||
|
|
||||||
|
# TODO: better way of capturing errors such as "No such container."
|
||||||
|
create_child_args = {
|
||||||
|
'merge_stdio': True
|
||||||
|
}
|
||||||
|
|
||||||
|
def _get_name(self):
|
||||||
|
return u'buildah.' + self.options.container
|
||||||
|
|
||||||
|
def get_boot_command(self):
|
||||||
|
args = [self.options.buildah_path, 'run']
|
||||||
|
if self.options.username:
|
||||||
|
args += ['--user=' + self.options.username]
|
||||||
|
args += ['--', self.options.container]
|
||||||
|
return args + super(Connection, self).get_boot_command()
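Illustrative result of the method above (assumed option values): for container='web1' and username='app', the command prefix is roughly

    # ['buildah', 'run', '--user=app', '--', 'web1'] + <python bootstrap argv>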
mitogen-0.3.9/mitogen/compat/__init__.py (new file, 0 lines)
mitogen-0.3.9/mitogen/compat/pkgutil.py (new file, 594 lines)
@@ -0,0 +1,594 @@
|
|||||||
|
"""Utilities to support packages."""
|
||||||
|
|
||||||
|
# !mitogen: minify_safe
|
||||||
|
|
||||||
|
# NOTE: This module must remain compatible with Python 2.3, as it is shared
|
||||||
|
# by setuptools for distribution with Python 2.3 and up.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import imp
|
||||||
|
import os.path
|
||||||
|
from types import ModuleType
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
'get_importer', 'iter_importers', 'get_loader', 'find_loader',
|
||||||
|
'walk_packages', 'iter_modules', 'get_data',
|
||||||
|
'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
|
||||||
|
]
|
||||||
|
|
||||||
|
def read_code(stream):
|
||||||
|
# This helper is needed in order for the PEP 302 emulation to
|
||||||
|
# correctly handle compiled files
|
||||||
|
import marshal
|
||||||
|
|
||||||
|
magic = stream.read(4)
|
||||||
|
if magic != imp.get_magic():
|
||||||
|
return None
|
||||||
|
|
||||||
|
stream.read(4) # Skip timestamp
|
||||||
|
return marshal.load(stream)
|
||||||
|
|
||||||
|
|
||||||
|
def simplegeneric(func):
|
||||||
|
"""Make a trivial single-dispatch generic function"""
|
||||||
|
registry = {}
|
||||||
|
def wrapper(*args, **kw):
|
||||||
|
ob = args[0]
|
||||||
|
try:
|
||||||
|
cls = ob.__class__
|
||||||
|
except AttributeError:
|
||||||
|
cls = type(ob)
|
||||||
|
try:
|
||||||
|
mro = cls.__mro__
|
||||||
|
except AttributeError:
|
||||||
|
try:
|
||||||
|
class cls(cls, object):
|
||||||
|
pass
|
||||||
|
mro = cls.__mro__[1:]
|
||||||
|
except TypeError:
|
||||||
|
mro = object, # must be an ExtensionClass or some such :(
|
||||||
|
for t in mro:
|
||||||
|
if t in registry:
|
||||||
|
return registry[t](*args, **kw)
|
||||||
|
else:
|
||||||
|
return func(*args, **kw)
|
||||||
|
try:
|
||||||
|
wrapper.__name__ = func.__name__
|
||||||
|
except (TypeError, AttributeError):
|
||||||
|
pass # Python 2.3 doesn't allow functions to be renamed
|
||||||
|
|
||||||
|
def register(typ, func=None):
|
||||||
|
if func is None:
|
||||||
|
return lambda f: register(typ, f)
|
||||||
|
registry[typ] = func
|
||||||
|
return func
|
||||||
|
|
||||||
|
wrapper.__dict__ = func.__dict__
|
||||||
|
wrapper.__doc__ = func.__doc__
|
||||||
|
wrapper.register = register
|
||||||
|
return wrapper
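A short example of the dispatch pattern simplegeneric() provides; the function names are illustrative only.

    @simplegeneric
    def describe(obj):
        return 'generic object'

    @describe.register(int)
    def _describe_int(obj):
        return 'an int'

    describe(3)      # -> 'an int'
    describe('abc')  # -> 'generic object'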
|
||||||
|
|
||||||
|
|
||||||
|
def walk_packages(path=None, prefix='', onerror=None):
|
||||||
|
"""Yields (module_loader, name, ispkg) for all modules recursively
|
||||||
|
on path, or, if path is None, all accessible modules.
|
||||||
|
|
||||||
|
'path' should be either None or a list of paths to look for
|
||||||
|
modules in.
|
||||||
|
|
||||||
|
'prefix' is a string to output on the front of every module name
|
||||||
|
on output.
|
||||||
|
|
||||||
|
Note that this function must import all *packages* (NOT all
|
||||||
|
modules!) on the given path, in order to access the __path__
|
||||||
|
attribute to find submodules.
|
||||||
|
|
||||||
|
'onerror' is a function which gets called with one argument (the
|
||||||
|
name of the package which was being imported) if any exception
|
||||||
|
occurs while trying to import a package. If no onerror function is
|
||||||
|
supplied, ImportErrors are caught and ignored, while all other
|
||||||
|
exceptions are propagated, terminating the search.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
# list all modules python can access
|
||||||
|
walk_packages()
|
||||||
|
|
||||||
|
# list all submodules of ctypes
|
||||||
|
walk_packages(ctypes.__path__, ctypes.__name__+'.')
|
||||||
|
"""
|
||||||
|
|
||||||
|
def seen(p, m={}):
|
||||||
|
if p in m:
|
||||||
|
return True
|
||||||
|
m[p] = True
|
||||||
|
|
||||||
|
for importer, name, ispkg in iter_modules(path, prefix):
|
||||||
|
yield importer, name, ispkg
|
||||||
|
|
||||||
|
if ispkg:
|
||||||
|
try:
|
||||||
|
__import__(name)
|
||||||
|
except ImportError:
|
||||||
|
if onerror is not None:
|
||||||
|
onerror(name)
|
||||||
|
except Exception:
|
||||||
|
if onerror is not None:
|
||||||
|
onerror(name)
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
path = getattr(sys.modules[name], '__path__', None) or []
|
||||||
|
|
||||||
|
# don't traverse path items we've seen before
|
||||||
|
path = [p for p in path if not seen(p)]
|
||||||
|
|
||||||
|
for item in walk_packages(path, name+'.', onerror):
|
||||||
|
yield item
|
||||||
|
|
||||||
|
|
||||||
|
def iter_modules(path=None, prefix=''):
|
||||||
|
"""Yields (module_loader, name, ispkg) for all submodules on path,
|
||||||
|
or, if path is None, all top-level modules on sys.path.
|
||||||
|
|
||||||
|
'path' should be either None or a list of paths to look for
|
||||||
|
modules in.
|
||||||
|
|
||||||
|
'prefix' is a string to output on the front of every module name
|
||||||
|
on output.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if path is None:
|
||||||
|
importers = iter_importers()
|
||||||
|
else:
|
||||||
|
importers = map(get_importer, path)
|
||||||
|
|
||||||
|
yielded = {}
|
||||||
|
for i in importers:
|
||||||
|
for name, ispkg in iter_importer_modules(i, prefix):
|
||||||
|
if name not in yielded:
|
||||||
|
yielded[name] = 1
|
||||||
|
yield i, name, ispkg
|
||||||
|
|
||||||
|
|
||||||
|
#@simplegeneric
|
||||||
|
def iter_importer_modules(importer, prefix=''):
|
||||||
|
if not hasattr(importer, 'iter_modules'):
|
||||||
|
return []
|
||||||
|
return importer.iter_modules(prefix)
|
||||||
|
|
||||||
|
iter_importer_modules = simplegeneric(iter_importer_modules)
|
||||||
|
|
||||||
|
|
||||||
|
class ImpImporter:
|
||||||
|
"""PEP 302 Importer that wraps Python's "classic" import algorithm
|
||||||
|
|
||||||
|
ImpImporter(dirname) produces a PEP 302 importer that searches that
|
||||||
|
directory. ImpImporter(None) produces a PEP 302 importer that searches
|
||||||
|
the current sys.path, plus any modules that are frozen or built-in.
|
||||||
|
|
||||||
|
Note that ImpImporter does not currently support being used by placement
|
||||||
|
on sys.meta_path.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, path=None):
|
||||||
|
self.path = path
|
||||||
|
|
||||||
|
def find_module(self, fullname, path=None):
|
||||||
|
# Note: we ignore 'path' argument since it is only used via meta_path
|
||||||
|
subname = fullname.split(".")[-1]
|
||||||
|
if subname != fullname and self.path is None:
|
||||||
|
return None
|
||||||
|
if self.path is None:
|
||||||
|
path = None
|
||||||
|
else:
|
||||||
|
path = [os.path.realpath(self.path)]
|
||||||
|
try:
|
||||||
|
file, filename, etc = imp.find_module(subname, path)
|
||||||
|
except ImportError:
|
||||||
|
return None
|
||||||
|
return ImpLoader(fullname, file, filename, etc)
|
||||||
|
|
||||||
|
def iter_modules(self, prefix=''):
|
||||||
|
if self.path is None or not os.path.isdir(self.path):
|
||||||
|
return
|
||||||
|
|
||||||
|
yielded = {}
|
||||||
|
import inspect
|
||||||
|
try:
|
||||||
|
filenames = os.listdir(self.path)
|
||||||
|
except OSError:
|
||||||
|
# ignore unreadable directories like import does
|
||||||
|
filenames = []
|
||||||
|
filenames.sort() # handle packages before same-named modules
|
||||||
|
|
||||||
|
for fn in filenames:
|
||||||
|
modname = inspect.getmodulename(fn)
|
||||||
|
if modname=='__init__' or modname in yielded:
|
||||||
|
continue
|
||||||
|
|
||||||
|
path = os.path.join(self.path, fn)
|
||||||
|
ispkg = False
|
||||||
|
|
||||||
|
if not modname and os.path.isdir(path) and '.' not in fn:
|
||||||
|
modname = fn
|
||||||
|
try:
|
||||||
|
dircontents = os.listdir(path)
|
||||||
|
except OSError:
|
||||||
|
# ignore unreadable directories like import does
|
||||||
|
dircontents = []
|
||||||
|
for fn in dircontents:
|
||||||
|
subname = inspect.getmodulename(fn)
|
||||||
|
if subname=='__init__':
|
||||||
|
ispkg = True
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
continue # not a package
|
||||||
|
|
||||||
|
if modname and '.' not in modname:
|
||||||
|
yielded[modname] = 1
|
||||||
|
yield prefix + modname, ispkg
|
||||||
|
|
||||||
|
|
||||||
|
class ImpLoader:
|
||||||
|
"""PEP 302 Loader that wraps Python's "classic" import algorithm
|
||||||
|
"""
|
||||||
|
code = source = None
|
||||||
|
|
||||||
|
def __init__(self, fullname, file, filename, etc):
|
||||||
|
self.file = file
|
||||||
|
self.filename = filename
|
||||||
|
self.fullname = fullname
|
||||||
|
self.etc = etc
|
||||||
|
|
||||||
|
def load_module(self, fullname):
|
||||||
|
self._reopen()
|
||||||
|
try:
|
||||||
|
mod = imp.load_module(fullname, self.file, self.filename, self.etc)
|
||||||
|
finally:
|
||||||
|
if self.file:
|
||||||
|
self.file.close()
|
||||||
|
# Note: we don't set __loader__ because we want the module to look
|
||||||
|
# normal; i.e. this is just a wrapper for standard import machinery
|
||||||
|
return mod
|
||||||
|
|
||||||
|
def get_data(self, pathname):
|
||||||
|
return open(pathname, "rb").read()
|
||||||
|
|
||||||
|
def _reopen(self):
|
||||||
|
if self.file and self.file.closed:
|
||||||
|
mod_type = self.etc[2]
|
||||||
|
if mod_type==imp.PY_SOURCE:
|
||||||
|
self.file = open(self.filename, 'rU')
|
||||||
|
elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
|
||||||
|
self.file = open(self.filename, 'rb')
|
||||||
|
|
||||||
|
def _fix_name(self, fullname):
|
||||||
|
if fullname is None:
|
||||||
|
fullname = self.fullname
|
||||||
|
elif fullname != self.fullname:
|
||||||
|
raise ImportError("Loader for module %s cannot handle "
|
||||||
|
"module %s" % (self.fullname, fullname))
|
||||||
|
return fullname
|
||||||
|
|
||||||
|
def is_package(self, fullname):
|
||||||
|
fullname = self._fix_name(fullname)
|
||||||
|
return self.etc[2]==imp.PKG_DIRECTORY
|
||||||
|
|
||||||
|
def get_code(self, fullname=None):
|
||||||
|
fullname = self._fix_name(fullname)
|
||||||
|
if self.code is None:
|
||||||
|
mod_type = self.etc[2]
|
||||||
|
if mod_type==imp.PY_SOURCE:
|
||||||
|
source = self.get_source(fullname)
|
||||||
|
self.code = compile(source, self.filename, 'exec')
|
||||||
|
elif mod_type==imp.PY_COMPILED:
|
||||||
|
self._reopen()
|
||||||
|
try:
|
||||||
|
self.code = read_code(self.file)
|
||||||
|
finally:
|
||||||
|
self.file.close()
|
||||||
|
elif mod_type==imp.PKG_DIRECTORY:
|
||||||
|
self.code = self._get_delegate().get_code()
|
||||||
|
return self.code
|
||||||
|
|
||||||
|
def get_source(self, fullname=None):
|
||||||
|
fullname = self._fix_name(fullname)
|
||||||
|
if self.source is None:
|
||||||
|
mod_type = self.etc[2]
|
||||||
|
if mod_type==imp.PY_SOURCE:
|
||||||
|
self._reopen()
|
||||||
|
try:
|
||||||
|
self.source = self.file.read()
|
||||||
|
finally:
|
||||||
|
self.file.close()
|
||||||
|
elif mod_type==imp.PY_COMPILED:
|
||||||
|
if os.path.exists(self.filename[:-1]):
|
||||||
|
f = open(self.filename[:-1], 'rU')
|
||||||
|
self.source = f.read()
|
||||||
|
f.close()
|
||||||
|
elif mod_type==imp.PKG_DIRECTORY:
|
||||||
|
self.source = self._get_delegate().get_source()
|
||||||
|
return self.source
|
||||||
|
|
||||||
|
|
||||||
|
def _get_delegate(self):
|
||||||
|
return ImpImporter(self.filename).find_module('__init__')
|
||||||
|
|
||||||
|
def get_filename(self, fullname=None):
|
||||||
|
fullname = self._fix_name(fullname)
|
||||||
|
mod_type = self.etc[2]
|
||||||
|
if self.etc[2]==imp.PKG_DIRECTORY:
|
||||||
|
return self._get_delegate().get_filename()
|
||||||
|
elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
|
||||||
|
return self.filename
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
import zipimport
|
||||||
|
from zipimport import zipimporter
|
||||||
|
|
||||||
|
def iter_zipimport_modules(importer, prefix=''):
|
||||||
|
dirlist = zipimport._zip_directory_cache[importer.archive].keys()
|
||||||
|
dirlist.sort()
|
||||||
|
_prefix = importer.prefix
|
||||||
|
plen = len(_prefix)
|
||||||
|
yielded = {}
|
||||||
|
import inspect
|
||||||
|
for fn in dirlist:
|
||||||
|
if not fn.startswith(_prefix):
|
||||||
|
continue
|
||||||
|
|
||||||
|
fn = fn[plen:].split(os.sep)
|
||||||
|
|
||||||
|
if len(fn)==2 and fn[1].startswith('__init__.py'):
|
||||||
|
if fn[0] not in yielded:
|
||||||
|
yielded[fn[0]] = 1
|
||||||
|
yield fn[0], True
|
||||||
|
|
||||||
|
if len(fn)!=1:
|
||||||
|
continue
|
||||||
|
|
||||||
|
modname = inspect.getmodulename(fn[0])
|
||||||
|
if modname=='__init__':
|
||||||
|
continue
|
||||||
|
|
||||||
|
if modname and '.' not in modname and modname not in yielded:
|
||||||
|
yielded[modname] = 1
|
||||||
|
yield prefix + modname, False
|
||||||
|
|
||||||
|
iter_importer_modules.register(zipimporter, iter_zipimport_modules)
|
||||||
|
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def get_importer(path_item):
|
||||||
|
"""Retrieve a PEP 302 importer for the given path item
|
||||||
|
|
||||||
|
The returned importer is cached in sys.path_importer_cache
|
||||||
|
if it was newly created by a path hook.
|
||||||
|
|
||||||
|
If there is no importer, a wrapper around the basic import
|
||||||
|
machinery is returned. This wrapper is never inserted into
|
||||||
|
the importer cache (None is inserted instead).
|
||||||
|
|
||||||
|
The cache (or part of it) can be cleared manually if a
|
||||||
|
rescan of sys.path_hooks is necessary.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
importer = sys.path_importer_cache[path_item]
|
||||||
|
except KeyError:
|
||||||
|
for path_hook in sys.path_hooks:
|
||||||
|
try:
|
||||||
|
importer = path_hook(path_item)
|
||||||
|
break
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
importer = None
|
||||||
|
sys.path_importer_cache.setdefault(path_item, importer)
|
||||||
|
|
||||||
|
if importer is None:
|
||||||
|
try:
|
||||||
|
importer = ImpImporter(path_item)
|
||||||
|
except ImportError:
|
||||||
|
importer = None
|
||||||
|
return importer
|
||||||
|
|
||||||
|
|
||||||
|
def iter_importers(fullname=""):
|
||||||
|
"""Yield PEP 302 importers for the given module name
|
||||||
|
|
||||||
|
If fullname contains a '.', the importers will be for the package
|
||||||
|
containing fullname, otherwise they will be importers for sys.meta_path,
|
||||||
|
sys.path, and Python's "classic" import machinery, in that order. If
|
||||||
|
the named module is in a package, that package is imported as a side
|
||||||
|
effect of invoking this function.
|
||||||
|
|
||||||
|
Non PEP 302 mechanisms (e.g. the Windows registry) used by the
|
||||||
|
standard import machinery to find files in alternative locations
|
||||||
|
are partially supported, but are searched AFTER sys.path. Normally,
|
||||||
|
these locations are searched BEFORE sys.path, preventing sys.path
|
||||||
|
entries from shadowing them.
|
||||||
|
|
||||||
|
For this to cause a visible difference in behaviour, there must
|
||||||
|
be a module or package name that is accessible via both sys.path
|
||||||
|
and one of the non PEP 302 file system mechanisms. In this case,
|
||||||
|
the emulation will find the former version, while the builtin
|
||||||
|
import mechanism will find the latter.
|
||||||
|
|
||||||
|
Items of the following types can be affected by this discrepancy:
|
||||||
|
imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY
|
||||||
|
"""
|
||||||
|
if fullname.startswith('.'):
|
||||||
|
raise ImportError("Relative module names not supported")
|
||||||
|
if '.' in fullname:
|
||||||
|
# Get the containing package's __path__
|
||||||
|
pkg = '.'.join(fullname.split('.')[:-1])
|
||||||
|
if pkg not in sys.modules:
|
||||||
|
__import__(pkg)
|
||||||
|
path = getattr(sys.modules[pkg], '__path__', None) or []
|
||||||
|
else:
|
||||||
|
for importer in sys.meta_path:
|
||||||
|
yield importer
|
||||||
|
path = sys.path
|
||||||
|
for item in path:
|
||||||
|
yield get_importer(item)
|
||||||
|
if '.' not in fullname:
|
||||||
|
yield ImpImporter()
|
||||||
|
|
||||||
|
def get_loader(module_or_name):
|
||||||
|
"""Get a PEP 302 "loader" object for module_or_name
|
||||||
|
|
||||||
|
If the module or package is accessible via the normal import
|
||||||
|
mechanism, a wrapper around the relevant part of that machinery
|
||||||
|
is returned. Returns None if the module cannot be found or imported.
|
||||||
|
If the named module is not already imported, its containing package
|
||||||
|
(if any) is imported, in order to establish the package __path__.
|
||||||
|
|
||||||
|
This function uses iter_importers(), and is thus subject to the same
|
||||||
|
limitations regarding platform-specific special import locations such
|
||||||
|
as the Windows registry.
|
||||||
|
"""
|
||||||
|
if module_or_name in sys.modules:
|
||||||
|
module_or_name = sys.modules[module_or_name]
|
||||||
|
if isinstance(module_or_name, ModuleType):
|
||||||
|
module = module_or_name
|
||||||
|
loader = getattr(module, '__loader__', None)
|
||||||
|
if loader is not None:
|
||||||
|
return loader
|
||||||
|
fullname = module.__name__
|
||||||
|
else:
|
||||||
|
fullname = module_or_name
|
||||||
|
return find_loader(fullname)
|
||||||
|
|
||||||
|
def find_loader(fullname):
|
||||||
|
"""Find a PEP 302 "loader" object for fullname
|
||||||
|
|
||||||
|
If fullname contains dots, path must be the containing package's __path__.
|
||||||
|
Returns None if the module cannot be found or imported. This function uses
|
||||||
|
iter_importers(), and is thus subject to the same limitations regarding
|
||||||
|
platform-specific special import locations such as the Windows registry.
|
||||||
|
"""
|
||||||
|
for importer in iter_importers(fullname):
|
||||||
|
loader = importer.find_module(fullname)
|
||||||
|
if loader is not None:
|
||||||
|
return loader
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def extend_path(path, name):
|
||||||
|
"""Extend a package's path.
|
||||||
|
|
||||||
|
Intended use is to place the following code in a package's __init__.py:
|
||||||
|
|
||||||
|
from pkgutil import extend_path
|
||||||
|
__path__ = extend_path(__path__, __name__)
|
||||||
|
|
||||||
|
This will add to the package's __path__ all subdirectories of
|
||||||
|
directories on sys.path named after the package. This is useful
|
||||||
|
if one wants to distribute different parts of a single logical
|
||||||
|
package as multiple directories.
|
||||||
|
|
||||||
|
It also looks for *.pkg files beginning where * matches the name
|
||||||
|
argument. This feature is similar to *.pth files (see site.py),
|
||||||
|
except that it doesn't special-case lines starting with 'import'.
|
||||||
|
A *.pkg file is trusted at face value: apart from checking for
|
||||||
|
duplicates, all entries found in a *.pkg file are added to the
|
||||||
|
path, regardless of whether they exist on the filesystem. (This
|
||||||
|
is a feature.)
|
||||||
|
|
||||||
|
If the input path is not a list (as is the case for frozen
|
||||||
|
packages) it is returned unchanged. The input path is not
|
||||||
|
modified; an extended copy is returned. Items are only appended
|
||||||
|
to the copy at the end.
|
||||||
|
|
||||||
|
It is assumed that sys.path is a sequence. Items of sys.path that
|
||||||
|
are not (unicode or 8-bit) strings referring to existing
|
||||||
|
directories are ignored. Unicode items of sys.path that cause
|
||||||
|
errors when used as filenames may cause this function to raise an
|
||||||
|
exception (in line with os.path.isdir() behavior).
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not isinstance(path, list):
|
||||||
|
# This could happen e.g. when this is called from inside a
|
||||||
|
# frozen package. Return the path unchanged in that case.
|
||||||
|
return path
|
||||||
|
|
||||||
|
pname = os.path.join(*name.split('.')) # Reconstitute as relative path
|
||||||
|
# Just in case os.extsep != '.'
|
||||||
|
sname = os.extsep.join(name.split('.'))
|
||||||
|
sname_pkg = sname + os.extsep + "pkg"
|
||||||
|
init_py = "__init__" + os.extsep + "py"
|
||||||
|
|
||||||
|
path = path[:] # Start with a copy of the existing path
|
||||||
|
|
||||||
|
for dir in sys.path:
|
||||||
|
if not isinstance(dir, basestring) or not os.path.isdir(dir):
|
||||||
|
continue
|
||||||
|
subdir = os.path.join(dir, pname)
|
||||||
|
# XXX This may still add duplicate entries to path on
|
||||||
|
# case-insensitive filesystems
|
||||||
|
initfile = os.path.join(subdir, init_py)
|
||||||
|
if subdir not in path and os.path.isfile(initfile):
|
||||||
|
path.append(subdir)
|
||||||
|
# XXX Is this the right thing for subpackages like zope.app?
|
||||||
|
# It looks for a file named "zope.app.pkg"
|
||||||
|
pkgfile = os.path.join(dir, sname_pkg)
|
||||||
|
if os.path.isfile(pkgfile):
|
||||||
|
try:
|
||||||
|
f = open(pkgfile)
|
||||||
|
except IOError:
|
||||||
|
msg = sys.exc_info()[1]
|
||||||
|
sys.stderr.write("Can't open %s: %s\n" %
|
||||||
|
(pkgfile, msg))
|
||||||
|
else:
|
||||||
|
for line in f:
|
||||||
|
line = line.rstrip('\n')
|
||||||
|
if not line or line.startswith('#'):
|
||||||
|
continue
|
||||||
|
path.append(line) # Don't check for existence!
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
return path
|
||||||
|
|
||||||
|
def get_data(package, resource):
|
||||||
|
"""Get a resource from a package.
|
||||||
|
|
||||||
|
This is a wrapper round the PEP 302 loader get_data API. The package
|
||||||
|
argument should be the name of a package, in standard module format
|
||||||
|
(foo.bar). The resource argument should be in the form of a relative
|
||||||
|
filename, using '/' as the path separator. The parent directory name '..'
|
||||||
|
is not allowed, and nor is a rooted name (starting with a '/').
|
||||||
|
|
||||||
|
The function returns a binary string, which is the contents of the
|
||||||
|
specified resource.
|
||||||
|
|
||||||
|
For packages located in the filesystem, which have already been imported,
|
||||||
|
this is the rough equivalent of
|
||||||
|
|
||||||
|
d = os.path.dirname(sys.modules[package].__file__)
|
||||||
|
data = open(os.path.join(d, resource), 'rb').read()
|
||||||
|
|
||||||
|
If the package cannot be located or loaded, or it uses a PEP 302 loader
|
||||||
|
which does not support get_data(), then None is returned.
|
||||||
|
"""
|
||||||
|
|
||||||
|
loader = get_loader(package)
|
||||||
|
if loader is None or not hasattr(loader, 'get_data'):
|
||||||
|
return None
|
||||||
|
mod = sys.modules.get(package) or loader.load_module(package)
|
||||||
|
if mod is None or not hasattr(mod, '__file__'):
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Modify the resource name to be compatible with the loader.get_data
|
||||||
|
# signature - an os.path format "filename" starting with the dirname of
|
||||||
|
# the package's __file__
|
||||||
|
parts = resource.split('/')
|
||||||
|
parts.insert(0, os.path.dirname(mod.__file__))
|
||||||
|
resource_name = os.path.join(*parts)
|
||||||
|
return loader.get_data(resource_name)
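A hedged usage sketch (package and resource names illustrative): read a file that sits next to a package's modules.

    data = get_data('mitogen', 'core.py')   # bytes, or None if unsupported
    if data is not None:
        print(len(data))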
mitogen-0.3.9/mitogen/compat/tokenize.py (new file, 453 lines)
@@ -0,0 +1,453 @@
"""Tokenization help for Python programs.

generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens. It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF). It generates
5-tuples with these members:

    the token type (see token.py)
    the token (a string)
    the starting (row, column) indices of the token (a 2-tuple of ints)
    the ending (row, column) indices of the token (a 2-tuple of ints)
    the original line (string)

It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators

Older entry points
    tokenize_loop(readline, tokeneater)
    tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""

# !mitogen: minify_safe

__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
               'Skip Montanaro, Raymond Hettinger')

from itertools import chain
import string, re
from token import *

import token
__all__ = [x for x in dir(token) if not x.startswith("_")]
__all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
del token

COMMENT = N_TOKENS
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1
tok_name[NL] = 'NL'
N_TOKENS += 2

def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'

Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'

Hexnumber = r'0[xX][\da-fA-F]+[lL]?'
Octnumber = r'(0[oO][0-7]+)|(0[0-7]*)[lL]?'
Binnumber = r'0[bB][01]+[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'\d+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)

# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[uUbB]?[rR]?'''", '[uUbB]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
               r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')

# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
                 r"//=?",
                 r"[+\-*/%&|^=<>]=?",
                 r"~")

Bracket = '[][(){}]'
Special = group(r'\r?\n', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)

PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken

# First (or only) line of ' or " string.
ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
                group("'", r'\\\r?\n'),
                r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
                group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n|\Z', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)

tokenprog, pseudoprog, single3prog, double3prog = map(
    re.compile, (Token, PseudoToken, Single3, Double3))
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
            "'''": single3prog, '"""': double3prog,
            "r'''": single3prog, 'r"""': double3prog,
            "u'''": single3prog, 'u"""': double3prog,
            "ur'''": single3prog, 'ur"""': double3prog,
            "R'''": single3prog, 'R"""': double3prog,
            "U'''": single3prog, 'U"""': double3prog,
            "uR'''": single3prog, 'uR"""': double3prog,
            "Ur'''": single3prog, 'Ur"""': double3prog,
            "UR'''": single3prog, 'UR"""': double3prog,
            "b'''": single3prog, 'b"""': double3prog,
            "br'''": single3prog, 'br"""': double3prog,
            "B'''": single3prog, 'B"""': double3prog,
            "bR'''": single3prog, 'bR"""': double3prog,
            "Br'''": single3prog, 'Br"""': double3prog,
            "BR'''": single3prog, 'BR"""': double3prog,
            'r': None, 'R': None, 'u': None, 'U': None,
            'b': None, 'B': None}

triple_quoted = {}
for t in ("'''", '"""',
          "r'''", 'r"""', "R'''", 'R"""',
          "u'''", 'u"""', "U'''", 'U"""',
          "ur'''", 'ur"""', "Ur'''", 'Ur"""',
          "uR'''", 'uR"""', "UR'''", 'UR"""',
          "b'''", 'b"""', "B'''", 'B"""',
          "br'''", 'br"""', "Br'''", 'Br"""',
          "bR'''", 'bR"""', "BR'''", 'BR"""'):
    triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
          "r'", 'r"', "R'", 'R"',
          "u'", 'u"', "U'", 'U"',
          "ur'", 'ur"', "Ur'", 'Ur"',
          "uR'", 'uR"', "UR'", 'UR"',
          "b'", 'b"', "B'", 'B"',
          "br'", 'br"', "Br'", 'Br"',
          "bR'", 'bR"', "BR'", 'BR"' ):
    single_quoted[t] = t

tabsize = 8

class TokenError(Exception): pass

class StopTokenizing(Exception): pass

def printtoken(type, token, srow_scol, erow_ecol, line): # for testing
    srow, scol = srow_scol
    erow, ecol = erow_ecol
    print("%d,%d-%d,%d:\t%s\t%s" % \
        (srow, scol, erow, ecol, tok_name[type], repr(token)))

def tokenize(readline, tokeneater=printtoken):
    """
    The tokenize() function accepts two parameters: one representing the
    input stream, and one providing an output mechanism for tokenize().

    The first parameter, readline, must be a callable object which provides
    the same interface as the readline() method of built-in file objects.
    Each call to the function should return one line of input as a string.

    The second parameter, tokeneater, must also be a callable object. It is
    called once for each token, with five arguments, corresponding to the
    tuples generated by generate_tokens().
    """
    try:
        tokenize_loop(readline, tokeneater)
    except StopTokenizing:
        pass

# backwards compatible interface
def tokenize_loop(readline, tokeneater):
    for token_info in generate_tokens(readline):
        tokeneater(*token_info)

class Untokenizer:

    def __init__(self):
        self.tokens = []
        self.prev_row = 1
        self.prev_col = 0

    def add_whitespace(self, start):
        row, col = start
        if row < self.prev_row or row == self.prev_row and col < self.prev_col:
            raise ValueError("start ({},{}) precedes previous end ({},{})"
                             .format(row, col, self.prev_row, self.prev_col))
        row_offset = row - self.prev_row
        if row_offset:
            self.tokens.append("\\\n" * row_offset)
            self.prev_col = 0
        col_offset = col - self.prev_col
        if col_offset:
            self.tokens.append(" " * col_offset)

    def untokenize(self, iterable):
        it = iter(iterable)
        indents = []
        startline = False
        for t in it:
            if len(t) == 2:
                self.compat(t, it)
                break
            tok_type, token, start, end, line = t
            if tok_type == ENDMARKER:
                break
            if tok_type == INDENT:
                indents.append(token)
                continue
            elif tok_type == DEDENT:
                indents.pop()
                self.prev_row, self.prev_col = end
                continue
            elif tok_type in (NEWLINE, NL):
                startline = True
            elif startline and indents:
                indent = indents[-1]
                if start[1] >= len(indent):
                    self.tokens.append(indent)
                    self.prev_col = len(indent)
                startline = False
            self.add_whitespace(start)
            self.tokens.append(token)
            self.prev_row, self.prev_col = end
            if tok_type in (NEWLINE, NL):
                self.prev_row += 1
                self.prev_col = 0
        return "".join(self.tokens)

    def compat(self, token, iterable):
        indents = []
        toks_append = self.tokens.append
        startline = token[0] in (NEWLINE, NL)
        prevstring = False

        for tok in chain([token], iterable):
            toknum, tokval = tok[:2]

            if toknum in (NAME, NUMBER):
                tokval += ' '

            # Insert a space between two consecutive strings
            if toknum == STRING:
                if prevstring:
                    tokval = ' ' + tokval
                prevstring = True
            else:
                prevstring = False

            if toknum == INDENT:
                indents.append(tokval)
                continue
            elif toknum == DEDENT:
                indents.pop()
                continue
            elif toknum in (NEWLINE, NL):
                startline = True
            elif startline and indents:
                toks_append(indents[-1])
                startline = False
            toks_append(tokval)

def untokenize(iterable):
    """Transform tokens back into Python source code.

    Each element returned by the iterable must be a token sequence
    with at least two elements, a token number and token value. If
    only two tokens are passed, the resulting output is poor.

    Round-trip invariant for full input:
        Untokenized source will match input source exactly

    Round-trip invariant for limited intput:
        # Output text will tokenize the back to the input
        t1 = [tok[:2] for tok in generate_tokens(f.readline)]
        newcode = untokenize(t1)
        readline = iter(newcode.splitlines(1)).next
        t2 = [tok[:2] for tok in generate_tokens(readline)]
        assert t1 == t2
    """
    ut = Untokenizer()
    return ut.untokenize(iterable)

def generate_tokens(readline):
    """
    The generate_tokens() generator requires one argument, readline, which
    must be a callable object which provides the same interface as the
    readline() method of built-in file objects. Each call to the function
    should return one line of input as a string. Alternately, readline
    can be a callable function terminating with StopIteration:
        readline = open(myfile).next # Example of alternate readline

    The generator produces 5-tuples with these members: the token type; the
    token string; a 2-tuple (srow, scol) of ints specifying the row and
    column where the token begins in the source; a 2-tuple (erow, ecol) of
    ints specifying the row and column where the token ends in the source;
    and the line on which the token was found. The line passed is the
    logical line; continuation lines are included.
    """
    lnum = parenlev = continued = 0
    namechars, numchars = string.ascii_letters + '_', '0123456789'
    contstr, needcont = '', 0
    contline = None
    indents = [0]

    while 1: # loop over lines in stream
        try:
            line = readline()
        except StopIteration:
            line = ''
        lnum += 1
        pos, max = 0, len(line)

        if contstr: # continued string
            if not line:
                raise TokenError("EOF in multi-line string", strstart)
            endmatch = endprog.match(line)
            if endmatch:
                pos = end = endmatch.end(0)
                yield (STRING, contstr + line[:end],
                       strstart, (lnum, end), contline + line)
                contstr, needcont = '', 0
                contline = None
            elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
                yield (ERRORTOKEN, contstr + line,
                       strstart, (lnum, len(line)), contline)
                contstr = ''
                contline = None
                continue
            else:
                contstr = contstr + line
                contline = contline + line
                continue

        elif parenlev == 0 and not continued: # new statement
            if not line: break
            column = 0
            while pos < max: # measure leading whitespace
                if line[pos] == ' ':
                    column += 1
                elif line[pos] == '\t':
                    column = (column//tabsize + 1)*tabsize
                elif line[pos] == '\f':
                    column = 0
                else:
                    break
                pos += 1
            if pos == max:
                break

            if line[pos] in '#\r\n': # skip comments or blank lines
                if line[pos] == '#':
                    comment_token = line[pos:].rstrip('\r\n')
                    nl_pos = pos + len(comment_token)
                    yield (COMMENT, comment_token,
                           (lnum, pos), (lnum, pos + len(comment_token)), line)
                    yield (NL, line[nl_pos:],
                           (lnum, nl_pos), (lnum, len(line)), line)
                else:
                    yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
                           (lnum, pos), (lnum, len(line)), line)
                continue

            if column > indents[-1]: # count indents or dedents
                indents.append(column)
                yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
            while column < indents[-1]:
                if column not in indents:
                    raise IndentationError(
                        "unindent does not match any outer indentation level",
                        ("<tokenize>", lnum, pos, line))
                indents = indents[:-1]
                yield (DEDENT, '', (lnum, pos), (lnum, pos), line)

        else: # continued statement
            if not line:
                raise TokenError("EOF in multi-line statement", (lnum, 0))
            continued = 0

        while pos < max:
            pseudomatch = pseudoprog.match(line, pos)
            if pseudomatch: # scan for tokens
                start, end = pseudomatch.span(1)
                spos, epos, pos = (lnum, start), (lnum, end), end
                if start == end:
                    continue
                token, initial = line[start:end], line[start]

                if initial in numchars or \
                   (initial == '.' and token != '.'): # ordinary number
                    yield (NUMBER, token, spos, epos, line)
                elif initial in '\r\n':
                    if parenlev > 0:
                        n = NL
                    else:
                        n = NEWLINE
                    yield (n, token, spos, epos, line)
                elif initial == '#':
                    assert not token.endswith("\n")
                    yield (COMMENT, token, spos, epos, line)
                elif token in triple_quoted:
                    endprog = endprogs[token]
                    endmatch = endprog.match(line, pos)
                    if endmatch: # all on one line
                        pos = endmatch.end(0)
                        token = line[start:pos]
                        yield (STRING, token, spos, (lnum, pos), line)
                    else:
                        strstart = (lnum, start) # multiple lines
                        contstr = line[start:]
                        contline = line
                        break
                elif initial in single_quoted or \
                    token[:2] in single_quoted or \
                    token[:3] in single_quoted:
                    if token[-1] == '\n': # continued string
                        strstart = (lnum, start)
                        endprog = (endprogs[initial] or endprogs[token[1]] or
                                   endprogs[token[2]])
                        contstr, needcont = line[start:], 1
                        contline = line
                        break
                    else: # ordinary string
                        yield (STRING, token, spos, epos, line)
                elif initial in namechars: # ordinary name
                    yield (NAME, token, spos, epos, line)
                elif initial == '\\': # continued stmt
                    continued = 1
                else:
                    if initial in '([{':
                        parenlev += 1
                    elif initial in ')]}':
                        parenlev -= 1
                    yield (OP, token, spos, epos, line)
            else:
                yield (ERRORTOKEN, line[pos],
                       (lnum, pos), (lnum, pos+1), line)
                pos += 1

    for indent in indents[1:]: # pop remaining indent levels
        yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
    yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')

if __name__ == '__main__': # testing
    import sys
    if len(sys.argv) > 1:
        tokenize(open(sys.argv[1]).readline)
    else:
        tokenize(sys.stdin.readline)
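As a rough illustration of the tokenizer API added above, which mirrors the stdlib tokenize module's generate_tokens()/untokenize() interface, here is a small round-trip sketch; the sample source string is made up and the snippet runs against the standard library module as well:

# Sketch of the generate_tokens()/untokenize() round trip described in the
# docstrings above; the sample source is illustrative only.
import io
import tokenize

source = "x = 1 + 2\nprint(x)\n"

# Each token is (type, string, (srow, scol), (erow, ecol), line).
pairs = [tok[:2] for tok in tokenize.generate_tokens(io.StringIO(source).readline)]

# Rebuilding from (type, string) pairs yields source that tokenizes identically.
rebuilt = tokenize.untokenize(pairs)
assert [t[:2] for t in tokenize.generate_tokens(io.StringIO(rebuilt).readline)] == pairs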
4196
mitogen-0.3.9/mitogen/core.py
Normal file
4196
mitogen-0.3.9/mitogen/core.py
Normal file
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff