ansible 2.9.27
About: Ansible (2.x) is an IT Configuration Management, Deployment & Orchestration tool.
Fossies Dox: ansible-2.9.27.tar.gz ("unofficial" and yet experimental doxygen-generated source code documentation)

basic.py
1# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
2# Copyright (c), Toshio Kuratomi <tkuratomi@ansible.com> 2016
3# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
4
5from __future__ import absolute_import, division, print_function
6
7FILE_ATTRIBUTES = {
8 'A': 'noatime',
9 'a': 'append',
10 'c': 'compressed',
11 'C': 'nocow',
12 'd': 'nodump',
13 'D': 'dirsync',
14 'e': 'extents',
15 'E': 'encrypted',
16 'h': 'blocksize',
17 'i': 'immutable',
18 'I': 'indexed',
19 'j': 'journalled',
20 'N': 'inline',
21 's': 'zero',
22 'S': 'synchronous',
23 't': 'notail',
24 'T': 'blockroot',
25 'u': 'undelete',
26 'X': 'compressedraw',
27 'Z': 'compresseddirty',
28}
29
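# --- Illustrative usage sketch (annotation, not part of basic.py) ---
# FILE_ATTRIBUTES maps the single-character flags printed by lsattr(1) to
# human-readable names; format_attributes(), imported further down, performs
# essentially the same translation for modules. A minimal sketch:
#
#   >>> [FILE_ATTRIBUTES.get(flag, flag) for flag in 'ei']
#   ['extents', 'immutable']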
30# Ansible modules can be written in any language.
31# The functions available here can be used to do many common tasks,
32# to simplify development of Python modules.
33
34import __main__
35import atexit
36import errno
37import datetime
38import grp
39import fcntl
40import locale
41import os
42import pwd
43import platform
44import re
45import select
46import shlex
47import shutil
48import signal
49import stat
50import subprocess
51import sys
52import tempfile
53import time
54import traceback
55import types
56
57from collections import deque
58from itertools import chain, repeat
59
60try:
61 import syslog
62 HAS_SYSLOG = True
63except ImportError:
64 HAS_SYSLOG = False
65
66try:
67 from systemd import journal
68 has_journal = True
69except ImportError:
70 has_journal = False
71
72HAVE_SELINUX = False
73try:
74 import selinux
75 HAVE_SELINUX = True
76except ImportError:
77 pass
78
79# Python2 & 3 way to get NoneType
80NoneType = type(None)
81
82from ansible.module_utils.compat import selectors
83
84from ._text import to_native, to_bytes, to_text
85from ansible.module_utils.common.text.converters import (
86 jsonify,
87 container_to_bytes as json_dict_unicode_to_bytes,
88 container_to_text as json_dict_bytes_to_unicode,
89)
90
91from ansible.module_utils.common.text.formatters import (
92 lenient_lowercase,
93 bytes_to_human,
94 human_to_bytes,
95 SIZE_RANGES,
96)
97
98try:
99 import json
100except ImportError as e:
101 print('\n{{"msg": "Error: ansible requires the stdlib json: {0}", "failed": true}}'.format(to_native(e)))
102 sys.exit(1)
103
104
105AVAILABLE_HASH_ALGORITHMS = dict()
106try:
107 import hashlib
108
109 # python 2.7.9+ and 2.7.0+
110 for attribute in ('available_algorithms', 'algorithms'):
111 algorithms = getattr(hashlib, attribute, None)
112 if algorithms:
113 break
114 if algorithms is None:
115 # python 2.5+
116 algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
117 for algorithm in algorithms:
118 AVAILABLE_HASH_ALGORITHMS[algorithm] = getattr(hashlib, algorithm)
119
120 # we may have been able to import md5 but it could still not be available
121 try:
122 hashlib.md5()
123 except ValueError:
124 AVAILABLE_HASH_ALGORITHMS.pop('md5', None)
125except Exception:
126 import sha
127 AVAILABLE_HASH_ALGORITHMS = {'sha1': sha.sha}
128 try:
129 import md5
130 AVAILABLE_HASH_ALGORITHMS['md5'] = md5.md5
131 except Exception:
132 pass
133
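# --- Illustrative usage sketch (annotation, not part of basic.py) ---
# A file-hashing helper built on the table above might look like the following
# hypothetical function (AnsibleModule.digest_from_file(), defined later in
# this file, works along these lines):
#
#   def example_checksum(path, algorithm='sha1'):
#       digest = AVAILABLE_HASH_ALGORITHMS[algorithm]()
#       with open(path, 'rb') as f:
#           for chunk in iter(lambda: f.read(64 * 1024), b''):
#               digest.update(chunk)
#       return digest.hexdigest()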
134from ansible.module_utils.common._collections_compat import (
135 KeysView,
136 Mapping, MutableMapping,
137 Sequence, MutableSequence,
138 Set, MutableSet,
139)
140from ansible.module_utils.common.process import get_bin_path
141from ansible.module_utils.common.file import (
142 _PERM_BITS as PERM_BITS,
143 _EXEC_PERM_BITS as EXEC_PERM_BITS,
144 _DEFAULT_PERM as DEFAULT_PERM,
145 is_executable,
146 format_attributes,
147 get_flags_from_attributes,
148)
149from ansible.module_utils.common.sys_info import (
150 get_distribution,
151 get_distribution_version,
152 get_platform_subclass,
153)
154from ansible.module_utils.pycompat24 import get_exception, literal_eval
155from ansible.module_utils.common.parameters import (
156 handle_aliases,
157 list_deprecations,
158 list_no_log_values,
159 PASS_VARS,
160 PASS_BOOLS,
161)
162
163from ansible.module_utils.six import (
164 PY2,
165 PY3,
166 b,
167 binary_type,
168 integer_types,
169 iteritems,
170 string_types,
171 text_type,
172)
173from ansible.module_utils.six.moves import map, reduce, shlex_quote
174from ansible.module_utils.common.validation import (
175 check_missing_parameters,
176 check_mutually_exclusive,
177 check_required_arguments,
178 check_required_by,
179 check_required_if,
180 check_required_one_of,
181 check_required_together,
182 count_terms,
183 check_type_bool,
184 check_type_bits,
185 check_type_bytes,
186 check_type_float,
187 check_type_int,
188 check_type_jsonarg,
189 check_type_list,
190 check_type_dict,
191 check_type_path,
192 check_type_raw,
193 check_type_str,
194 safe_eval,
195)
196from ansible.module_utils.common._utils import get_all_subclasses as _get_all_subclasses
197from ansible.module_utils.parsing.convert_bool import BOOLEANS, BOOLEANS_FALSE, BOOLEANS_TRUE, boolean
198
199# Note: When getting Sequence from collections, it matches with strings. If
200# this matters, make sure to check for strings before checking for sequencetype
201SEQUENCETYPE = frozenset, KeysView, Sequence
202
203PASSWORD_MATCH = re.compile(r'^(?:.+[-_\s])?pass(?:[-_\s]?(?:word|phrase|wrd|wd)?)(?:[-_\s].+)?$', re.I)
204
205imap = map
206
207try:
208 # Python 2
209 unicode
210except NameError:
211 # Python 3
212 unicode = text_type
213
214try:
215 # Python 2
216 basestring
217except NameError:
218 # Python 3
219 basestring = string_types
220
221_literal_eval = literal_eval
222
223# End of deprecated names
224
225# Internal global holding passed in params. This is consulted in case
226# multiple AnsibleModules are created. Otherwise each AnsibleModule would
227# attempt to read from stdin. Other code should not use this directly as it
228# is an internal implementation detail
229_ANSIBLE_ARGS = None
230
231
232def env_fallback(*args, **kwargs):
233 ''' Load value from environment '''
234 for arg in args:
235 if arg in os.environ:
236 return os.environ[arg]
237 raise AnsibleFallbackNotFound
238
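# --- Illustrative usage sketch (annotation, not part of basic.py) ---
# env_fallback() is normally wired into an argument_spec entry so that a
# parameter which was not passed falls back to an environment variable, as
# unsafe_writes does below with ANSIBLE_UNSAFE_WRITES. Hypothetical example:
#
#   argument_spec = dict(
#       api_token=dict(type='str', no_log=True,
#                      fallback=(env_fallback, ['EXAMPLE_API_TOKEN'])),
#   )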
239
240FILE_COMMON_ARGUMENTS = dict(
241 # These are things we want. About setting metadata (mode, ownership, permissions in general) on
242 # created files (these are used by set_fs_attributes_if_different and included in
243 # load_file_common_arguments)
244 mode=dict(type='raw'),
245 owner=dict(),
246 group=dict(),
247 seuser=dict(),
248 serole=dict(),
249 selevel=dict(),
250 setype=dict(),
251 attributes=dict(aliases=['attr']),
252
253 # The following are not about perms and should not be in a rewritten file_common_args
254 src=dict(), # Maybe dest or path would be appropriate but src is not
255 follow=dict(type='bool', default=False), # Maybe follow is appropriate because it determines whether to follow symlinks for permission purposes too
256 force=dict(type='bool'),
257
258 # not taken by the file module, but other action plugins call the file module so this ignores
259 # them for now. In the future, the caller should take care of removing these from the module
260 # arguments before calling the file module.
261 content=dict(no_log=True), # used by copy
262 backup=dict(), # Used by a few modules to create a remote backup before updating the file
263 remote_src=dict(), # used by assemble
264 regexp=dict(), # used by assemble
265 delimiter=dict(), # used by assemble
266 directory_mode=dict(), # used by copy
267 unsafe_writes=dict(type='bool', default=False, fallback=(env_fallback, ['ANSIBLE_UNSAFE_WRITES'])), # should be available to any module using atomic_move
268)
269
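# --- Illustrative usage sketch (annotation, not part of basic.py) ---
# Modules opt into these options by passing add_file_common_args=True to
# AnsibleModule (see __init__ below), which merges FILE_COMMON_ARGUMENTS into
# the module's own argument_spec:
#
#   module = AnsibleModule(
#       argument_spec=dict(path=dict(type='path', required=True)),
#       add_file_common_args=True,
#   )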
270PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
271
272# Used for parsing symbolic file perms
273MODE_OPERATOR_RE = re.compile(r'[+=-]')
274USERS_RE = re.compile(r'[^ugo]')
275PERMS_RE = re.compile(r'[^rwxXstugo]')
276
277# Used for determining if the system is running a new enough python version
278# and should only restrict on our documented minimum versions
279_PY3_MIN = sys.version_info[:2] >= (3, 5)
280_PY2_MIN = (2, 6) <= sys.version_info[:2] < (3,)
281_PY_MIN = _PY3_MIN or _PY2_MIN
282if not _PY_MIN:
283 print(
284 '\n{"failed": true, '
285 '"msg": "Ansible requires a minimum of Python2 version 2.6 or Python3 version 3.5. Current version: %s"}' % ''.join(sys.version.splitlines())
286 )
287 sys.exit(1)
288
289
290#
291# Deprecated functions
292#
293
294def get_platform():
295 '''
296 **Deprecated** Use :py:func:`platform.system` directly.
297
298 :returns: Name of the platform the module is running on in a native string
299
300 Returns a native string that labels the platform ("Linux", "Solaris", etc). Currently, this is
301 the result of calling :py:func:`platform.system`.
302 '''
303 return platform.system()
304
305# End deprecated functions
306
307
308#
309# Compat shims
310#
311
312def load_platform_subclass(cls, *args, **kwargs):
313 """**Deprecated**: Use ansible.module_utils.common.sys_info.get_platform_subclass instead"""
314 platform_cls = get_platform_subclass(cls)
315 return super(cls, platform_cls).__new__(platform_cls)
316
317
318def get_all_subclasses(cls):
319 """**Deprecated**: Use ansible.module_utils.common._utils.get_all_subclasses instead"""
320 return list(_get_all_subclasses(cls))
321
322
323# End compat shims
324
325
326def _remove_values_conditions(value, no_log_strings, deferred_removals):
327 """
328 Helper function for :meth:`remove_values`.
329
330 :arg value: The value to check for strings that need to be stripped
331 :arg no_log_strings: set of strings which must be stripped out of any values
332 :arg deferred_removals: List which holds information about nested
333 containers that have to be iterated for removals. It is passed into
334 this function so that more entries can be added to it if value is
335 a container type. The format of each entry is a 2-tuple where the first
336 element is the ``value`` parameter and the second value is a new
337 container to copy the elements of ``value`` into once iterated.
338 :returns: if ``value`` is a scalar, returns ``value`` with two exceptions:
339 1. :class:`~datetime.datetime` objects which are changed into a string representation.
340 2. objects which are in no_log_strings are replaced with a placeholder
341 so that no sensitive data is leaked.
342 If ``value`` is a container type, returns a new empty container.
343
344 ``deferred_removals`` is added to as a side-effect of this function.
345
346 .. warning:: It is up to the caller to make sure the order in which value
347 is passed in is correct. For instance, higher level containers need
348 to be passed in before lower level containers. For example, given
349 ``{'level1': {'level2': 'level3': [True]} }`` first pass in the
350 dictionary for ``level1``, then the dict for ``level2``, and finally
351 the list for ``level3``.
352 """
353 if isinstance(value, (text_type, binary_type)):
354 # Need native str type
355 native_str_value = value
356 if isinstance(value, text_type):
357 value_is_text = True
358 if PY2:
359 native_str_value = to_bytes(value, errors='surrogate_or_strict')
360 elif isinstance(value, binary_type):
361 value_is_text = False
362 if PY3:
363 native_str_value = to_text(value, errors='surrogate_or_strict')
364
365 if native_str_value in no_log_strings:
366 return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
367 for omit_me in no_log_strings:
368 native_str_value = native_str_value.replace(omit_me, '*' * 8)
369
370 if value_is_text and isinstance(native_str_value, binary_type):
371 value = to_text(native_str_value, encoding='utf-8', errors='surrogate_then_replace')
372 elif not value_is_text and isinstance(native_str_value, text_type):
373 value = to_bytes(native_str_value, encoding='utf-8', errors='surrogate_then_replace')
374 else:
375 value = native_str_value
376
377 elif isinstance(value, Sequence):
378 if isinstance(value, MutableSequence):
379 new_value = type(value)()
380 else:
381 new_value = [] # Need a mutable value
382 deferred_removals.append((value, new_value))
383 value = new_value
384
385 elif isinstance(value, Set):
386 if isinstance(value, MutableSet):
387 new_value = type(value)()
388 else:
389 new_value = set() # Need a mutable value
390 deferred_removals.append((value, new_value))
391 value = new_value
392
393 elif isinstance(value, Mapping):
394 if isinstance(value, MutableMapping):
395 new_value = type(value)()
396 else:
397 new_value = {} # Need a mutable value
398 deferred_removals.append((value, new_value))
399 value = new_value
400
401 elif isinstance(value, tuple(chain(integer_types, (float, bool, NoneType)))):
402 stringy_value = to_native(value, encoding='utf-8', errors='surrogate_or_strict')
403 if stringy_value in no_log_strings:
404 return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
405 for omit_me in no_log_strings:
406 if omit_me in stringy_value:
407 return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
408
409 elif isinstance(value, datetime.datetime):
410 value = value.isoformat()
411 else:
412 raise TypeError('Value of unknown type: %s, %s' % (type(value), value))
413
414 return value
415
416
417def remove_values(value, no_log_strings):
418 """ Remove strings in no_log_strings from value. If value is a container
419 type, then remove a lot more.
420
421 Use of deferred_removals exists, rather than a pure recursive solution,
422 because of the potential to hit the maximum recursion depth when dealing with
423 large amounts of data (see issue #24560).
424 """
425
426 deferred_removals = deque()
427
428 no_log_strings = [to_native(s, errors='surrogate_or_strict') for s in no_log_strings]
429 new_value = _remove_values_conditions(value, no_log_strings, deferred_removals)
430
431 while deferred_removals:
432 old_data, new_data = deferred_removals.popleft()
433 if isinstance(new_data, Mapping):
434 for old_key, old_elem in old_data.items():
435 new_elem = _remove_values_conditions(old_elem, no_log_strings, deferred_removals)
436 new_data[old_key] = new_elem
437 else:
438 for elem in old_data:
439 new_elem = _remove_values_conditions(elem, no_log_strings, deferred_removals)
440 if isinstance(new_data, MutableSequence):
441 new_data.append(new_elem)
442 elif isinstance(new_data, MutableSet):
443 new_data.add(new_elem)
444 else:
445 raise TypeError('Unknown container type encountered when removing private values from output')
446
447 return new_value
448
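# --- Illustrative usage sketch (annotation, not part of basic.py) ---
# remove_values() masks every occurrence of a no_log value inside arbitrarily
# nested containers; exit_json/fail_json rely on it to scrub results. Roughly:
#
#   >>> remove_values({'url': 'https://bob:hunter2@example.com/x', 'retries': 3},
#   ...               {'hunter2'})
#   {'url': 'https://bob:********@example.com/x', 'retries': 3}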
449
450def _sanitize_keys_conditions(value, no_log_strings, ignore_keys, deferred_removals):
451 """ Helper method to sanitize_keys() to build deferred_removals and avoid deep recursion. """
452 if isinstance(value, (text_type, binary_type)):
453 return value
454
455 if isinstance(value, Sequence):
456 if isinstance(value, MutableSequence):
457 new_value = type(value)()
458 else:
459 new_value = [] # Need a mutable value
460 deferred_removals.append((value, new_value))
461 return new_value
462
463 if isinstance(value, Set):
464 if isinstance(value, MutableSet):
465 new_value = type(value)()
466 else:
467 new_value = set() # Need a mutable value
468 deferred_removals.append((value, new_value))
469 return new_value
470
471 if isinstance(value, Mapping):
472 if isinstance(value, MutableMapping):
473 new_value = type(value)()
474 else:
475 new_value = {} # Need a mutable value
476 deferred_removals.append((value, new_value))
477 return new_value
478
479 if isinstance(value, tuple(chain(integer_types, (float, bool, NoneType)))):
480 return value
481
482 if isinstance(value, (datetime.datetime, datetime.date)):
483 return value
484
485 raise TypeError('Value of unknown type: %s, %s' % (type(value), value))
486
487
488def sanitize_keys(obj, no_log_strings, ignore_keys=frozenset()):
489 """ Sanitize the keys in a container object by removing no_log values from key names.
490
491 This is a companion function to the `remove_values()` function. Similar to that function,
492 we make use of deferred_removals to avoid hitting maximum recursion depth in cases of
493 large data structures.
494
495 :param obj: The container object to sanitize. Non-container objects are returned unmodified.
496 :param no_log_strings: A set of string values we do not want logged.
497 :param ignore_keys: A set of string values of keys to not sanitize.
498
499 :returns: An object with sanitized keys.
500 """
501
502 deferred_removals = deque()
503
504 no_log_strings = [to_native(s, errors='surrogate_or_strict') for s in no_log_strings]
505 new_value = _sanitize_keys_conditions(obj, no_log_strings, ignore_keys, deferred_removals)
506
507 while deferred_removals:
508 old_data, new_data = deferred_removals.popleft()
509
510 if isinstance(new_data, Mapping):
511 for old_key, old_elem in old_data.items():
512 if old_key in ignore_keys or old_key.startswith('_ansible'):
513 new_data[old_key] = _sanitize_keys_conditions(old_elem, no_log_strings, ignore_keys, deferred_removals)
514 else:
515 # Sanitize the old key. We take advantage of the sanitizing code in
516 # _remove_values_conditions() rather than recreating it here.
517 new_key = _remove_values_conditions(old_key, no_log_strings, None)
518 new_data[new_key] = _sanitize_keys_conditions(old_elem, no_log_strings, ignore_keys, deferred_removals)
519 else:
520 for elem in old_data:
521 new_elem = _sanitize_keys_conditions(elem, no_log_strings, ignore_keys, deferred_removals)
522 if isinstance(new_data, MutableSequence):
523 new_data.append(new_elem)
524 elif isinstance(new_data, MutableSet):
525 new_data.add(new_elem)
526 else:
527 raise TypeError('Unknown container type encountered when removing private values from keys')
528
529 return new_value
530
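# --- Illustrative usage sketch (annotation, not part of basic.py) ---
# sanitize_keys() applies the same masking to dictionary *keys*, skipping
# anything listed in ignore_keys and internal '_ansible*' keys. Roughly:
#
#   >>> sanitize_keys({'hunter2_host': 'db1'}, {'hunter2'})
#   {'********_host': 'db1'}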
531
532def heuristic_log_sanitize(data, no_log_values=None):
533 ''' Remove strings that look like passwords from log messages '''
534 # Currently filters:
535 # user:pass@foo/whatever and http://username:pass@wherever/foo
536 # This code has false positives and consumes parts of logs that are
537 # not passwds
538
539 # begin: start of a passwd containing string
540 # end: end of a passwd containing string
541 # sep: char between user and passwd
542 # prev_begin: where in the overall string to start a search for
543 # a passwd
544 # sep_search_end: where in the string to end a search for the sep
545 data = to_native(data)
546
547 output = []
548 begin = len(data)
549 prev_begin = begin
550 sep = 1
551 while sep:
552 # Find the potential end of a passwd
553 try:
554 end = data.rindex('@', 0, begin)
555 except ValueError:
556 # No passwd in the rest of the data
557 output.insert(0, data[0:begin])
558 break
559
560 # Search for the beginning of a passwd
561 sep = None
562 sep_search_end = end
563 while not sep:
564 # URL-style username+password
565 try:
566 begin = data.rindex('://', 0, sep_search_end)
567 except ValueError:
568 # No url style in the data, check for ssh style in the
569 # rest of the string
570 begin = 0
571 # Search for separator
572 try:
573 sep = data.index(':', begin + 3, end)
574 except ValueError:
575 # No separator; choices:
576 if begin == 0:
577 # Searched the whole string so there's no password
578 # here. Return the remaining data
579 output.insert(0, data[0:begin])
580 break
581 # Search for a different beginning of the password field.
582 sep_search_end = begin
583 continue
584 if sep:
585 # Password was found; remove it.
586 output.insert(0, data[end:prev_begin])
587 output.insert(0, '********')
588 output.insert(0, data[begin:sep + 1])
589 prev_begin = begin
590
591 output = ''.join(output)
592 if no_log_values:
593 output = remove_values(output, no_log_values)
594 return output
595
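# --- Illustrative usage sketch (annotation, not part of basic.py) ---
# heuristic_log_sanitize() masks credential-looking substrings (user:pass@host
# patterns) even when they were never declared no_log. Roughly:
#
#   >>> heuristic_log_sanitize('GET https://bob:hunter2@example.com/repo')
#   'GET https://bob:********@example.com/repo'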
596
597def _load_params():
598 ''' read the modules parameters and store them globally.
599
600 This function may be needed for certain very dynamic custom modules which
601 want to process the parameters that are being handed the module. Since
602 this is so closely tied to the implementation of modules we cannot
603 guarantee API stability for it (it may change between versions) however we
604 will try not to break it gratuitously. It is certainly more future-proof
605 to call this function and consume its outputs than to implement the logic
606 inside it as a copy in your own code.
607 '''
608 global _ANSIBLE_ARGS
609 if _ANSIBLE_ARGS is not None:
610 buffer = _ANSIBLE_ARGS
611 else:
612 # debug overrides to read args from file or cmdline
613
614 # Avoid tracebacks when locale is non-utf8
615 # We control the args and we pass them as utf8
616 if len(sys.argv) > 1:
617 if os.path.isfile(sys.argv[1]):
618 fd = open(sys.argv[1], 'rb')
619 buffer = fd.read()
620 fd.close()
621 else:
622 buffer = sys.argv[1]
623 if PY3:
624 buffer = buffer.encode('utf-8', errors='surrogateescape')
625 # default case, read from stdin
626 else:
627 if PY2:
628 buffer = sys.stdin.read()
629 else:
630 buffer = sys.stdin.buffer.read()
631 _ANSIBLE_ARGS = buffer
632
633 try:
634 params = json.loads(buffer.decode('utf-8'))
635 except ValueError:
636 # This helper used too early for fail_json to work.
637 print('\n{"msg": "Error: Module unable to decode valid JSON on stdin. Unable to figure out what parameters were passed", "failed": true}')
638 sys.exit(1)
639
640 if PY2:
641 params = json_dict_unicode_to_bytes(params)
642
643 try:
644 return params['ANSIBLE_MODULE_ARGS']
645 except KeyError:
646 # This helper does not have access to fail_json so we have to print
647 # json output on our own.
648 print('\n{"msg": "Error: Module unable to locate ANSIBLE_MODULE_ARGS in json data from stdin. Unable to figure out what parameters were passed", '
649 '"failed": true}')
650 sys.exit(1)
651
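# --- Illustrative sketch (annotation, not part of basic.py) ---
# The payload parsed above is a JSON document whose module parameters sit
# under the ANSIBLE_MODULE_ARGS key; internal '_ansible_*' entries carry the
# PASS_VARS values. Roughly:
#
#   {"ANSIBLE_MODULE_ARGS": {"path": "/tmp/example",
#                            "state": "present",
#                            "_ansible_check_mode": false}}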
652
653def missing_required_lib(library, reason=None, url=None):
654 hostname = platform.node()
655 msg = "Failed to import the required Python library (%s) on %s's Python %s." % (library, hostname, sys.executable)
656 if reason:
657 msg += " This is required %s." % reason
658 if url:
659 msg += " See %s for more info." % url
660
661 msg += (" Please read module documentation and install in the appropriate location."
662 " If the required library is installed, but Ansible is using the wrong Python interpreter,"
663 " please consult the documentation on ansible_python_interpreter")
664 return msg
665
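# --- Illustrative usage sketch (annotation, not part of basic.py) ---
# The usual pattern is to attempt the import up front and report the standard
# message via fail_json if it is missing (example_sdk is hypothetical):
#
#   try:
#       import example_sdk
#       HAS_EXAMPLE_SDK = True
#   except ImportError:
#       HAS_EXAMPLE_SDK = False
#
#   if not HAS_EXAMPLE_SDK:
#       module.fail_json(msg=missing_required_lib('example_sdk'))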
666
667class AnsibleFallbackNotFound(Exception):
668 pass
669
670
671class AnsibleModule(object):
672 def __init__(self, argument_spec, bypass_checks=False, no_log=False,
673 check_invalid_arguments=None, mutually_exclusive=None, required_together=None,
674 required_one_of=None, add_file_common_args=False, supports_check_mode=False,
675 required_if=None, required_by=None):
676
677 '''
678 Common code for quickly building an ansible module in Python
679 (although you can write modules with anything that can return JSON).
680
681 See :ref:`developing_modules_general` for a general introduction
682 and :ref:`developing_program_flow_modules` for more detailed explanation.
683 '''
684
685 self._name = os.path.basename(__file__) # initialize name until we can parse from options
686 self.argument_spec = argument_spec
687 self.supports_check_mode = supports_check_mode
688 self.check_mode = False
689 self.bypass_checks = bypass_checks
690 self.no_log = no_log
691
692 # Check whether code set this explicitly for deprecation purposes
693 if check_invalid_arguments is None:
694 check_invalid_arguments = True
695 module_set_check_invalid_arguments = False
696 else:
697 module_set_check_invalid_arguments = True
698 self.check_invalid_arguments = check_invalid_arguments
699
700 self.mutually_exclusive = mutually_exclusive
701 self.required_together = required_together
702 self.required_one_of = required_one_of
703 self.required_if = required_if
704 self.required_by = required_by
705 self.cleanup_files = []
706 self._debug = False
707 self._diff = False
708 self._socket_path = None
709 self._shell = None
710 self._verbosity = 0
711 # May be used to set modifications to the environment for any
712 # run_command invocation
713 self.run_command_environ_update = {}
714 self._warnings = []
715 self._deprecations = []
716 self._clean = {}
717 self._string_conversion_action = ''
718
719 self.aliases = {}
720 self._legal_inputs = []
721 self._options_context = list()
722 self._tmpdir = None
723
724 if add_file_common_args:
725 for k, v in FILE_COMMON_ARGUMENTS.items():
726 if k not in self.argument_spec:
727 self.argument_spec[k] = v
728
729 # Save parameter values that should never be logged
730 self.no_log_values = set()
731
732 self._load_params()
733 self._set_fallbacks()
734
735 # append to legal_inputs and then possibly check against them
736 try:
737 self.aliases = self._handle_aliases()
738 except (ValueError, TypeError) as e:
739 # Use exceptions here because it isn't safe to call fail_json until no_log is processed
740 print('\n{"failed": true, "msg": "Module alias error: %s"}' % to_native(e))
741 sys.exit(1)
742
743 self._handle_no_log_values()
744
745 # check the locale as set by the current environment, and reset to
746 # a known valid (LANG=C) if it's an invalid/unavailable locale
747 self._check_locale()
748
749 self._check_arguments(check_invalid_arguments)
750
751 # check exclusive early
752 if not bypass_checks:
753 self._check_mutually_exclusive(mutually_exclusive)
754
755 self._set_defaults(pre=True)
756
757 self._CHECK_ARGUMENT_TYPES_DISPATCHER = {
758 'str': self._check_type_str,
759 'list': self._check_type_list,
760 'dict': self._check_type_dict,
761 'bool': self._check_type_bool,
762 'int': self._check_type_int,
763 'float': self._check_type_float,
764 'path': self._check_type_path,
765 'raw': self._check_type_raw,
766 'jsonarg': self._check_type_jsonarg,
767 'json': self._check_type_jsonarg,
768 'bytes': self._check_type_bytes,
769 'bits': self._check_type_bits,
770 }
771 if not bypass_checks:
772 self._check_required_arguments()
773 self._check_argument_types()
774 self._check_argument_values()
775 self._check_required_together(required_together)
776 self._check_required_one_of(required_one_of)
777 self._check_required_if(required_if)
778 self._check_required_by(required_by)
779
780 self._set_defaults(pre=False)
781
782 # deal with options sub-spec
783 self._handle_options()
784
785 if not self.no_log:
786 self._log_invocation()
787
788 # finally, make sure we're in a sane working dir
789 self._set_cwd()
790
791 # Do this at the end so that logging parameters have been set up
792 # This is to warn third party module authors that the functionality is going away.
793 # We exclude uri and zfs as they have their own deprecation warnings for users and we'll
794 # make sure to update their code to stop using check_invalid_arguments when 2.9 rolls around
795 if module_set_check_invalid_arguments and self._name not in ('uri', 'zfs'):
796 self.deprecate('Setting check_invalid_arguments is deprecated and will be removed.'
797 ' Update the code for this module In the future, AnsibleModule will'
798 ' always check for invalid arguments.', version='2.9')
799
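# --- Illustrative usage sketch (annotation, not part of basic.py) ---
# A minimal module built on this class looks roughly like the following
# (all names below are hypothetical):
#
#   def main():
#       module = AnsibleModule(
#           argument_spec=dict(
#               name=dict(type='str', required=True),
#               state=dict(type='str', default='present', choices=['present', 'absent']),
#           ),
#           supports_check_mode=True,
#       )
#       changed = module.params['state'] == 'present'
#       module.exit_json(changed=changed, name=module.params['name'])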
800 @property
801 def tmpdir(self):
802 # if _ansible_tmpdir was not set and we have a remote_tmp,
803 # the module needs to create it and clean it up once finished.
804 # otherwise we create our own module tmp dir from the system defaults
805 if self._tmpdir is None:
806 basedir = None
807
808 if self._remote_tmp is not None:
809 basedir = os.path.expanduser(os.path.expandvars(self._remote_tmp))
810
811 if basedir is not None and not os.path.exists(basedir):
812 try:
813 os.makedirs(basedir, mode=0o700)
814 except (OSError, IOError) as e:
815 self.warn("Unable to use %s as temporary directory, "
816 "failing back to system: %s" % (basedir, to_native(e)))
817 basedir = None
818 else:
819 self.warn("Module remote_tmp %s did not exist and was "
820 "created with a mode of 0700, this may cause"
821 " issues when running as another user. To "
822 "avoid this, create the remote_tmp dir with "
823 "the correct permissions manually" % basedir)
824
825 basefile = "ansible-moduletmp-%s-" % time.time()
826 try:
827 tmpdir = tempfile.mkdtemp(prefix=basefile, dir=basedir)
828 except (OSError, IOError) as e:
829 self.fail_json(
830 msg="Failed to create remote module tmp path at dir %s "
831 "with prefix %s: %s" % (basedir, basefile, to_native(e))
832 )
833 if not self._keep_remote_files:
834 atexit.register(shutil.rmtree, tmpdir)
835 self._tmpdir = tmpdir
836
837 return self._tmpdir
838
839 def warn(self, warning):
840
841 if isinstance(warning, string_types):
842 self._warnings.append(warning)
843 self.log('[WARNING] %s' % warning)
844 else:
845 raise TypeError("warn requires a string not a %s" % type(warning))
846
847 def deprecate(self, msg, version=None, date=None, collection_name=None):
848 # `date` and `collection_name` are Ansible 2.10 parameters. We accept and ignore them,
849 # to avoid modules/plugins from 2.10 conformant collections to break with new enough
850 # versions of Ansible 2.9.
851 if isinstance(msg, string_types):
852 self._deprecations.append({
853 'msg': msg,
854 'version': version
855 })
856 self.log('[DEPRECATION WARNING] %s %s' % (msg, version))
857 else:
858 raise TypeError("deprecate requires a string not a %s" % type(msg))
859
860 def load_file_common_arguments(self, params):
861 '''
862 many modules deal with files, this encapsulates common
863 options that the file module accepts such that it is directly
864 available to all modules and they can share code.
865 '''
866
867 path = params.get('path', params.get('dest', None))
868 if path is None:
869 return {}
870 else:
871 path = os.path.expanduser(os.path.expandvars(path))
872
873 b_path = to_bytes(path, errors='surrogate_or_strict')
874 # if the path is a symlink, and we're following links, get
875 # the target of the link instead for testing
876 if params.get('follow', False) and os.path.islink(b_path):
877 b_path = os.path.realpath(b_path)
878 path = to_native(b_path)
879
880 mode = params.get('mode', None)
881 owner = params.get('owner', None)
882 group = params.get('group', None)
883
884 # selinux related options
885 seuser = params.get('seuser', None)
886 serole = params.get('serole', None)
887 setype = params.get('setype', None)
888 selevel = params.get('selevel', None)
889 secontext = [seuser, serole, setype]
890
891 if self.selinux_mls_enabled():
892 secontext.append(selevel)
893
894 default_secontext = self.selinux_default_context(path)
895 for i in range(len(default_secontext)):
896 if i is not None and secontext[i] == '_default':
897 secontext[i] = default_secontext[i]
898
899 attributes = params.get('attributes', None)
900 return dict(
901 path=path, mode=mode, owner=owner, group=group,
902 seuser=seuser, serole=serole, setype=setype,
903 selevel=selevel, secontext=secontext, attributes=attributes,
904 )
905
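# --- Illustrative usage sketch (annotation, not part of basic.py) ---
# File-handling modules typically pair this helper with
# set_fs_attributes_if_different(), defined further down:
#
#   file_args = module.load_file_common_arguments(module.params)
#   changed = module.set_fs_attributes_if_different(file_args, changed)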
906 # Detect whether using selinux that is MLS-aware.
907 # While this means you can set the level/range with
908 # selinux.lsetfilecon(), it may or may not mean that you
909 # will get the selevel as part of the context returned
910 # by selinux.lgetfilecon().
911
912 def selinux_mls_enabled(self):
913 if not HAVE_SELINUX:
914 return False
915 if selinux.is_selinux_mls_enabled() == 1:
916 return True
917 else:
918 return False
919
920 def selinux_enabled(self):
921 if not HAVE_SELINUX:
922 seenabled = self.get_bin_path('selinuxenabled')
923 if seenabled is not None:
924 (rc, out, err) = self.run_command(seenabled)
925 if rc == 0:
926 self.fail_json(msg="Aborting, target uses selinux but python bindings (libselinux-python) aren't installed!")
927 return False
928 if selinux.is_selinux_enabled() == 1:
929 return True
930 else:
931 return False
932
933 # Determine whether we need a placeholder for selevel/mls
934 def selinux_initial_context(self):
935 context = [None, None, None]
936 if self.selinux_mls_enabled():
937 context.append(None)
938 return context
939
940 # If selinux fails to find a default, return an array of None
941 def selinux_default_context(self, path, mode=0):
942 context = self.selinux_initial_context()
943 if not HAVE_SELINUX or not self.selinux_enabled():
944 return context
945 try:
946 ret = selinux.matchpathcon(to_native(path, errors='surrogate_or_strict'), mode)
947 except OSError:
948 return context
949 if ret[0] == -1:
950 return context
951 # Limit split to 4 because the selevel, the last in the list,
952 # may contain ':' characters
953 context = ret[1].split(':', 3)
954 return context
955
956 def selinux_context(self, path):
957 context = self.selinux_initial_context()
958 if not HAVE_SELINUX or not self.selinux_enabled():
959 return context
960 try:
961 ret = selinux.lgetfilecon_raw(to_native(path, errors='surrogate_or_strict'))
962 except OSError as e:
963 if e.errno == errno.ENOENT:
964 self.fail_json(path=path, msg='path %s does not exist' % path)
965 else:
966 self.fail_json(path=path, msg='failed to retrieve selinux context')
967 if ret[0] == -1:
968 return context
969 # Limit split to 4 because the selevel, the last in the list,
970 # may contain ':' characters
971 context = ret[1].split(':', 3)
972 return context
973
974 def user_and_group(self, path, expand=True):
975 b_path = to_bytes(path, errors='surrogate_or_strict')
976 if expand:
977 b_path = os.path.expanduser(os.path.expandvars(b_path))
978 st = os.lstat(b_path)
979 uid = st.st_uid
980 gid = st.st_gid
981 return (uid, gid)
982
983 def find_mount_point(self, path):
984 path_is_bytes = False
985 if isinstance(path, binary_type):
986 path_is_bytes = True
987
988 b_path = os.path.realpath(to_bytes(os.path.expanduser(os.path.expandvars(path)), errors='surrogate_or_strict'))
989 while not os.path.ismount(b_path):
990 b_path = os.path.dirname(b_path)
991
992 if path_is_bytes:
993 return b_path
994
995 return to_text(b_path, errors='surrogate_or_strict')
996
997 def is_special_selinux_path(self, path):
998 """
999 Returns a tuple containing (True, selinux_context) if the given path is on a
1000 NFS or other 'special' fs mount point, otherwise the return will be (False, None).
1001 """
1002 try:
1003 f = open('/proc/mounts', 'r')
1004 mount_data = f.readlines()
1005 f.close()
1006 except Exception:
1007 return (False, None)
1008
1009 path_mount_point = self.find_mount_point(path)
1010
1011 for line in mount_data:
1012 (device, mount_point, fstype, options, rest) = line.split(' ', 4)
1013 if to_bytes(path_mount_point) == to_bytes(mount_point):
1014 for fs in self._selinux_special_fs:
1015 if fs in fstype:
1016 special_context = self.selinux_context(path_mount_point)
1017 return (True, special_context)
1018
1019 return (False, None)
1020
1021 def set_default_selinux_context(self, path, changed):
1022 if not HAVE_SELINUX or not self.selinux_enabled():
1023 return changed
1024 context = self.selinux_default_context(path)
1025 return self.set_context_if_different(path, context, False)
1026
1027 def set_context_if_different(self, path, context, changed, diff=None):
1028
1029 if not HAVE_SELINUX or not self.selinux_enabled():
1030 return changed
1031
1032 if self.check_file_absent_if_check_mode(path):
1033 return True
1034
1035 cur_context = self.selinux_context(path)
1036 new_context = list(cur_context)
1037 # Iterate over the current context instead of the
1038 # argument context, which may have selevel.
1039
1040 (is_special_se, sp_context) = self.is_special_selinux_path(path)
1041 if is_special_se:
1042 new_context = sp_context
1043 else:
1044 for i in range(len(cur_context)):
1045 if len(context) > i:
1046 if context[i] is not None and context[i] != cur_context[i]:
1047 new_context[i] = context[i]
1048 elif context[i] is None:
1049 new_context[i] = cur_context[i]
1050
1051 if cur_context != new_context:
1052 if diff is not None:
1053 if 'before' not in diff:
1054 diff['before'] = {}
1055 diff['before']['secontext'] = cur_context
1056 if 'after' not in diff:
1057 diff['after'] = {}
1058 diff['after']['secontext'] = new_context
1059
1060 try:
1061 if self.check_mode:
1062 return True
1063 rc = selinux.lsetfilecon(to_native(path), ':'.join(new_context))
1064 except OSError as e:
1065 self.fail_json(path=path, msg='invalid selinux context: %s' % to_native(e),
1066 new_context=new_context, cur_context=cur_context, input_was=context)
1067 if rc != 0:
1068 self.fail_json(path=path, msg='set selinux context failed')
1069 changed = True
1070 return changed
1071
1072 def set_owner_if_different(self, path, owner, changed, diff=None, expand=True):
1073
1074 if owner is None:
1075 return changed
1076
1077 b_path = to_bytes(path, errors='surrogate_or_strict')
1078 if expand:
1079 b_path = os.path.expanduser(os.path.expandvars(b_path))
1080
1081 if self.check_file_absent_if_check_mode(b_path):
1082 return True
1083
1084 orig_uid, orig_gid = self.user_and_group(b_path, expand)
1085 try:
1086 uid = int(owner)
1087 except ValueError:
1088 try:
1089 uid = pwd.getpwnam(owner).pw_uid
1090 except KeyError:
1091 path = to_text(b_path)
1092 self.fail_json(path=path, msg='chown failed: failed to look up user %s' % owner)
1093
1094 if orig_uid != uid:
1095 if diff is not None:
1096 if 'before' not in diff:
1097 diff['before'] = {}
1098 diff['before']['owner'] = orig_uid
1099 if 'after' not in diff:
1100 diff['after'] = {}
1101 diff['after']['owner'] = uid
1102
1103 if self.check_mode:
1104 return True
1105 try:
1106 os.lchown(b_path, uid, -1)
1107 except (IOError, OSError) as e:
1108 path = to_text(b_path)
1109 self.fail_json(path=path, msg='chown failed: %s' % (to_text(e)))
1110 changed = True
1111 return changed
1112
1113 def set_group_if_different(self, path, group, changed, diff=None, expand=True):
1114
1115 if group is None:
1116 return changed
1117
1118 b_path = to_bytes(path, errors='surrogate_or_strict')
1119 if expand:
1120 b_path = os.path.expanduser(os.path.expandvars(b_path))
1121
1122 if self.check_file_absent_if_check_mode(b_path):
1123 return True
1124
1125 orig_uid, orig_gid = self.user_and_group(b_path, expand)
1126 try:
1127 gid = int(group)
1128 except ValueError:
1129 try:
1130 gid = grp.getgrnam(group).gr_gid
1131 except KeyError:
1132 path = to_text(b_path)
1133 self.fail_json(path=path, msg='chgrp failed: failed to look up group %s' % group)
1134
1135 if orig_gid != gid:
1136 if diff is not None:
1137 if 'before' not in diff:
1138 diff['before'] = {}
1139 diff['before']['group'] = orig_gid
1140 if 'after' not in diff:
1141 diff['after'] = {}
1142 diff['after']['group'] = gid
1143
1144 if self.check_mode:
1145 return True
1146 try:
1147 os.lchown(b_path, -1, gid)
1148 except OSError:
1149 path = to_text(b_path)
1150 self.fail_json(path=path, msg='chgrp failed')
1151 changed = True
1152 return changed
1153
1154 def set_mode_if_different(self, path, mode, changed, diff=None, expand=True):
1155
1156 if mode is None:
1157 return changed
1158
1159 b_path = to_bytes(path, errors='surrogate_or_strict')
1160 if expand:
1161 b_path = os.path.expanduser(os.path.expandvars(b_path))
1162 path_stat = os.lstat(b_path)
1163
1164 if self.check_file_absent_if_check_mode(b_path):
1165 return True
1166
1167 if not isinstance(mode, int):
1168 try:
1169 mode = int(mode, 8)
1170 except Exception:
1171 try:
1172 mode = self._symbolic_mode_to_octal(path_stat, mode)
1173 except Exception as e:
1174 path = to_text(b_path)
1175 self.fail_json(path=path,
1176 msg="mode must be in octal or symbolic form",
1177 details=to_native(e))
1178
1179 if mode != stat.S_IMODE(mode):
1180 # prevent mode from having extra info or being invalid long number
1181 path = to_text(b_path)
1182 self.fail_json(path=path, msg="Invalid mode supplied, only permission info is allowed", details=mode)
1183
1184 prev_mode = stat.S_IMODE(path_stat.st_mode)
1185
1186 if prev_mode != mode:
1187
1188 if diff is not None:
1189 if 'before' not in diff:
1190 diff['before'] = {}
1191 diff['before']['mode'] = '0%03o' % prev_mode
1192 if 'after' not in diff:
1193 diff['after'] = {}
1194 diff['after']['mode'] = '0%03o' % mode
1195
1196 if self.check_mode:
1197 return True
1198 # FIXME: comparison against string above will cause this to be executed
1199 # every time
1200 try:
1201 if hasattr(os, 'lchmod'):
1202 os.lchmod(b_path, mode)
1203 else:
1204 if not os.path.islink(b_path):
1205 os.chmod(b_path, mode)
1206 else:
1207 # Attempt to set the perms of the symlink but be
1208 # careful not to change the perms of the underlying
1209 # file while trying
1210 underlying_stat = os.stat(b_path)
1211 os.chmod(b_path, mode)
1212 new_underlying_stat = os.stat(b_path)
1213 if underlying_stat.st_mode != new_underlying_stat.st_mode:
1214 os.chmod(b_path, stat.S_IMODE(underlying_stat.st_mode))
1215 except OSError as e:
1216 if os.path.islink(b_path) and e.errno in (
1217 errno.EACCES, # can't access symlink in sticky directory (stat)
1218 errno.EPERM, # can't set mode on symbolic links (chmod)
1219 errno.EROFS, # can't set mode on read-only filesystem
1220 ):
1221 pass
1222 elif e.errno in (errno.ENOENT, errno.ELOOP): # Can't set mode on broken symbolic links
1223 pass
1224 else:
1225 raise
1226 except Exception as e:
1227 path = to_text(b_path)
1228 self.fail_json(path=path, msg='chmod failed', details=to_native(e),
1229 exception=traceback.format_exc())
1230
1231 path_stat = os.lstat(b_path)
1232 new_mode = stat.S_IMODE(path_stat.st_mode)
1233
1234 if new_mode != prev_mode:
1235 changed = True
1236 return changed
1237
1238 def set_attributes_if_different(self, path, attributes, changed, diff=None, expand=True):
1239
1240 if attributes is None:
1241 return changed
1242
1243 b_path = to_bytes(path, errors='surrogate_or_strict')
1244 if expand:
1245 b_path = os.path.expanduser(os.path.expandvars(b_path))
1246
1247 if self.check_file_absent_if_check_mode(b_path):
1248 return True
1249
1250 existing = self.get_file_attributes(b_path)
1251
1252 attr_mod = '='
1253 if attributes.startswith(('-', '+')):
1254 attr_mod = attributes[0]
1255 attributes = attributes[1:]
1256
1257 if existing.get('attr_flags', '') != attributes or attr_mod == '-':
1258 attrcmd = self.get_bin_path('chattr')
1259 if attrcmd:
1260 attrcmd = [attrcmd, '%s%s' % (attr_mod, attributes), b_path]
1261 changed = True
1262
1263 if diff is not None:
1264 if 'before' not in diff:
1265 diff['before'] = {}
1266 diff['before']['attributes'] = existing.get('attr_flags')
1267 if 'after' not in diff:
1268 diff['after'] = {}
1269 diff['after']['attributes'] = '%s%s' % (attr_mod, attributes)
1270
1271 if not self.check_mode:
1272 try:
1273 rc, out, err = self.run_command(attrcmd)
1274 if rc != 0 or err:
1275 raise Exception("Error while setting attributes: %s" % (out + err))
1276 except Exception as e:
1277 self.fail_json(path=to_text(b_path), msg='chattr failed',
1278 details=to_native(e), exception=traceback.format_exc())
1279 return changed
1280
1281 def get_file_attributes(self, path):
1282 output = {}
1283 attrcmd = self.get_bin_path('lsattr', False)
1284 if attrcmd:
1285 attrcmd = [attrcmd, '-vd', path]
1286 try:
1287 rc, out, err = self.run_command(attrcmd)
1288 if rc == 0:
1289 res = out.split()
1290 output['attr_flags'] = res[1].replace('-', '').strip()
1291 output['version'] = res[0].strip()
1292 output['attributes'] = format_attributes(output['attr_flags'])
1293 except Exception:
1294 pass
1295 return output
1296
1297 @classmethod
1298 def _symbolic_mode_to_octal(cls, path_stat, symbolic_mode):
1299 """
1300 This enables symbolic chmod string parsing as stated in the chmod man-page
1301
1302 This includes things like: "u=rw-x+X,g=r-x+X,o=r-x+X"
1303 """
1304
1305 new_mode = stat.S_IMODE(path_stat.st_mode)
1306
1307 # Now parse all symbolic modes
1308 for mode in symbolic_mode.split(','):
1309 # Per single mode. This always contains a '+', '-' or '='
1310 # Split it on that
1311 permlist = MODE_OPERATOR_RE.split(mode)
1312
1313 # And find all the operators
1314 opers = MODE_OPERATOR_RE.findall(mode)
1315
1316 # The user(s) where it's all about is the first element in the
1317 # 'permlist' list. Take that and remove it from the list.
1318 # An empty user or 'a' means 'all'.
1319 users = permlist.pop(0)
1320 use_umask = (users == '')
1321 if users == 'a' or users == '':
1322 users = 'ugo'
1323
1324 # Check if there are illegal characters in the user list
1325 # They can end up in 'users' because they are not split
1326 if USERS_RE.match(users):
1327 raise ValueError("bad symbolic permission for mode: %s" % mode)
1328
1329 # Now we have two list of equal length, one contains the requested
1330 # permissions and one with the corresponding operators.
1331 for idx, perms in enumerate(permlist):
1332 # Check if there are illegal characters in the permissions
1333 if PERMS_RE.match(perms):
1334 raise ValueError("bad symbolic permission for mode: %s" % mode)
1335
1336 for user in users:
1337 mode_to_apply = cls._get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask)
1338 new_mode = cls._apply_operation_to_mode(user, opers[idx], mode_to_apply, new_mode)
1339
1340 return new_mode
1341
1342 @staticmethod
1343 def _apply_operation_to_mode(user, operator, mode_to_apply, current_mode):
1344 if operator == '=':
1345 if user == 'u':
1346 mask = stat.S_IRWXU | stat.S_ISUID
1347 elif user == 'g':
1348 mask = stat.S_IRWXG | stat.S_ISGID
1349 elif user == 'o':
1350 mask = stat.S_IRWXO | stat.S_ISVTX
1351
1352 # mask out u, g, or o permissions from current_mode and apply new permissions
1353 inverse_mask = mask ^ PERM_BITS
1354 new_mode = (current_mode & inverse_mask) | mode_to_apply
1355 elif operator == '+':
1356 new_mode = current_mode | mode_to_apply
1357 elif operator == '-':
1358 new_mode = current_mode - (current_mode & mode_to_apply)
1359 return new_mode
1360
1361 @staticmethod
1362 def _get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask):
1363 prev_mode = stat.S_IMODE(path_stat.st_mode)
1364
1365 is_directory = stat.S_ISDIR(path_stat.st_mode)
1366 has_x_permissions = (prev_mode & EXEC_PERM_BITS) > 0
1367 apply_X_permission = is_directory or has_x_permissions
1368
1369 # Get the umask, if the 'user' part is empty, the effect is as if (a) were
1370 # given, but bits that are set in the umask are not affected.
1371 # We also need the "reversed umask" for masking
1372 umask = os.umask(0)
1373 os.umask(umask)
1374 rev_umask = umask ^ PERM_BITS
1375
1376 # Permission bits constants documented at:
1377 # http://docs.python.org/2/library/stat.html#stat.S_ISUID
1378 if apply_X_permission:
1379 X_perms = {
1380 'u': {'X': stat.S_IXUSR},
1381 'g': {'X': stat.S_IXGRP},
1382 'o': {'X': stat.S_IXOTH},
1383 }
1384 else:
1385 X_perms = {
1386 'u': {'X': 0},
1387 'g': {'X': 0},
1388 'o': {'X': 0},
1389 }
1390
1391 user_perms_to_modes = {
1392 'u': {
1393 'r': rev_umask & stat.S_IRUSR if use_umask else stat.S_IRUSR,
1394 'w': rev_umask & stat.S_IWUSR if use_umask else stat.S_IWUSR,
1395 'x': rev_umask & stat.S_IXUSR if use_umask else stat.S_IXUSR,
1396 's': stat.S_ISUID,
1397 't': 0,
1398 'u': prev_mode & stat.S_IRWXU,
1399 'g': (prev_mode & stat.S_IRWXG) << 3,
1400 'o': (prev_mode & stat.S_IRWXO) << 6},
1401 'g': {
1402 'r': rev_umask & stat.S_IRGRP if use_umask else stat.S_IRGRP,
1403 'w': rev_umask & stat.S_IWGRP if use_umask else stat.S_IWGRP,
1404 'x': rev_umask & stat.S_IXGRP if use_umask else stat.S_IXGRP,
1405 's': stat.S_ISGID,
1406 't': 0,
1407 'u': (prev_mode & stat.S_IRWXU) >> 3,
1408 'g': prev_mode & stat.S_IRWXG,
1409 'o': (prev_mode & stat.S_IRWXO) << 3},
1410 'o': {
1411 'r': rev_umask & stat.S_IROTH if use_umask else stat.S_IROTH,
1412 'w': rev_umask & stat.S_IWOTH if use_umask else stat.S_IWOTH,
1413 'x': rev_umask & stat.S_IXOTH if use_umask else stat.S_IXOTH,
1414 's': 0,
1415 't': stat.S_ISVTX,
1416 'u': (prev_mode & stat.S_IRWXU) >> 6,
1417 'g': (prev_mode & stat.S_IRWXG) >> 3,
1418 'o': prev_mode & stat.S_IRWXO},
1419 }
1420
1421 # Insert X_perms into user_perms_to_modes
1422 for key, value in X_perms.items():
1423 user_perms_to_modes[key].update(value)
1424
1425 def or_reduce(mode, perm):
1426 return mode | user_perms_to_modes[user][perm]
1427
1428 return reduce(or_reduce, perms, 0)
1429
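# --- Illustrative sketch (annotation, not part of basic.py) ---
# Worked example of the symbolic-mode parsing above: for a file whose current
# mode is 0o640, 'u=rw,g=r,o=r' yields 0o644 and 'g+w' yields 0o660 (existing
# bits are kept and S_IWGRP is OR-ed in). Hypothetical call:
#
#   st = os.lstat('/tmp/example')
#   new_mode = AnsibleModule._symbolic_mode_to_octal(st, 'u=rw,g=r,o=r')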
1430 def set_fs_attributes_if_different(self, file_args, changed, diff=None, expand=True):
1431 # set modes owners and context as needed
1432 changed = self.set_context_if_different(
1433 file_args['path'], file_args['secontext'], changed, diff
1434 )
1435 changed = self.set_owner_if_different(
1436 file_args['path'], file_args['owner'], changed, diff, expand
1437 )
1438 changed = self.set_group_if_different(
1439 file_args['path'], file_args['group'], changed, diff, expand
1440 )
1441 changed = self.set_mode_if_different(
1442 file_args['path'], file_args['mode'], changed, diff, expand
1443 )
1444 changed = self.set_attributes_if_different(
1445 file_args['path'], file_args['attributes'], changed, diff, expand
1446 )
1447 return changed
1448
1449 def check_file_absent_if_check_mode(self, file_path):
1450 return self.check_mode and not os.path.exists(file_path)
1451
1452 def set_directory_attributes_if_different(self, file_args, changed, diff=None, expand=True):
1453 return self.set_fs_attributes_if_different(file_args, changed, diff, expand)
1454
1455 def set_file_attributes_if_different(self, file_args, changed, diff=None, expand=True):
1456 return self.set_fs_attributes_if_different(file_args, changed, diff, expand)
1457
1458 def add_path_info(self, kwargs):
1459 '''
1460 for results that are files, supplement the info about the file
1461 in the return path with stats about the file path.
1462 '''
1463
1464 path = kwargs.get('path', kwargs.get('dest', None))
1465 if path is None:
1466 return kwargs
1467 b_path = to_bytes(path, errors='surrogate_or_strict')
1468 if os.path.exists(b_path):
1469 (uid, gid) = self.user_and_group(path)
1470 kwargs['uid'] = uid
1471 kwargs['gid'] = gid
1472 try:
1473 user = pwd.getpwuid(uid)[0]
1474 except KeyError:
1475 user = str(uid)
1476 try:
1477 group = grp.getgrgid(gid)[0]
1478 except KeyError:
1479 group = str(gid)
1480 kwargs['owner'] = user
1481 kwargs['group'] = group
1482 st = os.lstat(b_path)
1483 kwargs['mode'] = '0%03o' % stat.S_IMODE(st[stat.ST_MODE])
1484 # secontext not yet supported
1485 if os.path.islink(b_path):
1486 kwargs['state'] = 'link'
1487 elif os.path.isdir(b_path):
1488 kwargs['state'] = 'directory'
1489 elif os.stat(b_path).st_nlink > 1:
1490 kwargs['state'] = 'hard'
1491 else:
1492 kwargs['state'] = 'file'
1493 if HAVE_SELINUX and self.selinux_enabled():
1494 kwargs['secontext'] = ':'.join(self.selinux_context(path))
1495 kwargs['size'] = st[stat.ST_SIZE]
1496 return kwargs
1497
1498 def _check_locale(self):
1499 '''
1500 Uses the locale module to test the currently set locale
1501 (per the LANG and LC_CTYPE environment settings)
1502 '''
1503 try:
1504 # setting the locale to '' uses the default locale
1505 # as it would be returned by locale.getdefaultlocale()
1506 locale.setlocale(locale.LC_ALL, '')
1507 except locale.Error:
1508 # fallback to the 'C' locale, which may cause unicode
1509 # issues but is preferable to simply failing because
1510 # of an unknown locale
1511 locale.setlocale(locale.LC_ALL, 'C')
1512 os.environ['LANG'] = 'C'
1513 os.environ['LC_ALL'] = 'C'
1514 os.environ['LC_MESSAGES'] = 'C'
1515 except Exception as e:
1516 self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" %
1517 to_native(e), exception=traceback.format_exc())
1518
1519 def _handle_aliases(self, spec=None, param=None, option_prefix=''):
1520 if spec is None:
1521 spec = self.argument_spec
1522 if param is None:
1523 param = self.params
1524
1525 # this uses exceptions as it happens before we can safely call fail_json
1526 alias_warnings = []
1527 alias_results, self._legal_inputs = handle_aliases(spec, param, alias_warnings=alias_warnings)
1528 for option, alias in alias_warnings:
1529 self._warnings.append('Both option %s and its alias %s are set.' % (option_prefix + option, option_prefix + alias))
1530
1531 deprecated_aliases = []
1532 for i in spec.keys():
1533 if 'deprecated_aliases' in spec[i].keys():
1534 for alias in spec[i]['deprecated_aliases']:
1535 deprecated_aliases.append(alias)
1536
1537 for deprecation in deprecated_aliases:
1538 if deprecation['name'] in param.keys():
1539 self._deprecations.append(
1540 {'msg': "Alias '%s' is deprecated. See the module docs for more information" % deprecation['name'],
1541 'version': deprecation.get('version')})
1542 return alias_results
1543
1544 def _handle_no_log_values(self, spec=None, param=None):
1545 if spec is None:
1546 spec = self.argument_spec
1547 if param is None:
1548 param = self.params
1549
1550 try:
1551 self.no_log_values.update(list_no_log_values(spec, param))
1552 except TypeError as te:
1553 self.fail_json(msg="Failure when processing no_log parameters. Module invocation will be hidden. "
1554 "%s" % to_native(te), invocation={'module_args': 'HIDDEN DUE TO FAILURE'})
1555 self._deprecations.extend(list_deprecations(spec, param))
1556
1557 def _check_arguments(self, check_invalid_arguments, spec=None, param=None, legal_inputs=None):
1558 self._syslog_facility = 'LOG_USER'
1559 unsupported_parameters = set()
1560 if spec is None:
1561 spec = self.argument_spec
1562 if param is None:
1563 param = self.params
1564 if legal_inputs is None:
1565 legal_inputs = self._legal_inputs
1566
1567 for k in list(param.keys()):
1568
1569 if check_invalid_arguments and k not in legal_inputs:
1570 unsupported_parameters.add(k)
1571
1572 for k in PASS_VARS:
1573 # handle setting internal properties from internal ansible vars
1574 param_key = '_ansible_%s' % k
1575 if param_key in param:
1576 if k in PASS_BOOLS:
1577 setattr(self, PASS_VARS[k][0], self.boolean(param[param_key]))
1578 else:
1579 setattr(self, PASS_VARS[k][0], param[param_key])
1580
1581 # clean up internal top level params:
1582 if param_key in self.params:
1583 del self.params[param_key]
1584 else:
1585 # use defaults if not already set
1586 if not hasattr(self, PASS_VARS[k][0]):
1587 setattr(self, PASS_VARS[k][0], PASS_VARS[k][1])
1588
1589 if unsupported_parameters:
1590 msg = "Unsupported parameters for (%s) module: %s" % (self._name, ', '.join(sorted(list(unsupported_parameters))))
1591 if self._options_context:
1592 msg += " found in %s." % " -> ".join(self._options_context)
1593 msg += " Supported parameters include: %s" % (', '.join(sorted(spec.keys())))
1594 self.fail_json(msg=msg)
1595 if self.check_mode and not self.supports_check_mode:
1596 self.exit_json(skipped=True, msg="remote module (%s) does not support check mode" % self._name)
1597
1598 def _count_terms(self, check, param=None):
1599 if param is None:
1600 param = self.params
1601 return count_terms(check, param)
1602
1603 def _check_mutually_exclusive(self, spec, param=None):
1604 if param is None:
1605 param = self.params
1606
1607 try:
1608 check_mutually_exclusive(spec, param)
1609 except TypeError as e:
1610 msg = to_native(e)
1611 if self._options_context:
1612 msg += " found in %s" % " -> ".join(self._options_context)
1613 self.fail_json(msg=msg)
1614
1615 def _check_required_one_of(self, spec, param=None):
1616 if spec is None:
1617 return
1618
1619 if param is None:
1620 param = self.params
1621
1622 try:
1623 check_required_one_of(spec, param)
1624 except TypeError as e:
1625 msg = to_native(e)
1626 if self._options_context:
1627 msg += " found in %s" % " -> ".join(self._options_context)
1628 self.fail_json(msg=msg)
1629
1630 def _check_required_together(self, spec, param=None):
1631 if spec is None:
1632 return
1633 if param is None:
1634 param = self.params
1635
1636 try:
1637 check_required_together(spec, param)
1638 except TypeError as e:
1639 msg = to_native(e)
1640 if self._options_context:
1641 msg += " found in %s" % " -> ".join(self._options_context)
1642 self.fail_json(msg=msg)
1643
1644 def _check_required_by(self, spec, param=None):
1645 if spec is None:
1646 return
1647 if param is None:
1648 param = self.params
1649
1650 try:
1651 check_required_by(spec, param)
1652 except TypeError as e:
1653 self.fail_json(msg=to_native(e))
1654
1655 def _check_required_arguments(self, spec=None, param=None):
1656 if spec is None:
1657 spec = self.argument_spec
1658 if param is None:
1659 param = self.params
1660
1661 try:
1662 check_required_arguments(spec, param)
1663 except TypeError as e:
1664 msg = to_native(e)
1665 if self._options_context:
1666 msg += " found in %s" % " -> ".join(self._options_context)
1667 self.fail_json(msg=msg)
1668
1669 def _check_required_if(self, spec, param=None):
1670 ''' ensure that parameters which are conditionally required are present '''
1671 if spec is None:
1672 return
1673 if param is None:
1674 param = self.params
1675
1676 try:
1677 check_required_if(spec, param)
1678 except TypeError as e:
1679 msg = to_native(e)
1680 if self._options_context:
1681 msg += " found in %s" % " -> ".join(self._options_context)
1682 self.fail_json(msg=msg)
1683
1684 def _check_argument_values(self, spec=None, param=None):
1685 ''' ensure all arguments have the requested values, and there are no stray arguments '''
1686 if spec is None:
1687 spec = self.argument_spec
1688 if param is None:
1689 param = self.params
1690 for (k, v) in spec.items():
1691 choices = v.get('choices', None)
1692 if choices is None:
1693 continue
1694 if isinstance(choices, SEQUENCETYPE) and not isinstance(choices, (binary_type, text_type)):
1695 if k in param:
1696 # Allow one or more when type='list' param with choices
1697 if isinstance(param[k], list):
1698 diff_list = ", ".join([item for item in param[k] if item not in choices])
1699 if diff_list:
1700 choices_str = ", ".join([to_native(c) for c in choices])
1701 msg = "value of %s must be one or more of: %s. Got no match for: %s" % (k, choices_str, diff_list)
1702 if self._options_context:
1703 msg += " found in %s" % " -> ".join(self._options_context)
1704 self.fail_json(msg=msg)
1705 elif param[k] not in choices:
1706 # PyYaml converts certain strings to bools. If we can unambiguously convert back, do so before checking
1707 # the value. If we can't figure this out, module author is responsible.
1708 lowered_choices = None
1709 if param[k] == 'False':
1710 lowered_choices = lenient_lowercase(choices)
1711 overlap = BOOLEANS_FALSE.intersection(choices)
1712 if len(overlap) == 1:
1713 # Extract from a set
1714 (param[k],) = overlap
1715
1716 if param[k] == 'True':
1717 if lowered_choices is None:
1718 lowered_choices = lenient_lowercase(choices)
1719 overlap = BOOLEANS_TRUE.intersection(choices)
1720 if len(overlap) == 1:
1721 (param[k],) = overlap
1722
1723 if param[k] not in choices:
1724 choices_str = ", ".join([to_native(c) for c in choices])
1725 msg = "value of %s must be one of: %s, got: %s" % (k, choices_str, param[k])
1726 if self._options_context:
1727 msg += " found in %s" % " -> ".join(self._options_context)
1728 self.fail_json(msg=msg)
1729 else:
1730 msg = "internal error: choices for argument %s are not iterable: %s" % (k, choices)
1731 if self._options_context:
1732 msg += " found in %s" % " -> ".join(self._options_context)
1733 self.fail_json(msg=msg)
1734
1735 def safe_eval(self, value, locals=None, include_exceptions=False):
1736 return safe_eval(value, locals, include_exceptions)
1737
1738 def _check_type_str(self, value):
1739 opts = {
1740 'error': False,
1741 'warn': False,
1742 'ignore': True
1743 }
1744
1745 # Ignore, warn, or error when converting to a string.
1746 allow_conversion = opts.get(self._string_conversion_action, True)
1747 try:
1748 return check_type_str(value, allow_conversion)
1749 except TypeError:
1750 common_msg = 'quote the entire value to ensure it does not change.'
1751 if self._string_conversion_action == 'error':
1752 msg = common_msg.capitalize()
1753 raise TypeError(to_native(msg))
1754 elif self._string_conversion_action == 'warn':
1755 msg = ('The value {0!r} (type {0.__class__.__name__}) in a string field was converted to {1!r} (type string). '
1756 'If this does not look like what you expect, {2}').format(value, to_text(value), common_msg)
1757 self.warn(to_native(msg))
1758 return to_native(value, errors='surrogate_or_strict')
1759
1760 def _check_type_list(self, value):
1761 return check_type_list(value)
1762
1763 def _check_type_dict(self, value):
1764 return check_type_dict(value)
1765
1766 def _check_type_bool(self, value):
1767 return check_type_bool(value)
1768
1769 def _check_type_int(self, value):
1770 return check_type_int(value)
1771
1772 def _check_type_float(self, value):
1773 return check_type_float(value)
1774
1775 def _check_type_path(self, value):
1776 return check_type_path(value)
1777
1778 def _check_type_jsonarg(self, value):
1779 return check_type_jsonarg(value)
1780
1781 def _check_type_raw(self, value):
1782 return check_type_raw(value)
1783
1784 def _check_type_bytes(self, value):
1785 return check_type_bytes(value)
1786
1787 def _check_type_bits(self, value):
1788 return check_type_bits(value)
1789
1790 def _handle_options(self, argument_spec=None, params=None, prefix=''):
1791 ''' deal with options to create sub spec '''
1792 if argument_spec is None:
1793 argument_spec = self.argument_spec
1794 if params is None:
1795 params = self.params
1796
1797 for (k, v) in argument_spec.items():
1798 wanted = v.get('type', None)
1799 if wanted == 'dict' or (wanted == 'list' and v.get('elements', '') == 'dict'):
1800 spec = v.get('options', None)
1801 if v.get('apply_defaults', False):
1802 if spec is not None:
1803 if params.get(k) is None:
1804 params[k] = {}
1805 else:
1806 continue
1807 elif spec is None or k not in params or params[k] is None:
1808 continue
1809
1810 self._options_context.append(k)
1811
1812 if isinstance(params[k], dict):
1813 elements = [params[k]]
1814 else:
1815 elements = params[k]
1816
1817 for idx, param in enumerate(elements):
1818 if not isinstance(param, dict):
1819 self.fail_json(msg="value of %s must be of type dict or list of dict" % k)
1820
1821 new_prefix = prefix + k
1822 if wanted == 'list':
1823 new_prefix += '[%d]' % idx
1824 new_prefix += '.'
1825
1826 self._set_fallbacks(spec, param)
1827 options_aliases = self._handle_aliases(spec, param, option_prefix=new_prefix)
1828
1829 options_legal_inputs = list(spec.keys()) + list(options_aliases.keys())
1830
1831 self._check_arguments(self.check_invalid_arguments, spec, param, options_legal_inputs)
1832
1833 # check exclusive early
1834 if not self.bypass_checks:
1835 self._check_mutually_exclusive(v.get('mutually_exclusive', None), param)
1836
1837 self._set_defaults(pre=True, spec=spec, param=param)
1838
1839 if not self.bypass_checks:
1840 self._check_required_arguments(spec, param)
1841 self._check_argument_types(spec, param)
1842 self._check_argument_values(spec, param)
1843
1844 self._check_required_together(v.get('required_together', None), param)
1845 self._check_required_one_of(v.get('required_one_of', None), param)
1846 self._check_required_if(v.get('required_if', None), param)
1847 self._check_required_by(v.get('required_by', None), param)
1848
1849 self._set_defaults(pre=False, spec=spec, param=param)
1850
1851 # handle multi level options (sub argspec)
1852 self._handle_options(spec, param, new_prefix)
1853 self._options_context.pop()
1854
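
The method above recurses into 'options' sub-specs for dict-typed parameters, and for list parameters whose elements are dicts, re-running the fallback, alias, type, and requirement checks for every element. A sketch of a spec it would descend into; the option names are illustrative:

argument_spec = dict(
    listeners=dict(
        type='list',
        elements='dict',
        options=dict(
            port=dict(type='int', required=True),
            protocol=dict(type='str', choices=['http', 'https'], default='http'),
        ),
    ),
)
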
1855 def _get_wanted_type(self, wanted, k):
1856 if not callable(wanted):
1857 if wanted is None:
1858 # Mostly we want to default to str.
1859 # For values set to None explicitly, return None instead as
1860 # that allows a user to unset a parameter
1861 wanted = 'str'
1862 try:
1863 type_checker = self._CHECK_ARGUMENT_TYPES_DISPATCHER[wanted]
1864 except KeyError:
1865 self.fail_json(msg="implementation error: unknown type %s requested for %s" % (wanted, k))
1866 else:
1867 # set the type_checker to the callable, and reset wanted to the callable's name (or type if it doesn't have one, ala MagicMock)
1868 type_checker = wanted
1869 wanted = getattr(wanted, '__name__', to_native(type(wanted)))
1870
1871 return type_checker, wanted
1872
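
As the comment above notes, 'type' in an argument spec may also be a callable, which is then used directly as the type checker; its __name__ is what appears in error messages. A sketch with an invented validator:

def port_number(value):
    # raise TypeError/ValueError to reject the value, return the converted value otherwise
    value = int(value)
    if not 0 < value < 65536:
        raise ValueError('%d is not a valid port number' % value)
    return value

argument_spec = dict(
    port=dict(type=port_number, default=8080),
)
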
1873 def _handle_elements(self, wanted, param, values):
1874 type_checker, wanted_name = self._get_wanted_type(wanted, param)
1875 validated_params = []
1876 for value in values:
1877 try:
1878 validated_params.append(type_checker(value))
1879 except (TypeError, ValueError) as e:
1880 msg = "Elements value for option %s" % param
1881 if self._options_context:
1882 msg += " found in '%s'" % " -> ".join(self._options_context)
1883 msg += " is of type %s and we were unable to convert to %s: %s" % (type(value), wanted_name, to_native(e))
1884 self.fail_json(msg=msg)
1885 return validated_params
1886
1887 def _check_argument_types(self, spec=None, param=None):
1888 ''' ensure all arguments have the requested type '''
1889
1890 if spec is None:
1891 spec = self.argument_spec
1892 if param is None:
1893 param = self.params
1894
1895 for (k, v) in spec.items():
1896 wanted = v.get('type', None)
1897 if k not in param:
1898 continue
1899
1900 value = param[k]
1901 if value is None:
1902 continue
1903
1904 type_checker, wanted_name = self._get_wanted_type(wanted, k)
1905 try:
1906 param[k] = type_checker(value)
1907 wanted_elements = v.get('elements', None)
1908 if wanted_elements:
1909 if wanted != 'list' or not isinstance(param[k], list):
1910 msg = "Invalid type %s for option '%s'" % (wanted_name, param)
1911 if self._options_context:
1912 msg += " found in '%s'." % " -> ".join(self._options_context)
1913 msg += ", elements value check is supported only with 'list' type"
1914 self.fail_json(msg=msg)
1915 param[k] = self._handle_elements(wanted_elements, k, param[k])
1916
1917 except (TypeError, ValueError) as e:
1918 msg = "argument %s is of type %s" % (k, type(value))
1919 if self._options_context:
1920 msg += " found in '%s'." % " -> ".join(self._options_context)
1921 msg += " and we were unable to convert to %s: %s" % (wanted_name, to_native(e))
1922 self.fail_json(msg=msg)
1923
1924 def _set_defaults(self, pre=True, spec=None, param=None):
1925 if spec is None:
1926 spec = self.argument_spec
1927 if param is None:
1928 param = self.params
1929 for (k, v) in spec.items():
1930 default = v.get('default', None)
1931
1932 # This prevents setting defaults on required items on the 1st run,
1933 # otherwise will set things without a default to None on the 2nd.
1934 if k not in param and (default is not None or not pre):
1935 # Make sure any default value for no_log fields are masked.
1936 if v.get('no_log', False) and default:
1937 self.no_log_values.add(default)
1938
1939 param[k] = default
1940
1941 def _set_fallbacks(self, spec=None, param=None):
1942 if spec is None:
1943 spec = self.argument_spec
1944 if param is None:
1945 param = self.params
1946
1947 for (k, v) in spec.items():
1948 fallback = v.get('fallback', (None,))
1949 fallback_strategy = fallback[0]
1950 fallback_args = []
1951 fallback_kwargs = {}
1952 if k not in param and fallback_strategy is not None:
1953 for item in fallback[1:]:
1954 if isinstance(item, dict):
1955 fallback_kwargs = item
1956 else:
1957 fallback_args = item
1958 try:
1959 fallback_value = fallback_strategy(*fallback_args, **fallback_kwargs)
1960 except AnsibleFallbackNotFound:
1961 continue
1962 else:
1963 if v.get('no_log', False) and fallback_value:
1964 self.no_log_values.add(fallback_value)
1965 param[k] = fallback_value
1966
1967 def _load_params(self):
1968 ''' read the input and set the params attribute.
1969
1970 This method is for backwards compatibility. The guts of the function
1971 were moved out in 2.1 so that custom modules could read the parameters.
1972 '''
1973 # debug overrides to read args from file or cmdline
1974 self.params = _load_params()
1975
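
For reference, the payload that _load_params() parses is, in Ansible 2.x, a JSON document whose module options live under the ANSIBLE_MODULE_ARGS key, alongside the internal '_ansible_*' settings consumed by _check_arguments() above. A rough sketch of its shape; the option values are invented:

example_payload = {
    'ANSIBLE_MODULE_ARGS': {
        'path': '/tmp/example',        # user-supplied option
        'state': 'present',
        '_ansible_check_mode': False,  # internal flag mapped via PASS_VARS
        '_ansible_no_log': False,
    },
}
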
1976 def _log_to_syslog(self, msg):
1977 if HAS_SYSLOG:
1978 module = 'ansible-%s' % self._name
1979 facility = getattr(syslog, self._syslog_facility, syslog.LOG_USER)
1980 syslog.openlog(str(module), 0, facility)
1981 syslog.syslog(syslog.LOG_INFO, msg)
1982
1983 def debug(self, msg):
1984 if self._debug:
1985 self.log('[debug] %s' % msg)
1986
1987 def log(self, msg, log_args=None):
1988
1989 if not self.no_log:
1990
1991 if log_args is None:
1992 log_args = dict()
1993
1994 module = 'ansible-%s' % self._name
1995 if isinstance(module, binary_type):
1996 module = module.decode('utf-8', 'replace')
1997
1998 # 6655 - allow for accented characters
1999 if not isinstance(msg, (binary_type, text_type)):
2000 raise TypeError("msg should be a string (got %s)" % type(msg))
2001
2002 # We want journal to always take text type
2003 # syslog takes bytes on py2, text type on py3
2004 if isinstance(msg, binary_type):
2005 journal_msg = remove_values(msg.decode('utf-8', 'replace'), self.no_log_values)
2006 else:
2007 # TODO: surrogateescape is a danger here on Py3
2008 journal_msg = remove_values(msg, self.no_log_values)
2009
2010 if PY3:
2011 syslog_msg = journal_msg
2012 else:
2013 syslog_msg = journal_msg.encode('utf-8', 'replace')
2014
2015 if has_journal:
2016 journal_args = [("MODULE", os.path.basename(__file__))]
2017 for arg in log_args:
2018 journal_args.append((arg.upper(), str(log_args[arg])))
2019 try:
2020 if HAS_SYSLOG:
2021 # If syslog_facility specified, it needs to convert
2022 # from the facility name to the facility code, and
2023 # set it as SYSLOG_FACILITY argument of journal.send()
2024 facility = getattr(syslog,
2025 self._syslog_facility,
2026 syslog.LOG_USER) >> 3
2027 journal.send(MESSAGE=u"%s %s" % (module, journal_msg),
2028 SYSLOG_FACILITY=facility,
2029 **dict(journal_args))
2030 else:
2031 journal.send(MESSAGE=u"%s %s" % (module, journal_msg),
2032 **dict(journal_args))
2033 except IOError:
2034 # fall back to syslog since logging to journal failed
2035 self._log_to_syslog(syslog_msg)
2036 else:
2037 self._log_to_syslog(syslog_msg)
2038
2039 def _log_invocation(self):
2040 ''' log that ansible ran the module '''
2041 # TODO: generalize a separate log function and make log_invocation use it
2042 # Sanitize possible password argument when logging.
2043 log_args = dict()
2044
2045 for param in self.params:
2046 canon = self.aliases.get(param, param)
2047 arg_opts = self.argument_spec.get(canon, {})
2048 no_log = arg_opts.get('no_log', None)
2049
2050 # try to proactively capture password/passphrase fields
2051 if no_log is None and PASSWORD_MATCH.search(param):
2052 log_args[param] = 'NOT_LOGGING_PASSWORD'
2053 self.warn('Module did not set no_log for %s' % param)
2054 elif self.boolean(no_log):
2055 log_args[param] = 'NOT_LOGGING_PARAMETER'
2056 else:
2057 param_val = self.params[param]
2058 if not isinstance(param_val, (text_type, binary_type)):
2059 param_val = str(param_val)
2060 elif isinstance(param_val, text_type):
2061 param_val = param_val.encode('utf-8')
2062 log_args[param] = heuristic_log_sanitize(param_val, self.no_log_values)
2063
2064 msg = ['%s=%s' % (to_native(arg), to_native(val)) for arg, val in log_args.items()]
2065 if msg:
2066 msg = 'Invoked with %s' % ' '.join(msg)
2067 else:
2068 msg = 'Invoked'
2069
2070 self.log(msg, log_args=log_args)
2071
2072 def _set_cwd(self):
2073 try:
2074 cwd = os.getcwd()
2075 if not os.access(cwd, os.F_OK | os.R_OK):
2076 raise Exception()
2077 return cwd
2078 except Exception:
2079 # we don't have access to the cwd, probably because of sudo.
2080 # Try and move to a neutral location to prevent errors
2081 for cwd in [self.tmpdir, os.path.expandvars('$HOME'), tempfile.gettempdir()]:
2082 try:
2083 if os.access(cwd, os.F_OK | os.R_OK):
2084 os.chdir(cwd)
2085 return cwd
2086 except Exception:
2087 pass
2088 # we won't error here, as it may *not* be a problem,
2089 # and we don't want to break modules unnecessarily
2090 return None
2091
2092 def get_bin_path(self, arg, required=False, opt_dirs=None):
2093 '''
2094 Find system executable in PATH.
2095
2096 :param arg: The executable to find.
2097 :param required: if executable is not found and required is ``True``, fail_json
2098 :param opt_dirs: optional list of directories to search in addition to ``PATH``
2099 :returns: if found return full path; otherwise return None
2100 '''
2101
2102 bin_path = None
2103 try:
2104 bin_path = get_bin_path(arg, required, opt_dirs)
2105 except ValueError as e:
2106 self.fail_json(msg=to_text(e))
2107
2108 return bin_path
2109
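
A minimal usage sketch, assuming an AnsibleModule instance named module; the tool name and the extra directory are arbitrary:

# required=True makes the module fail_json() if the binary cannot be found
git_path = module.get_bin_path('git', required=True, opt_dirs=['/usr/local/bin'])
rc, out, err = module.run_command([git_path, '--version'])
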
2110 def boolean(self, arg):
2111 '''Convert the argument to a boolean'''
2112 if arg is None:
2113 return arg
2114
2115 try:
2116 return boolean(arg)
2117 except TypeError as e:
2118 self.fail_json(msg=to_native(e))
2119
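
A short sketch, assuming a module instance: boolean() accepts the usual truthy/falsy spellings, passes None through unchanged, and ends the module via fail_json() on anything unrecognised.

force = module.boolean(module.params.get('force'))   # 'yes', 'on', '1', True  -> True
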
2120 def jsonify(self, data):
2121 try:
2122 return jsonify(data)
2123 except UnicodeError as e:
2124 self.fail_json(msg=to_text(e))
2125
2126 def from_json(self, data):
2127 return json.loads(data)
2128
2129 def add_cleanup_file(self, path):
2130 if path not in self.cleanup_files:
2131 self.cleanup_files.append(path)
2132
2133 def do_cleanup_files(self):
2134 for path in self.cleanup_files:
2135 self.cleanup(path)
2136
2137 def _return_formatted(self, kwargs):
2138
2139 self.add_path_info(kwargs)
2140
2141 if 'invocation' not in kwargs:
2142 kwargs['invocation'] = {'module_args': self.params}
2143
2144 if 'warnings' in kwargs:
2145 if isinstance(kwargs['warnings'], list):
2146 for w in kwargs['warnings']:
2147 self.warn(w)
2148 else:
2149 self.warn(kwargs['warnings'])
2150
2151 if self._warnings:
2152 kwargs['warnings'] = self._warnings
2153
2154 if 'deprecations' in kwargs:
2155 if isinstance(kwargs['deprecations'], list):
2156 for d in kwargs['deprecations']:
2157 if isinstance(d, SEQUENCETYPE) and len(d) == 2:
2158 self.deprecate(d[0], version=d[1])
2159 elif isinstance(d, Mapping):
2160 self.deprecate(d['msg'], version=d.get('version', None))
2161 else:
2162 self.deprecate(d)
2163 else:
2164 self.deprecate(kwargs['deprecations'])
2165
2166 if self._deprecations:
2167 kwargs['deprecations'] = self._deprecations
2168
2169 kwargs = remove_values(kwargs, self.no_log_values)
2170 print('\n%s' % self.jsonify(kwargs))
2171
2172 def exit_json(self, **kwargs):
2173 ''' return from the module, without error '''
2174
2175 self.do_cleanup_files()
2176 self._return_formatted(kwargs)
2177 sys.exit(0)
2178
2179 def fail_json(self, **kwargs):
2180 ''' return from the module, with an error message '''
2181
2182 if 'msg' not in kwargs:
2183 raise AssertionError("implementation error -- msg to explain the error is required")
2184 kwargs['failed'] = True
2185
2186 # Add traceback if debug or high verbosity and it is missing
2187 # NOTE: Badly named as exception, it really always has been a traceback
2188 if 'exception' not in kwargs and sys.exc_info()[2] and (self._debug or self._verbosity >= 3):
2189 if PY2:
2190 # On Python 2 this is the last (stack frame) exception and as such may be unrelated to the failure
2191 kwargs['exception'] = 'WARNING: The below traceback may *not* be related to the actual failure.\n' +\
2192 ''.join(traceback.format_tb(sys.exc_info()[2]))
2193 else:
2194 kwargs['exception'] = ''.join(traceback.format_tb(sys.exc_info()[2]))
2195
2196 self.do_cleanup_files()
2197 self._return_formatted(kwargs)
2198 sys.exit(1)
2199
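
A sketch of how a module's main() typically ends, built on the two methods above; exactly one of them runs and each terminates the process. The argument names are invented:

import os

from ansible.module_utils.basic import AnsibleModule


def main():
    module = AnsibleModule(argument_spec=dict(path=dict(type='path', required=True)))
    path = module.params['path']
    if not os.path.exists(path):
        module.fail_json(msg='path %s does not exist' % path)
    module.exit_json(changed=False, path=path)
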
2200 def fail_on_missing_params(self, required_params=None):
2201 if not required_params:
2202 return
2203 try:
2204 check_missing_parameters(self.params, required_params)
2205 except TypeError as e:
2206 self.fail_json(msg=to_native(e))
2207
2208 def digest_from_file(self, filename, algorithm):
2209 ''' Return hex digest of local file for a digest_method specified by name, or None if file is not present. '''
2210 b_filename = to_bytes(filename, errors='surrogate_or_strict')
2211
2212 if not os.path.exists(b_filename):
2213 return None
2214 if os.path.isdir(b_filename):
2215 self.fail_json(msg="attempted to take checksum of directory: %s" % filename)
2216
2217 # preserve old behaviour where the third parameter was a hash algorithm object
2218 if hasattr(algorithm, 'hexdigest'):
2219 digest_method = algorithm
2220 else:
2221 try:
2222 digest_method = AVAILABLE_HASH_ALGORITHMS[algorithm]()
2223 except KeyError:
2224 self.fail_json(msg="Could not hash file '%s' with algorithm '%s'. Available algorithms: %s" %
2225 (filename, algorithm, ', '.join(AVAILABLE_HASH_ALGORITHMS)))
2226
2227 blocksize = 64 * 1024
2228 infile = open(os.path.realpath(b_filename), 'rb')
2229 block = infile.read(blocksize)
2230 while block:
2231 digest_method.update(block)
2232 block = infile.read(blocksize)
2233 infile.close()
2234 return digest_method.hexdigest()
2235
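
Usage sketch, assuming a module instance; the file names are invented and the algorithm name must be one of AVAILABLE_HASH_ALGORITHMS:

# digest_from_file() returns None when the file does not exist yet
before = module.digest_from_file('/etc/myapp.conf', 'sha256')
after = module.sha256('/tmp/rendered.conf')
changed = before != after
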
2236 def md5(self, filename):
2237 ''' Return MD5 hex digest of local file using digest_from_file().
2238
2239 Do not use this function unless you have no other choice for:
2240 1) Optional backwards compatibility
2241 2) Compatibility with a third party protocol
2242
2243 This function will not work on systems complying with FIPS-140-2.
2244
2245 Most uses of this function can use the module.sha1 function instead.
2246 '''
2247 if 'md5' not in AVAILABLE_HASH_ALGORITHMS:
2248 raise ValueError('MD5 not available. Possibly running in FIPS mode')
2249 return self.digest_from_file(filename, 'md5')
2250
2251 def sha1(self, filename):
2252 ''' Return SHA1 hex digest of local file using digest_from_file(). '''
2253 return self.digest_from_file(filename, 'sha1')
2254
2255 def sha256(self, filename):
2256 ''' Return SHA-256 hex digest of local file using digest_from_file(). '''
2257 return self.digest_from_file(filename, 'sha256')
2258
2259 def backup_local(self, fn):
2260 '''make a date-marked backup of the specified file, return True or False on success or failure'''
2261
2262 backupdest = ''
2263 if os.path.exists(fn):
2264 # backups named basename.PID.YYYY-MM-DD@HH:MM:SS~
2265 ext = time.strftime("%Y-%m-%d@%H:%M:%S~", time.localtime(time.time()))
2266 backupdest = '%s.%s.%s' % (fn, os.getpid(), ext)
2267
2268 try:
2269 self.preserved_copy(fn, backupdest)
2270 except (shutil.Error, IOError) as e:
2271 self.fail_json(msg='Could not make backup of %s to %s: %s' % (fn, backupdest, to_native(e)))
2272
2273 return backupdest
2274
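
Sketch of the usual backup-before-replace pattern built on backup_local(); the parameter and path names are invented:

backup_file = None
if module.params.get('backup') and os.path.exists(dest):
    # returns something like dest.<pid>.2021-10-11@12:00:00~
    backup_file = module.backup_local(dest)
module.atomic_move(tmp_path, dest)
module.exit_json(changed=True, backup_file=backup_file)
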
2275 def cleanup(self, tmpfile):
2276 if os.path.exists(tmpfile):
2277 try:
2278 os.unlink(tmpfile)
2279 except OSError as e:
2280 sys.stderr.write("could not cleanup %s: %s" % (tmpfile, to_native(e)))
2281
2282 def preserved_copy(self, src, dest):
2283 """Copy a file with preserved ownership, permissions and context"""
2284
2285 # shutil.copy2(src, dst)
2286 # Similar to shutil.copy(), but metadata is copied as well - in fact,
2287 # this is just shutil.copy() followed by copystat(). This is similar
2288 # to the Unix command cp -p.
2289 #
2290 # shutil.copystat(src, dst)
2291 # Copy the permission bits, last access time, last modification time,
2292 # and flags from src to dst. The file contents, owner, and group are
2293 # unaffected. src and dst are path names given as strings.
2294
2295 shutil.copy2(src, dest)
2296
2297 # Set the context
2298 if self.selinux_enabled():
2299 context = self.selinux_context(src)
2300 self.set_context_if_different(dest, context, False)
2301
2302 # chown it
2303 try:
2304 dest_stat = os.stat(src)
2305 tmp_stat = os.stat(dest)
2306 if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
2307 os.chown(dest, dest_stat.st_uid, dest_stat.st_gid)
2308 except OSError as e:
2309 if e.errno != errno.EPERM:
2310 raise
2311
2312 # Set the attributes
2313 current_attribs = self.get_file_attributes(src)
2314 current_attribs = current_attribs.get('attr_flags', '')
2315 self.set_attributes_if_different(dest, current_attribs, True)
2316
2317 def atomic_move(self, src, dest, unsafe_writes=False):
2318 '''atomically move src to dest, copying attributes from dest, returns true on success
2319 it uses os.rename to ensure this as it is an atomic operation, rest of the function is
2320 to work around limitations, corner cases and ensure selinux context is saved if possible'''
2321 context = None
2322 dest_stat = None
2323 b_src = to_bytes(src, errors='surrogate_or_strict')
2324 b_dest = to_bytes(dest, errors='surrogate_or_strict')
2325 if os.path.exists(b_dest):
2326 try:
2327 dest_stat = os.stat(b_dest)
2328
2329 # copy mode and ownership
2330 os.chmod(b_src, dest_stat.st_mode & PERM_BITS)
2331 os.chown(b_src, dest_stat.st_uid, dest_stat.st_gid)
2332
2333 # try to copy flags if possible
2334 if hasattr(os, 'chflags') and hasattr(dest_stat, 'st_flags'):
2335 try:
2336 os.chflags(b_src, dest_stat.st_flags)
2337 except OSError as e:
2338 for err in 'EOPNOTSUPP', 'ENOTSUP':
2339 if hasattr(errno, err) and e.errno == getattr(errno, err):
2340 break
2341 else:
2342 raise
2343 except OSError as e:
2344 if e.errno != errno.EPERM:
2345 raise
2346 if self.selinux_enabled():
2347 context = self.selinux_context(dest)
2348 else:
2349 if self.selinux_enabled():
2350 context = self.selinux_default_context(dest)
2351
2352 creating = not os.path.exists(b_dest)
2353
2354 try:
2355 # Optimistically try a rename, solves some corner cases and can avoid useless work, throws exception if not atomic.
2356 os.rename(b_src, b_dest)
2357 except (IOError, OSError) as e:
2358 if e.errno not in [errno.EPERM, errno.EXDEV, errno.EACCES, errno.ETXTBSY, errno.EBUSY]:
2359 # only try workarounds for errno 18 (cross device), 1 (not permitted), 13 (permission denied)
2360 # and 26 (text file busy) which happens on vagrant synced folders and other 'exotic' non posix file systems
2361 self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, to_native(e)), exception=traceback.format_exc())
2362 else:
2363 # Use bytes here. In the shippable CI, this fails with
2364 # a UnicodeError with surrogateescape'd strings for an unknown
2365 # reason (doesn't happen in a local Ubuntu16.04 VM)
2366 b_dest_dir = os.path.dirname(b_dest)
2367 b_suffix = os.path.basename(b_dest)
2368 error_msg = None
2369 tmp_dest_name = None
2370 try:
2371 tmp_dest_fd, tmp_dest_name = tempfile.mkstemp(prefix=b'.ansible_tmp', dir=b_dest_dir, suffix=b_suffix)
2372 except (OSError, IOError) as e:
2373 error_msg = 'The destination directory (%s) is not writable by the current user. Error was: %s' % (os.path.dirname(dest), to_native(e))
2374 except TypeError:
2375 # We expect that this is happening because python3.4.x and
2376 # below can't handle byte strings in mkstemp().
2377 # Traceback would end in something like:
2378 # file = _os.path.join(dir, pre + name + suf)
2379 # TypeError: can't concat bytes to str
2380 error_msg = ('Failed creating tmp file for atomic move. This usually happens when using Python3 less than Python3.5. '
2381 'Please use Python2.x or Python3.5 or greater.')
2382 finally:
2383 if error_msg:
2384 if unsafe_writes:
2385 self._unsafe_writes(b_src, b_dest)
2386 else:
2387 self.fail_json(msg=error_msg, exception=traceback.format_exc())
2388
2389 if tmp_dest_name:
2390 b_tmp_dest_name = to_bytes(tmp_dest_name, errors='surrogate_or_strict')
2391
2392 try:
2393 try:
2394 # close tmp file handle before file operations to prevent text file busy errors on vboxfs synced folders (windows host)
2395 os.close(tmp_dest_fd)
2396 # leaves tmp file behind when sudo and not root
2397 try:
2398 shutil.move(b_src, b_tmp_dest_name)
2399 except OSError:
2400 # cleanup will happen by 'rm' of tmpdir
2401 # copy2 will preserve some metadata
2402 shutil.copy2(b_src, b_tmp_dest_name)
2403
2404 if self.selinux_enabled():
2405 self.set_context_if_different(
2406 b_tmp_dest_name, context, False)
2407 try:
2408 tmp_stat = os.stat(b_tmp_dest_name)
2409 if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
2410 os.chown(b_tmp_dest_name, dest_stat.st_uid, dest_stat.st_gid)
2411 except OSError as e:
2412 if e.errno != errno.EPERM:
2413 raise
2414 try:
2415 os.rename(b_tmp_dest_name, b_dest)
2416 except (shutil.Error, OSError, IOError) as e:
2417 if unsafe_writes and e.errno == errno.EBUSY:
2418 self._unsafe_writes(b_tmp_dest_name, b_dest)
2419 else:
2420 self.fail_json(msg='Unable to make %s into to %s, failed final rename from %s: %s' %
2421 (src, dest, b_tmp_dest_name, to_native(e)), exception=traceback.format_exc())
2422 except (shutil.Error, OSError, IOError) as e:
2423 if unsafe_writes:
2424 self._unsafe_writes(b_src, b_dest)
2425 else:
2426 self.fail_json(msg='Failed to replace file: %s to %s: %s' % (src, dest, to_native(e)), exception=traceback.format_exc())
2427 finally:
2428 self.cleanup(b_tmp_dest_name)
2429
2430 if creating:
2431 # make sure the file has the correct permissions
2432 # based on the current value of umask
2433 umask = os.umask(0)
2434 os.umask(umask)
2435 os.chmod(b_dest, DEFAULT_PERM & ~umask)
2436 try:
2437 os.chown(b_dest, os.geteuid(), os.getegid())
2438 except OSError:
2439 # We're okay with trying our best here. If the user is not
2440 # root (or old Unices) they won't be able to chown.
2441 pass
2442
2443 if self.selinux_enabled():
2444 # rename might not preserve context
2445 self.set_context_if_different(dest, context, False)
2446
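
Sketch of the write-to-a-tempfile-then-atomic_move() idiom that file-editing modules typically use with this method; the destination path and content are invented, and the helpers are the ones this file already imports:

import os
import tempfile

from ansible.module_utils._text import to_bytes

fd, tmp_path = tempfile.mkstemp(dir=module.tmpdir)
with os.fdopen(fd, 'wb') as f:
    f.write(to_bytes(new_content))
# copies owner/mode/SELinux context from an existing destination before the rename
module.atomic_move(tmp_path, '/etc/myapp.conf',
                   unsafe_writes=module.params.get('unsafe_writes', False))
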
2447 def _unsafe_writes(self, src, dest):
2448 # sadly there are some situations where we cannot ensure atomicity, but only if
2449 # the user insists and we get the appropriate error we update the file unsafely
2450 try:
2451 out_dest = in_src = None
2452 try:
2453 out_dest = open(dest, 'wb')
2454 in_src = open(src, 'rb')
2455 shutil.copyfileobj(in_src, out_dest)
2456 finally: # assuring closed files in 2.4 compatible way
2457 if out_dest:
2458 out_dest.close()
2459 if in_src:
2460 in_src.close()
2461 except (shutil.Error, OSError, IOError) as e:
2462 self.fail_json(msg='Could not write data to file (%s) from (%s): %s' % (dest, src, to_native(e)),
2463 exception=traceback.format_exc())
2464
2465 def _clean_args(self, args):
2466
2467 if not self._clean:
2468 # create a printable version of the command for use in reporting later,
2469 # which strips out things like passwords from the args list
2470 to_clean_args = args
2471 if PY2:
2472 if isinstance(args, text_type):
2473 to_clean_args = to_bytes(args)
2474 else:
2475 if isinstance(args, binary_type):
2476 to_clean_args = to_text(args)
2477 if isinstance(args, (text_type, binary_type)):
2478 to_clean_args = shlex.split(to_clean_args)
2479
2480 clean_args = []
2481 is_passwd = False
2482 for arg in (to_native(a) for a in to_clean_args):
2483 if is_passwd:
2484 is_passwd = False
2485 clean_args.append('********')
2486 continue
2487 if PASSWD_ARG_RE.match(arg):
2488 sep_idx = arg.find('=')
2489 if sep_idx > -1:
2490 clean_args.append('%s=********' % arg[:sep_idx])
2491 continue
2492 else:
2493 is_passwd = True
2494 arg = heuristic_log_sanitize(arg, self.no_log_values)
2495 clean_args.append(arg)
2496 self._clean = ' '.join(shlex_quote(arg) for arg in clean_args)
2497
2498 return self._clean
2499
2500 def _restore_signal_handlers(self):
2501 # Reset SIGPIPE to SIG_DFL, otherwise in Python2.7 it gets ignored in subprocesses.
2502 if PY2 and sys.platform != 'win32':
2503 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
2504
2505 def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None,
2506 use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict',
2507 expand_user_and_vars=True, pass_fds=None, before_communicate_callback=None):
2508 '''
2509 Execute a command, returns rc, stdout, and stderr.
2510
2511 :arg args: is the command to run
2512 * If args is a list, the command will be run with shell=False.
2513 * If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
2514 * If args is a string and use_unsafe_shell=True it runs with shell=True.
2515 :kw check_rc: Whether to call fail_json in case of non zero RC.
2516 Default False
2517 :kw close_fds: See documentation for subprocess.Popen(). Default True
2518 :kw executable: See documentation for subprocess.Popen(). Default None
2519 :kw data: If given, information to write to the stdin of the command
2520 :kw binary_data: If False, append a newline to the data. Default False
2521 :kw path_prefix: If given, additional path to find the command in.
2522 This adds to the PATH environment variable so helper commands in
2523 the same directory can also be found
2524 :kw cwd: If given, working directory to run the command inside
2525 :kw use_unsafe_shell: See `args` parameter. Default False
2526 :kw prompt_regex: Regex string (not a compiled regex) which can be
2527 used to detect prompts in the stdout which would otherwise cause
2528 the execution to hang (especially if no input data is specified)
2529 :kw environ_update: dictionary to *update* os.environ with
2530 :kw umask: Umask to be used when running the command. Default None
2531 :kw encoding: Since we return native strings, on python3 we need to
2532 know the encoding to use to transform from bytes to text. If you
2533 want to always get bytes back, use encoding=None. The default is
2534 "utf-8". This does not affect transformation of strings given as
2535 args.
2536 :kw errors: Since we return native strings, on python3 we need to
2537 transform stdout and stderr from bytes to text. If the bytes are
2538 undecodable in the ``encoding`` specified, then use this error
2539 handler to deal with them. The default is ``surrogate_or_strict``
2540 which means that the bytes will be decoded using the
2541 surrogateescape error handler if available (available on all
2542 python3 versions we support) otherwise a UnicodeError traceback
2543 will be raised. This does not affect transformations of strings
2544 given as args.
2545 :kw expand_user_and_vars: When ``use_unsafe_shell=False`` this argument
2546 dictates whether ``~`` is expanded in paths and environment variables
2547 are expanded before running the command. When ``True`` a string such as
2548 ``$SHELL`` will be expanded regardless of escaping. When ``False`` and
2549 ``use_unsafe_shell=False`` no path or variable expansion will be done.
2550 :kw pass_fds: When running on Python 3 this argument
2551 dictates which file descriptors should be passed
2552 to an underlying ``Popen`` constructor. On Python 2, this will
2553 set ``close_fds`` to False.
2554 :kw before_communicate_callback: This function will be called
2555 after ``Popen`` object will be created
2556 but before communicating to the process.
2557 (``Popen`` object will be passed to callback as a first argument)
2558 :returns: A 3-tuple of return code (integer), stdout (native string),
2559 and stderr (native string). On python2, stdout and stderr are both
2560 byte strings. On python3, stdout and stderr are text strings converted
2561 according to the encoding and errors parameters. If you want byte
2562 strings on python3, use encoding=None to turn decoding to text off.
2563 '''
2564 # used by clean args later on
2565 self._clean = None
2566
2567 if not isinstance(args, (list, binary_type, text_type)):
2568 msg = "Argument 'args' to run_command must be list or string"
2569 self.fail_json(rc=257, cmd=args, msg=msg)
2570
2571 shell = False
2572 if use_unsafe_shell:
2573
2574 # stringify args for unsafe/direct shell usage
2575 if isinstance(args, list):
2576 args = b" ".join([to_bytes(shlex_quote(x), errors='surrogate_or_strict') for x in args])
2577 else:
2578 args = to_bytes(args, errors='surrogate_or_strict')
2579
2580 # not set explicitly, check if set by controller
2581 if executable:
2582 executable = to_bytes(executable, errors='surrogate_or_strict')
2583 args = [executable, b'-c', args]
2584 elif self._shell not in (None, '/bin/sh'):
2585 args = [to_bytes(self._shell, errors='surrogate_or_strict'), b'-c', args]
2586 else:
2587 shell = True
2588 else:
2589 # ensure args are a list
2590 if isinstance(args, (binary_type, text_type)):
2591 # On python2.6 and below, shlex has problems with text type
2592 # On python3, shlex needs a text type.
2593 if PY2:
2594 args = to_bytes(args, errors='surrogate_or_strict')
2595 elif PY3:
2596 args = to_text(args, errors='surrogateescape')
2597 args = shlex.split(args)
2598
2599 # expand ``~`` in paths, and all environment vars
2600 if expand_user_and_vars:
2601 args = [to_bytes(os.path.expanduser(os.path.expandvars(x)), errors='surrogate_or_strict') for x in args if x is not None]
2602 else:
2603 args = [to_bytes(x, errors='surrogate_or_strict') for x in args if x is not None]
2604
2605 prompt_re = None
2606 if prompt_regex:
2607 if isinstance(prompt_regex, text_type):
2608 if PY3:
2609 prompt_regex = to_bytes(prompt_regex, errors='surrogateescape')
2610 elif PY2:
2611 prompt_regex = to_bytes(prompt_regex, errors='surrogate_or_strict')
2612 try:
2613 prompt_re = re.compile(prompt_regex, re.MULTILINE)
2614 except re.error:
2615 self.fail_json(msg="invalid prompt regular expression given to run_command")
2616
2617 rc = 0
2618 msg = None
2619 st_in = None
2620
2621 # Manipulate the environ we'll send to the new process
2622 old_env_vals = {}
2623 # We can set this from both an attribute and per call
2624 for key, val in self.run_command_environ_update.items():
2625 old_env_vals[key] = os.environ.get(key, None)
2626 os.environ[key] = val
2627 if environ_update:
2628 for key, val in environ_update.items():
2629 old_env_vals[key] = os.environ.get(key, None)
2630 os.environ[key] = val
2631 if path_prefix:
2632 old_env_vals['PATH'] = os.environ['PATH']
2633 os.environ['PATH'] = "%s:%s" % (path_prefix, os.environ['PATH'])
2634
2635 # If using test-module.py and explode, the remote lib path will resemble:
2636 # /tmp/test_module_scratch/debug_dir/ansible/module_utils/basic.py
2637 # If using ansible or ansible-playbook with a remote system:
2638 # /tmp/ansible_vmweLQ/ansible_modlib.zip/ansible/module_utils/basic.py
2639
2640 # Clean out python paths set by ansiballz
2641 if 'PYTHONPATH' in os.environ:
2642 pypaths = os.environ['PYTHONPATH'].split(':')
2643 pypaths = [x for x in pypaths
2644 if not x.endswith('/ansible_modlib.zip') and
2645 not x.endswith('/debug_dir')]
2646 os.environ['PYTHONPATH'] = ':'.join(pypaths)
2647 if not os.environ['PYTHONPATH']:
2648 del os.environ['PYTHONPATH']
2649
2650 if data:
2651 st_in = subprocess.PIPE
2652
2653 kwargs = dict(
2654 executable=executable,
2655 shell=shell,
2656 close_fds=close_fds,
2657 stdin=st_in,
2658 stdout=subprocess.PIPE,
2659 stderr=subprocess.PIPE,
2660 preexec_fn=self._restore_signal_handlers,
2661 )
2662 if PY3 and pass_fds:
2663 kwargs["pass_fds"] = pass_fds
2664 elif PY2 and pass_fds:
2665 kwargs['close_fds'] = False
2666
2667 # store the pwd
2668 prev_dir = os.getcwd()
2669
2670 # make sure we're in the right working directory
2671 if cwd and os.path.isdir(cwd):
2672 cwd = to_bytes(os.path.abspath(os.path.expanduser(cwd)), errors='surrogate_or_strict')
2673 kwargs['cwd'] = cwd
2674 try:
2675 os.chdir(cwd)
2676 except (OSError, IOError) as e:
2677 self.fail_json(rc=e.errno, msg="Could not open %s, %s" % (cwd, to_native(e)),
2678 exception=traceback.format_exc())
2679
2680 old_umask = None
2681 if umask:
2682 old_umask = os.umask(umask)
2683
2684 try:
2685 if self._debug:
2686 self.log('Executing: ' + self._clean_args(args))
2687 cmd = subprocess.Popen(args, **kwargs)
2688 if before_communicate_callback:
2689 before_communicate_callback(cmd)
2690
2691 # the communication logic here is essentially taken from that
2692 # of the _communicate() function in ssh.py
2693
2694 stdout = b''
2695 stderr = b''
2696 try:
2697 selector = selectors.DefaultSelector()
2698 except OSError:
2699 # Failed to detect default selector for the given platform
2700 # Select PollSelector which is supported by major platforms
2701 selector = selectors.PollSelector()
2702
2703 selector.register(cmd.stdout, selectors.EVENT_READ)
2704 selector.register(cmd.stderr, selectors.EVENT_READ)
2705 if os.name == 'posix':
2706 fcntl.fcntl(cmd.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(cmd.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
2707 fcntl.fcntl(cmd.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(cmd.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
2708
2709 if data:
2710 if not binary_data:
2711 data += '\n'
2712 if isinstance(data, text_type):
2713 data = to_bytes(data)
2714 cmd.stdin.write(data)
2715 cmd.stdin.close()
2716
2717 while True:
2718 events = selector.select(1)
2719 for key, event in events:
2720 b_chunk = key.fileobj.read()
2721 if b_chunk == b(''):
2722 selector.unregister(key.fileobj)
2723 if key.fileobj == cmd.stdout:
2724 stdout += b_chunk
2725 elif key.fileobj == cmd.stderr:
2726 stderr += b_chunk
2727 # if we're checking for prompts, do it now
2728 if prompt_re:
2729 if prompt_re.search(stdout) and not data:
2730 if encoding:
2731 stdout = to_native(stdout, encoding=encoding, errors=errors)
2732 return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
2733 # only break out if no pipes are left to read or
2734 # the pipes are completely read and
2735 # the process is terminated
2736 if (not events or not selector.get_map()) and cmd.poll() is not None:
2737 break
2738 # No pipes are left to read but process is not yet terminated
2739 # Only then it is safe to wait for the process to be finished
2740 # NOTE: Actually cmd.poll() is always None here if no selectors are left
2741 elif not selector.get_map() and cmd.poll() is None:
2742 cmd.wait()
2743 # The process is terminated. Since no pipes to read from are
2744 # left, there is no need to call select() again.
2745 break
2746
2747 cmd.stdout.close()
2748 cmd.stderr.close()
2749 selector.close()
2750
2751 rc = cmd.returncode
2752 except (OSError, IOError) as e:
2753 self.log("Error Executing CMD:%s Exception:%s" % (self._clean_args(args), to_native(e)))
2754 self.fail_json(rc=e.errno, msg=to_native(e), cmd=self._clean_args(args))
2755 except Exception as e:
2756 self.log("Error Executing CMD:%s Exception:%s" % (self._clean_args(args), to_native(traceback.format_exc())))
2757 self.fail_json(rc=257, msg=to_native(e), exception=traceback.format_exc(), cmd=self._clean_args(args))
2758
2759 # Restore env settings
2760 for key, val in old_env_vals.items():
2761 if val is None:
2762 del os.environ[key]
2763 else:
2764 os.environ[key] = val
2765
2766 if old_umask:
2767 os.umask(old_umask)
2768
2769 if rc != 0 and check_rc:
2770 msg = heuristic_log_sanitize(stderr.rstrip(), self.no_log_values)
2771 self.fail_json(cmd=self._clean_args(args), rc=rc, stdout=stdout, stderr=stderr, msg=msg)
2772
2773 # reset the pwd
2774 os.chdir(prev_dir)
2775
2776 if encoding is not None:
2777 return (rc, to_native(stdout, encoding=encoding, errors=errors),
2778 to_native(stderr, encoding=encoding, errors=errors))
2779
2780 return (rc, stdout, stderr)
2781
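
Usage sketches for run_command(), assuming a module instance; the commands and the prompt pattern are illustrative:

# a list runs without a shell; check_rc=True turns a non-zero exit into fail_json()
rc, out, err = module.run_command(['systemctl', 'restart', 'myapp'], check_rc=True)

# a string with use_unsafe_shell=True goes through the shell
rc, out, err = module.run_command('wc -l < /etc/passwd', use_unsafe_shell=True)

# prompt_regex returns rc=257 if the command asks for input and no data was supplied
rc, out, err = module.run_command(['some-tool', 'init'], prompt_regex=r'[Pp]assword:')
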
2782 def append_to_file(self, filename, str):
2783 filename = os.path.expandvars(os.path.expanduser(filename))
2784 fh = open(filename, 'a')
2785 fh.write(str)
2786 fh.close()
2787
2788 def bytes_to_human(self, size):
2789 return bytes_to_human(size)
2790
2791 # for backwards compatibility
2792 pretty_bytes = bytes_to_human
2793
2794 def human_to_bytes(self, number, isbits=False):
2795 return human_to_bytes(number, isbits)
2796
2797 #
2798 # Backwards compat
2799 #
2800
2801 # In 2.0, moved from inside the module to the toplevel
2802 is_executable = is_executable
2803
2804 @staticmethod
2805 def get_buffer_size(fd):
2806 try:
2807 # 1032 == FZ_GETPIPE_SZ
2808 buffer_size = fcntl.fcntl(fd, 1032)
2809 except Exception:
2810 try:
2811 # not as exact as above, but should be good enough for most platforms that fail the previous call
2812 buffer_size = select.PIPE_BUF
2813 except Exception:
2814 buffer_size = 9000 # use sane default JIC
2815
2816 return buffer_size
2817
2818
2818
2819 def get_module_path():
2820 return os.path.dirname(os.path.realpath(__file__))