"Fossies" - the Fresh Open Source Software Archive

Member "tacker-2.0.0/tacker/common/utils.py" (16 Oct 2019, 15185 Bytes) of package /linux/misc/openstack/tacker-2.0.0.tar.gz:


As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) Python source code syntax highlighting (style: standard) with prefixed line numbers. Alternatively you can here view or download the uninterpreted source code file. For more information about "utils.py" see the Fossies "Dox" file reference documentation and the latest Fossies "Diffs" side-by-side code changes report: 1.0.0_vs_2.0.0.

# Copyright 2011, VMware, Inc.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
#
# Borrowed from the nova code base; more utilities will be added/borrowed
# as and when needed.

   19 """Utilities and helper functions."""
   20 
   21 import functools
   22 import inspect
   23 import logging as std_logging
   24 import os
   25 import random
   26 import re
   27 import signal
   28 import socket
   29 import string
   30 import sys
   31 
   32 from eventlet.green import subprocess
   33 import netaddr
   34 from oslo_concurrency import lockutils
   35 from oslo_config import cfg
   36 from oslo_log import log as logging
   37 from oslo_utils import excutils
   38 from oslo_utils import importutils
   39 from stevedore import driver
   40 try:
   41     from eventlet import sleep
   42 except ImportError:
   43     from time import sleep
   44 
   45 from tacker._i18n import _
   46 from tacker.common import constants as q_const
   47 from tacker.common import exceptions
   48 from tacker.common import safe_utils
   49 
   50 
TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
LOG = logging.getLogger(__name__)
SYNCHRONIZED_PREFIX = 'tacker-'
MEM_UNITS = {
    "MB": {
        "MB": {
            "op": "*",
            "val": "1"
        },
        "GB": {
            "op": "/",
            "val": "1024"
        }
    },
    "GB": {
        "MB": {
            "op": "*",
            "val": "1024"
        },
        "GB": {
            "op": "*",
            "val": "1"
        }
    }
}
CONF = cfg.CONF
synchronized = lockutils.synchronized_with_prefix(SYNCHRONIZED_PREFIX)
MAX_COOP_READER_BUFFER_SIZE = 134217728

if hasattr(inspect, 'getfullargspec'):
    getargspec = inspect.getfullargspec
else:
    getargspec = inspect.getargspec


def find_config_file(options, config_file):
    """Return the first config file found.

    We search for the paste config file in the following order:
    * If --config-file option is used, use that
    * Search for the configuration files via common cfg directories
    :retval Full path to config file, or None if no config file found
    """
    fix_path = lambda p: os.path.abspath(os.path.expanduser(p))
    if options.get('config_file'):
        if os.path.exists(options['config_file']):
            return fix_path(options['config_file'])

    dir_to_common = os.path.dirname(os.path.abspath(__file__))
    root = os.path.join(dir_to_common, '..', '..', '..', '..')
    # Handle standard directory search for the config file
    config_file_dirs = [fix_path(os.path.join(os.getcwd(), 'etc')),
                        fix_path(os.path.join('~', '.tacker-venv', 'etc',
                                              'tacker')),
                        fix_path('~'),
                        os.path.join(cfg.CONF.state_path, 'etc'),
                        os.path.join(cfg.CONF.state_path, 'etc', 'tacker'),
                        fix_path(os.path.join('~', '.local',
                                              'etc', 'tacker')),
                        '/usr/etc/tacker',
                        '/usr/local/etc/tacker',
                        '/etc/tacker/',
                        '/etc']

    if 'plugin' in options:
        config_file_dirs = [
            os.path.join(x, 'tacker', 'plugins', options['plugin'])
            for x in config_file_dirs
        ]

    if os.path.exists(os.path.join(root, 'plugins')):
        plugins = [fix_path(os.path.join(root, 'plugins', p, 'etc'))
                   for p in os.listdir(os.path.join(root, 'plugins'))]
        plugins = [p for p in plugins if os.path.isdir(p)]
        config_file_dirs.extend(plugins)

    for cfg_dir in config_file_dirs:
        cfg_file = os.path.join(cfg_dir, config_file)
        if os.path.exists(cfg_file):
            return cfg_file

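# Illustrative usage (editor's sketch, not part of the original module):
# locate 'tacker.conf' using the search order described above.
#
#     options = {'config_file': '/etc/tacker/tacker.conf'}
#     path = find_config_file(options, 'tacker.conf')
#     # -> '/etc/tacker/tacker.conf' if it exists, otherwise the first
#     #    match in the standard directories, or None if nothing matches.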

def _subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)


def subprocess_popen(args, stdin=None, stdout=None, stderr=None, shell=False,
                     env=None):
    return subprocess.Popen(args, shell=shell, stdin=stdin, stdout=stdout,
                            stderr=stderr, preexec_fn=_subprocess_setup,
                            close_fds=True, env=env)

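# Illustrative usage (editor's sketch, not part of the original module):
# run a command in a child process with SIGPIPE reset to the default.
#
#     proc = subprocess_popen(['ls', '-l'], stdout=subprocess.PIPE)
#     out, _err = proc.communicate()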

def get_hostname():
    return socket.gethostname()


def dict2tuple(d):
    items = list(d.items())
    items.sort()
    return tuple(items)


def log_opt_values(log):
    cfg.CONF.log_opt_values(log, std_logging.DEBUG)


def is_valid_vlan_tag(vlan):
    return q_const.MIN_VLAN_TAG <= vlan <= q_const.MAX_VLAN_TAG


def is_valid_ipv4(address):
    """Verify that address represents a valid IPv4 address."""
    try:
        return netaddr.valid_ipv4(address)
    except Exception:
        return False

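# Illustrative examples (editor's sketch, not part of the original module):
#
#     >>> is_valid_ipv4('192.168.0.1')
#     True
#     >>> is_valid_ipv4('not-an-ip')
#     False
#     >>> dict2tuple({'b': 2, 'a': 1})
#     (('a', 1), ('b', 2))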

def change_memory_unit(mem, to):
    """Convert the memory value (mem) to the unit specified by 'to'.

    If no unit is given in 'mem', it is assumed to be MB. Note that the
    division path uses true division, so the result may be a float.
    """
    mem = str(mem) + " MB" if str(mem).isdigit() else mem.upper()
    for unit, value in MEM_UNITS.items():
        mem_arr = mem.split(unit)
        if len(mem_arr) < 2:
            continue
        return eval(mem_arr[0] +
                    MEM_UNITS[unit][to]["op"] +
                    MEM_UNITS[unit][to]["val"])

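# Illustrative examples (editor's sketch): conversions are driven by the
# MEM_UNITS table above; bare digits are treated as MB.
#
#     >>> change_memory_unit('2 GB', 'MB')
#     2048
#     >>> change_memory_unit(512, 'MB')
#     512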

def load_class_by_alias_or_classname(namespace, name):
    """Load a class using a stevedore alias or the class name.

    Load the class using the stevedore driver manager
    :param namespace: namespace where the alias is defined
    :param name: alias or class name of the class to be loaded
    :returns: the class, if it can be loaded
    :raises ImportError: if the class cannot be loaded
    """
    if not name:
        LOG.error("Alias or class name is not set")
        raise ImportError(_("Class not found."))
    try:
        # Try to resolve the class by alias
        mgr = driver.DriverManager(namespace, name)
        class_to_load = mgr.driver
    except RuntimeError:
        e1_info = sys.exc_info()
        # Fall back to the class name
        try:
            class_to_load = importutils.import_class(name)
        except (ImportError, ValueError):
            LOG.error("Error loading class by alias",
                      exc_info=e1_info)
            LOG.error("Error loading class by class name",
                      exc_info=True)
            raise ImportError(_("Class not found."))
    return class_to_load

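# Illustrative usage (editor's sketch; the namespace and alias below are
# hypothetical):
#
#     cls = load_class_by_alias_or_classname('tacker.example.drivers',
#                                            'noop')
#     # Falls back to a dotted class path when no alias matches:
#     cls = load_class_by_alias_or_classname(
#         'tacker.example.drivers', 'tacker.common.utils.LimitingReader')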

def deep_update(orig_dict, new_dict):
    for key, value in new_dict.items():
        if isinstance(value, dict):
            if key in orig_dict and isinstance(orig_dict[key], dict):
                deep_update(orig_dict[key], value)
                continue

        orig_dict[key] = value

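# Illustrative example (editor's sketch): nested dicts are merged in place,
# other values are overwritten.
#
#     >>> d = {'a': {'x': 1}, 'b': 2}
#     >>> deep_update(d, {'a': {'y': 2}, 'b': 3})
#     >>> d
#     {'a': {'x': 1, 'y': 2}, 'b': 3}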

def generate_resource_name(resource, prefix='tmpl'):
    return prefix + '-' \
        + ''.join(random.SystemRandom().choice(
                  string.ascii_lowercase + string.digits)
          for _ in range(16)) \
        + '-' + resource

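# Illustrative example (editor's sketch): the result has the shape
# '<prefix>-<16 random lowercase letters/digits>-<resource>', so a call
# like generate_resource_name('vdu1') might return
# 'tmpl-a1b2c3d4e5f6g7h8-vdu1' (the middle part varies per call).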

def get_auth_url_v3(auth_url):
    if re.match('.+v3/?$', auth_url) is not None:
        return auth_url
    else:
        return '{0}/v3'.format(auth_url)

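# Illustrative examples (editor's sketch):
#
#     >>> get_auth_url_v3('http://keystone:5000')
#     'http://keystone:5000/v3'
#     >>> get_auth_url_v3('http://keystone:5000/v3')
#     'http://keystone:5000/v3'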

def none_from_string(orig_str):
    none_values = ['', 'None', 'NONE', 'null', 'NULL']
    if orig_str in none_values:
        return None
    else:
        return orig_str

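# Illustrative examples (editor's sketch):
#
#     >>> none_from_string('NULL') is None
#     True
#     >>> none_from_string('some-value')
#     'some-value'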

def expects_func_args(*args):
    def _decorator_checker(dec):
        @functools.wraps(dec)
        def _decorator(f):
            base_f = safe_utils.get_wrapped_function(f)
            argspec = getargspec(base_f)
            if argspec[1] or argspec[2] or set(args) <= set(argspec[0]):
                # NOTE (nirajsingh): We can't really tell if the correct
                # arguments will be passed when the function takes *args or
                # **kwargs, so we still carry on and hope for the best
                return dec(f)
            else:
                raise TypeError("Decorated function %(f_name)s does not "
                                "have the arguments expected by the "
                                "decorator %(d_name)s" %
                                {'f_name': base_f.__name__,
                                 'd_name': dec.__name__})

        return _decorator

    return _decorator_checker

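# Illustrative usage (editor's sketch, not part of the original module):
# the checker raises TypeError at decoration time if the wrapped function
# does not accept the expected arguments.
#
#     @expects_func_args('context')
#     def my_decorator(f):
#         @functools.wraps(f)
#         def wrapper(*args, **kwargs):
#             return f(*args, **kwargs)
#         return wrapper
#
#     @my_decorator
#     def handler(context, data):   # OK: 'context' is accepted
#         return data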

def cooperative_iter(iter):
    """Prevent eventlet thread starvation during iteration.

    Return an iterator which schedules after each
    iteration. This can prevent eventlet thread starvation.

    :param iter: an iterator to wrap
    """
    try:
        for chunk in iter:
            sleep(0)
            yield chunk
    except Exception as err:
        with excutils.save_and_reraise_exception():
            msg = _("Error: cooperative_iter exception %s") % err
            LOG.error(msg)


def cooperative_read(fd):
    """Prevent eventlet thread starvation after each read operation.

    Wrap a file descriptor's read with a partial function which schedules
    after each read. This can prevent eventlet thread starvation.

    :param fd: a file descriptor to wrap
    """
    def readfn(*args):
        result = fd.read(*args)
        sleep(0)
        return result
    return readfn

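# Illustrative usage (editor's sketch; 'image.bin' is a hypothetical file):
#
#     with open('image.bin', 'rb') as f:
#         read = cooperative_read(f)
#         chunk = read(8192)   # yields to other eventlet threads after read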

def chunkreadable(iter, chunk_size=65536):
    """Wrap a readable iterator.

    Wrap a readable iterator with a reader yielding chunks of
    a preferred size, otherwise leave the iterator unchanged.

    :param iter: an iter which may also be readable
    :param chunk_size: maximum size of chunk
    """
    return chunkiter(iter, chunk_size) if hasattr(iter, 'read') else iter


def chunkiter(fp, chunk_size=65536):
    """Iterate over a file-like object in fixed-size chunks.

    Return an iterator over a file-like object which yields fixed-size
    chunks.

    :param fp: a file-like object
    :param chunk_size: maximum size of chunk
    """
    while True:
        chunk = fp.read(chunk_size)
        if chunk:
            yield chunk
        else:
            break

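# Illustrative example (editor's sketch):
#
#     import io
#     fp = io.BytesIO(b'x' * 200000)
#     chunks = list(chunkreadable(fp))   # fp has read(), so it is chunked
#     # -> three 65536-byte chunks plus one 3392-byte remainder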

class CooperativeReader(object):
    """An eventlet thread friendly class for reading in image data.

    When accessing data either through the iterator or the read method
    we perform a sleep to allow a co-operative yield. When there is more than
    one image being uploaded/downloaded this prevents eventlet thread
    starvation, i.e. it allows all threads to be scheduled periodically
    rather than having the same thread be continuously active.
    """
    def __init__(self, fd):
        """Construct a CooperativeReader object.

        :param fd: Underlying image file object

        """
        self.fd = fd
        self.iterator = None
        # NOTE(nirajsingh): if the underlying fd supports read(), overwrite
        # the default iterator-based implementation with cooperative_read,
        # which is more straightforward
        if hasattr(fd, 'read'):
            self.read = cooperative_read(fd)
        else:
            self.iterator = None
            self.buffer = b''
            self.position = 0

    def read(self, length=None):
        """Return the requested number of bytes.

        Fetch the next chunk of the underlying iterator when needed.
        This is replaced with cooperative_read in __init__ if the underlying
        fd already supports read().

        """

        if length is None:
            if len(self.buffer) - self.position > 0:
                # if no length is specified but some data exists in the
                # buffer, return that data and clear the buffer
                result = self.buffer[self.position:]
                self.buffer = b''
                self.position = 0
                return bytes(result)
            else:
                # otherwise read the next chunk from the underlying iterator
                # and return it as a whole. Reset the buffer, as subsequent
                # calls may specify the length
                try:
                    if self.iterator is None:
                        self.iterator = self.__iter__()
                    return next(self.iterator)
                except StopIteration:
                    return b''
                finally:
                    self.buffer = b''
                    self.position = 0
        else:
            result = bytearray()
            while len(result) < length:
                if self.position < len(self.buffer):
                    to_read = length - len(result)
                    chunk = self.buffer[self.position:self.position + to_read]
                    result.extend(chunk)

                    # This check is here to prevent potential OOM issues if
                    # this code is called with unreasonably high read sizes.
                    # Currently it is only called from the HTTP clients of
                    # Glance backend stores, which use httplib for data
                    # streaming with a read size hardcoded to 8K, so this
                    # check should never fire. Regardless, the check is still
                    # worth making, as the code may be reused elsewhere.
                    if len(result) >= MAX_COOP_READER_BUFFER_SIZE:
                        raise exceptions.LimitExceeded()
                    self.position += len(chunk)
                else:
                    try:
                        if self.iterator is None:
                            self.iterator = self.__iter__()
                        self.buffer = next(self.iterator)
                        self.position = 0
                    except StopIteration:
                        self.buffer = b''
                        self.position = 0
                        return bytes(result)
            return bytes(result)

    def __iter__(self):
        return cooperative_iter(self.fd.__iter__())

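# Illustrative usage (editor's sketch):
#
#     import io
#     reader = CooperativeReader(io.BytesIO(b'abcdef'))
#     reader.read(4)   # -> b'abcd', with a sleep(0) after the read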


class LimitingReader(object):
    """Reader that fails when reading past the configured limit.

    Reader designed to fail when reading image data past the configured
    allowable amount.
    """
    def __init__(self, data, limit,
                 exception_class=exceptions.CSARFileSizeLimitExceeded):
        """Construct a LimitingReader object.

        :param data: Underlying image data object
        :param limit: maximum number of bytes the reader should allow
        :param exception_class: Type of exception to be raised
        """
        self.data = data
        self.limit = limit
        self.bytes_read = 0
        self.exception_class = exception_class

    def __iter__(self):
        for chunk in self.data:
            self.bytes_read += len(chunk)
            if self.bytes_read > self.limit:
                raise self.exception_class()
            else:
                yield chunk

    def read(self, i):
        result = self.data.read(i)
        self.bytes_read += len(result)
        if self.bytes_read > self.limit:
            raise self.exception_class()
        return result
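
# Illustrative usage (editor's sketch):
#
#     import io
#     reader = LimitingReader(io.BytesIO(b'x' * 100), limit=10)
#     for chunk in reader:   # raises CSARFileSizeLimitExceeded, since
#         pass               # 100 bytes exceed the 10-byte limit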