"Fossies" - the Fresh Open Source Software Archive

Member "keystone-17.0.0/keystone/common/sql/core.py" (13 May 2020, 20925 Bytes) of package /linux/misc/openstack/keystone-17.0.0.tar.gz:


    1 # Copyright 2012 OpenStack Foundation
    2 #
    3 # Licensed under the Apache License, Version 2.0 (the "License"); you may
    4 # not use this file except in compliance with the License. You may obtain
    5 # a copy of the License at
    6 #
    7 #      http://www.apache.org/licenses/LICENSE-2.0
    8 #
    9 # Unless required by applicable law or agreed to in writing, software
   10 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
   11 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
   12 # License for the specific language governing permissions and limitations
   13 # under the License.
   14 
   15 """SQL backends for the various services.
   16 
   17 Before using this module, call initialize(). This has to be done before
   18 CONF() because it sets up configuration options.
   19 
   20 """
   21 import datetime
   22 import functools
   23 import pytz
   24 
   25 from oslo_db import exception as db_exception
   26 from oslo_db import options as db_options
   27 from oslo_db.sqlalchemy import enginefacade
   28 from oslo_db.sqlalchemy import models
   29 from oslo_log import log
   30 from oslo_serialization import jsonutils
   31 from oslo_utils import timeutils
   32 from osprofiler import opts as profiler
   33 import osprofiler.sqlalchemy
   34 import sqlalchemy as sql
   35 from sqlalchemy.ext import declarative
   36 from sqlalchemy.orm.attributes import flag_modified, InstrumentedAttribute
   37 from sqlalchemy import types as sql_types
   38 
   39 from keystone.common import driver_hints
   40 from keystone.common import utils
   41 import keystone.conf
   42 from keystone import exception
   43 from keystone.i18n import _
   44 
   45 
   46 CONF = keystone.conf.CONF
   47 LOG = log.getLogger(__name__)
   48 
   49 ModelBase = declarative.declarative_base()
   50 
   51 
   52 # For exporting to other modules
   53 Column = sql.Column
   54 Index = sql.Index
   55 String = sql.String
   56 Integer = sql.Integer
   57 Enum = sql.Enum
   58 ForeignKey = sql.ForeignKey
   59 DateTime = sql.DateTime
   60 Date = sql.Date
   61 TIMESTAMP = sql.TIMESTAMP
   62 IntegrityError = sql.exc.IntegrityError
   63 DBDuplicateEntry = db_exception.DBDuplicateEntry
   64 OperationalError = sql.exc.OperationalError
   65 NotFound = sql.orm.exc.NoResultFound
   66 Boolean = sql.Boolean
   67 Text = sql.Text
   68 UniqueConstraint = sql.UniqueConstraint
   69 PrimaryKeyConstraint = sql.PrimaryKeyConstraint
   70 joinedload = sql.orm.joinedload
   71 # Suppress flake8's unused import warning for flag_modified:
   72 flag_modified = flag_modified
   73 Unicode = sql.Unicode
   74 
   75 
   76 def initialize():
   77     """Initialize the module."""
   78     db_options.set_defaults(
   79         CONF,
   80         connection="sqlite:///keystone.db")
   81     # Configure OSprofiler options
   82     profiler.set_defaults(CONF, enabled=False, trace_sqlalchemy=False)
   83 
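# Illustrative sketch (editorial addition, not part of core.py): per the module
# docstring, initialize() must run before CONF() so that the oslo.db and
# osprofiler options are registered first.  The import path and the
# project/argv handling below are assumptions about the caller, not keystone's
# actual startup code.
#
#     from keystone.common import sql
#     import keystone.conf
#
#     sql.initialize()                         # register [database]/[profiler] opts
#     keystone.conf.CONF(project='keystone')   # only now parse config files/CLI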
   84 
   85 def initialize_decorator(init):
   86     """Ensure that the length of string field do not exceed the limit.
   87 
   88     This decorator check the initialize arguments, to make sure the
   89     length of string field do not exceed the length limit, or raise a
   90     'StringLengthExceeded' exception.
   91 
   92     Use decorator instead of inheritance, because the metaclass will
   93     check the __tablename__, primary key columns, etc. at the class
   94     definition.
   95 
   96     """
   97     def initialize(self, *args, **kwargs):
   98         cls = type(self)
   99         for k, v in kwargs.items():
  100             if hasattr(cls, k):
  101                 attr = getattr(cls, k)
  102                 if isinstance(attr, InstrumentedAttribute):
  103                     column = attr.property.columns[0]
  104                     if isinstance(column.type, String):
  105                         if not isinstance(v, str):
  106                             v = str(v)
  107                         if column.type.length and column.type.length < len(v):
  108                             raise exception.StringLengthExceeded(
  109                                 string=v, type=k, length=column.type.length)
  110 
  111         init(self, *args, **kwargs)
  112     return initialize
  113 
  114 ModelBase.__init__ = initialize_decorator(ModelBase.__init__)
  115 
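# Illustrative sketch (editorial addition): with the wrapped __init__ above,
# constructing any ModelBase subclass with a string longer than its column
# raises exception.StringLengthExceeded.  The Widget model is hypothetical.
#
#     class Widget(ModelBase, ModelDictMixin):
#         __tablename__ = 'widget'
#         id = Column(String(64), primary_key=True)
#         name = Column(String(8))
#
#     Widget(id='w1', name='too-long-for-column')  # raises StringLengthExceeded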
  116 
  117 # Special Fields
  118 class JsonBlob(sql_types.TypeDecorator):
  119 
  120     impl = sql.Text
  121 
  122     def process_bind_param(self, value, dialect):
  123         return jsonutils.dumps(value)
  124 
  125     def process_result_value(self, value, dialect):
  126         if value is not None:
  127             value = jsonutils.loads(value)
  128         return value
  129 
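# Illustrative sketch (editorial addition): JsonBlob stores a JSON-serializable
# structure in a Text column, serializing on write and deserializing on read.
#
#     extra = Column(JsonBlob())             # e.g. on a model definition
#     ...
#     row.extra = {'description': 'demo'}    # stored as '{"description": "demo"}'
#     row.extra['description']               # read back as a dict on load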
  130 
  131 class DateTimeInt(sql_types.TypeDecorator):
  132     """A column that automatically converts a datetime object to an Int.
  133 
  134     Keystone relies on accurate (sub-second) datetime objects. In some cases
  135     the RDBMS drops sub-second accuracy (some versions of MySQL). This field
  136     automatically converts the value to an INT when storing the data and
  137     back to a datetime object when it is loaded from the database.
  138 
  139     NOTE: Any datetime object that has timezone data will be converted to UTC.
  140           Any datetime object that has no timezone data will be assumed to be
  141           UTC and loaded from the DB as such.
  142     """
  143 
  144     impl = sql.BigInteger
  145     epoch = datetime.datetime.fromtimestamp(0, tz=pytz.UTC)
  146 
  147     def process_bind_param(self, value, dialect):
  148         if value is None:
  149             return value
  150         else:
  151             if not isinstance(value, datetime.datetime):
  152                 raise ValueError(_('Programming Error: value to be stored '
  153                                    'must be a datetime object.'))
  154             value = timeutils.normalize_time(value)
  155             value = value.replace(tzinfo=pytz.UTC)
  156             # NOTE(morgan): We are casting this to an int, and ensuring we
  157             # preserve microsecond data by moving the decimal. This is easier
  158             # than being concerned with the differences in Numeric types in
  159             # different SQL backends.
  160             return int((value - self.epoch).total_seconds() * 1000000)
  161 
  162     def process_result_value(self, value, dialect):
  163         if value is None:
  164             return None
  165         else:
  166             # Convert the INT stored in the DB back to a float of seconds
  167             # with microseconds after the decimal point.
  168             value = float(value) / 1000000
  169             # NOTE(morgan): Explicitly use timezone "pytz.UTC" to ensure we are
  170             # not adjusting the actual datetime object from what we stored.
  171             dt_obj = datetime.datetime.fromtimestamp(value, tz=pytz.UTC)
  172             # Return non-tz aware datetime object (as keystone expects)
  173             return timeutils.normalize_time(dt_obj)
  174 
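# Illustrative sketch (editorial addition): the type above stores whole
# microseconds since the UTC epoch, so a round trip looks like this for a
# timezone-aware UTC datetime (the returned object is naive and treated as UTC):
#
#     >>> dt = datetime.datetime(2020, 5, 13, 12, 0, 0, tzinfo=pytz.UTC)
#     >>> DateTimeInt().process_bind_param(dt, dialect=None)
#     1589371200000000
#     >>> DateTimeInt().process_result_value(1589371200000000, dialect=None)
#     datetime.datetime(2020, 5, 13, 12, 0)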
  175 
  176 class ModelDictMixinWithExtras(models.ModelBase):
  177     """Mixin making model behave with dict-like interfaces includes extras.
  178 
  179     NOTE: DO NOT USE THIS FOR FUTURE SQL MODELS. "Extra" column is a legacy
  180           concept that should not be carried forward with new SQL models
  181           as the concept of "arbitrary" properties is not in line with
  182           the design philosophy of Keystone.
  183     """
  184 
  185     attributes = []
  186     _msg = ('Programming Error: Model does not have an "extra" column. '
  187             'Unless the model already has an "extra" column and has '
  188             'existed in a previous released version of keystone with '
  189             'the extra column included, the model should use '
  190             '"ModelDictMixin" instead.')
  191 
  192     @classmethod
  193     def from_dict(cls, d):
  194         new_d = d.copy()
  195 
  196         if not hasattr(cls, 'extra'):
  197             # NOTE(notmorgan): No translation here; this is an error for
  198             # programmers, NOT end users.
  199             raise AttributeError(cls._msg)  # noqa
  200 
  201         new_d['extra'] = {k: new_d.pop(k) for k in d.keys()
  202                           if k not in cls.attributes and k != 'extra'}
  203 
  204         return cls(**new_d)
  205 
  206     def to_dict(self, include_extra_dict=False):
  207         """Return the model's attributes as a dictionary.
  208 
  209         If include_extra_dict is True, 'extra' attributes are literally
  210         included in the resulting dictionary twice, for backwards-compatibility
  211         with a broken implementation.
  212 
  213         """
  214         if not hasattr(self, 'extra'):
  215             # NOTE(notmorgan): No translation here; this is an error for
  216             # programmers, NOT end users.
  217             raise AttributeError(self._msg)  # noqa
  218 
  219         d = self.extra.copy()
  220         for attr in self.__class__.attributes:
  221             d[attr] = getattr(self, attr)
  222 
  223         if include_extra_dict:
  224             d['extra'] = self.extra.copy()
  225 
  226         return d
  227 
  228     def __getitem__(self, key):
  229         """Evaluate if key is in extra or not, to return correct item."""
  230         if key in self.extra:
  231             return self.extra[key]
  232         return getattr(self, key)
  233 
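# Illustrative sketch (editorial addition): for a legacy model that still has
# an 'extra' column (the LegacyThing model here is hypothetical), from_dict()
# folds unknown keys into 'extra' and to_dict() flattens them back out:
#
#     class LegacyThing(ModelBase, ModelDictMixinWithExtras):
#         __tablename__ = 'legacy_thing'
#         attributes = ['id', 'name']
#         id = Column(String(64), primary_key=True)
#         name = Column(String(255))
#         extra = Column(JsonBlob())
#
#     ref = LegacyThing.from_dict({'id': '1', 'name': 'x', 'color': 'blue'})
#     ref.extra                  # {'color': 'blue'}
#     ref.to_dict()['color']     # 'blue'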
  234 
  235 class ModelDictMixin(models.ModelBase):
  236 
  237     @classmethod
  238     def from_dict(cls, d):
  239         """Return a model instance from a dictionary."""
  240         return cls(**d)
  241 
  242     def to_dict(self):
  243         """Return the model's attributes as a dictionary."""
  244         names = (column.name for column in self.__table__.columns)
  245         return {name: getattr(self, name) for name in names}
  246 
  247 
  248 _main_context_manager = None
  249 
  250 
  251 def _get_main_context_manager():
  252     global _main_context_manager
  253 
  254     if not _main_context_manager:
  255         _main_context_manager = enginefacade.transaction_context()
  256 
  257     return _main_context_manager
  258 
  259 
  260 # This function is now only used for testing foreign keys with SQLite.
  261 def enable_sqlite_foreign_key():
  262     global _main_context_manager
  263     if not _main_context_manager:
  264         _main_context_manager = enginefacade.transaction_context()
  265         _main_context_manager.configure(sqlite_fk=True)
  266 
  267 
  268 def cleanup():
  269     global _main_context_manager
  270 
  271     _main_context_manager = None
  272 
  273 
  274 _CONTEXT = None
  275 
  276 
  277 def _get_context():
  278     global _CONTEXT
  279     if _CONTEXT is None:
  280         # NOTE(dims): Delay the `threading.local` import to allow for
  281         # eventlet/gevent monkeypatching to happen
  282         import threading
  283         _CONTEXT = threading.local()
  284     return _CONTEXT
  285 
  286 
  287 # Unit tests set this to True so that oslo.db's global engine is used.
  288 # This allows oslo_db.test_base.DbTestCase to override the transaction manager
  289 # with its test transaction manager.
  290 _TESTING_USE_GLOBAL_CONTEXT_MANAGER = False
  291 
  292 
  293 def session_for_read():
  294     if _TESTING_USE_GLOBAL_CONTEXT_MANAGER:
  295         reader = enginefacade.reader
  296     else:
  297         reader = _get_main_context_manager().reader
  298     return _wrap_session(reader.using(_get_context()))
  299 
  300 
  301 def session_for_write():
  302     if _TESTING_USE_GLOBAL_CONTEXT_MANAGER:
  303         writer = enginefacade.writer
  304     else:
  305         writer = _get_main_context_manager().writer
  306     return _wrap_session(writer.using(_get_context()))
  307 
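# Illustrative sketch (editorial addition): both helpers return enginefacade
# context managers, so SQL drivers wrap their work in 'with' blocks; writes are
# committed (or rolled back on error) when the block exits.  UserModel is a
# hypothetical mapped class.
#
#     with session_for_read() as session:
#         users = session.query(UserModel).all()
#
#     with session_for_write() as session:
#         session.add(UserModel(id='u1', name='demo'))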
  308 
  309 def _wrap_session(sess):
  310     if CONF.profiler.enabled and CONF.profiler.trace_sqlalchemy:
  311         sess = osprofiler.sqlalchemy.wrap_session(sql, sess)
  312     return sess
  313 
  314 
  315 def truncated(f):
  316     return driver_hints.truncated(f)
  317 
  318 
  319 class _WontMatch(Exception):
  320     """Raised to indicate that the filter won't match.
  321 
  322     This is raised to short-circuit the computation of the filter as soon as
  323     it's discovered that the filter requested isn't going to match anything.
  324 
  325     A filter isn't going to match anything if the value is too long for the
  326     field, for example.
  327 
  328     """
  329 
  330     @classmethod
  331     def check(cls, value, col_attr):
  332         """Check if the value can match given the column attributes.
  333 
  334         Raises this class if the value provided can't match any value in the
  335         column in the table given the column's attributes. For example, if the
  336         column is a string and the value is longer than the column then it
  337         won't match any value in the column in the table.
  338 
  339         """
  340         if value is None:
  341             return
  342         col = col_attr.property.columns[0]
  343         if isinstance(col.type, sql.types.Boolean):
  344             # The column is a Boolean, so input should already be validated.
  345             return
  346         if not col.type.length:
  347             # The column doesn't have a length, so we can't validate further.
  348             return
  349         if len(value) > col.type.length:
  350             raise cls()
  351         # Otherwise the value could match a value in the column.
  352 
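# Illustrative sketch (editorial addition): given a String(64) column attribute
# (UserModel.name here is hypothetical), a 70-character filter value can never
# match, so check() raises and _filter() below flags hints.cannot_match rather
# than running the query.
#
#     _WontMatch.check('x' * 70, UserModel.name)   # raises _WontMatch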
  353 
  354 def _filter(model, query, hints):
  355     """Apply filtering to a query.
  356 
  357     :param model: the table model in question
  358     :param query: query to apply filters to
  359     :param hints: contains the list of filters yet to be satisfied.
  360                   Any filters satisfied here will be removed so that
  361                   the caller will know if any filters remain.
  362 
  363     :returns: query updated with any filters satisfied
  364 
  365     """
  366     def inexact_filter(model, query, filter_, satisfied_filters):
  367         """Apply an inexact filter to a query.
  368 
  369         :param model: the table model in question
  370         :param query: query to apply filters to
  371         :param dict filter_: describes this filter
  372         :param list satisfied_filters: filter_ will be added if it is
  373                                        satisfied.
  374 
  375         :returns: query updated to add any inexact filters satisfied
  376 
  377         """
  378         column_attr = getattr(model, filter_['name'])
  379 
  380         # TODO(henry-nash): Sqlalchemy 0.7 defaults to case insensitivity
  381         # so once we find a way of changing that (maybe on a call-by-call
  382         # basis), we can add support for the case sensitive versions of
  383         # the filters below.  For now, these case sensitive versions will
  384         # be handled at the controller level.
  385 
  386         if filter_['case_sensitive']:
  387             return query
  388 
  389         if filter_['comparator'] == 'contains':
  390             _WontMatch.check(filter_['value'], column_attr)
  391             query_term = column_attr.ilike('%%%s%%' % filter_['value'])
  392         elif filter_['comparator'] == 'startswith':
  393             _WontMatch.check(filter_['value'], column_attr)
  394             query_term = column_attr.ilike('%s%%' % filter_['value'])
  395         elif filter_['comparator'] == 'endswith':
  396             _WontMatch.check(filter_['value'], column_attr)
  397             query_term = column_attr.ilike('%%%s' % filter_['value'])
  398         else:
  399             # It's a filter we don't understand, so let the caller
  400             # work out if they need to do something with it.
  401             return query
  402 
  403         satisfied_filters.append(filter_)
  404         return query.filter(query_term)
  405 
  406     def exact_filter(model, query, filter_, satisfied_filters):
  407         """Apply an exact filter to a query.
  408 
  409         :param model: the table model in question
  410         :param query: query to apply filters to
  411         :param dict filter_: describes this filter
  412         :param list satisfied_filters: filter_ will be added if it is
  413                                        satisfied.
  414         :returns: query updated to add any exact filters satisfied
  415         """
  416         key = filter_['name']
  417 
  418         col = getattr(model, key)
  419         if isinstance(col.property.columns[0].type, sql.types.Boolean):
  420             filter_val = utils.attr_as_boolean(filter_['value'])
  421         else:
  422             _WontMatch.check(filter_['value'], col)
  423             filter_val = filter_['value']
  424 
  425         satisfied_filters.append(filter_)
  426         return query.filter(col == filter_val)
  427 
  428     try:
  429         satisfied_filters = []
  430         for filter_ in hints.filters:
  431             if filter_['name'] not in model.attributes:
  432                 continue
  433             if filter_['comparator'] == 'equals':
  434                 query = exact_filter(model, query, filter_,
  435                                      satisfied_filters)
  436             else:
  437                 query = inexact_filter(model, query, filter_,
  438                                        satisfied_filters)
  439 
  440         # Remove satisfied filters so the caller knows which filters remain
  441         for filter_ in satisfied_filters:
  442             hints.filters.remove(filter_)
  443 
  444         return query
  445     except _WontMatch:
  446         hints.cannot_match = True
  447         return
  448 
  449 
  450 def _limit(query, hints):
  451     """Apply a limit to a query.
  452 
  453     :param query: query to apply the limit to
  454     :param hints: contains the list of filters and limit details.
  455 
  456     :returns: query updated with any limits satisfied
  457 
  458     """
  459     # NOTE(henry-nash): If we were to implement pagination, then we
  460     # would expand this method to support pagination and limiting.
  461 
  462     # If we satisfied all the filters, set an upper limit if supplied
  463     if hints.limit:
  464         original_len = query.count()
  465         limit_query = query.limit(hints.limit['limit'])
  466         if limit_query.count() < original_len:
  467             hints.limit['truncated'] = True
  468             query = limit_query
  469     return query
  470 
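# Illustrative sketch (editorial addition): hints.limit is a dict such as
# {'limit': 10, 'truncated': False}, typically populated via Hints.set_limit()
# (that helper is assumed here).  When the query holds more rows than the
# limit, the flag is flipped so list APIs can report a truncated result.
#
#     hints = driver_hints.Hints()
#     hints.set_limit(10)
#     query = _limit(query, hints)
#     hints.limit['truncated']      # True if rows were cut off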
  471 
  472 def filter_limit_query(model, query, hints):
  473     """Apply filtering and limit to a query.
  474 
  475     :param model: table model
  476     :param query: query to apply filters to
  477     :param hints: contains the list of filters and limit details.  This may
  478                   be None, indicating that there are no filters or limits
  479                   to be applied. If it's not None, then any filters
  480                   satisfied here will be removed so that the caller will
  481                   know if any filters remain.
  482 
  483     :returns: query updated with any filters and limits satisfied
  484 
  485     """
  486     if hints is None:
  487         return query
  488 
  489     # First try and satisfy any filters
  490     query = _filter(model, query, hints)
  491 
  492     if hints.cannot_match:
  493         # Nothing's going to match, so don't bother with the query.
  494         return []
  495 
  496     # NOTE(henry-nash): Any unsatisfied filters will have been left in
  497     # the hints list for the controller to handle. We can only try and
  498     # limit here if all the filters are already satisfied since, if not,
  499     # doing so might mess up the final results. If there are still
  500     # unsatisfied filters, we have to leave any limiting to the controller
  501     # as well.
  502 
  503     if not hints.filters:
  504         return _limit(query, hints)
  505     else:
  506         return query
  507 
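# Illustrative sketch (editorial addition): a driver typically builds a Hints
# object, lets filter_limit_query() push whatever it can into SQL, and handles
# any remaining filters itself.  UserModel and the exact Hints.add_filter()
# call shapes are assumptions here.
#
#     hints = driver_hints.Hints()
#     hints.add_filter('name', 'alice')                       # exact match
#     hints.add_filter('name', 'ali', comparator='startswith',
#                      case_sensitive=False)                  # inexact match
#     with session_for_read() as session:
#         query = session.query(UserModel)
#         refs = filter_limit_query(UserModel, query, hints)
#         # refs is a filtered Query, or [] when nothing can match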
  508 
  509 def handle_conflicts(conflict_type='object'):
  510     """Convert select sqlalchemy exceptions into HTTP 409 Conflict."""
  511     _conflict_msg = 'Conflict %(conflict_type)s: %(details)s'
  512 
  513     def decorator(method):
  514         @functools.wraps(method)
  515         def wrapper(*args, **kwargs):
  516             try:
  517                 return method(*args, **kwargs)
  518             except db_exception.DBDuplicateEntry as e:
  519                 # LOG the exception for debug purposes, do not send the
  520                 # exception details out with the raised Conflict exception
  521                 # as it can contain raw SQL.
  522                 LOG.debug(_conflict_msg, {'conflict_type': conflict_type,
  523                                           'details': e})
  524                 name = None
  525                 field = None
  526                 domain_id = None
  527                 # The first element is not needed for extracting the name and
  528                 # causes an "object not iterable" error, so remove it.
  529                 params = args[1:]
  530                 # We want to include the duplicate object's name in the error
  531                 # message for the user. If the name is not available, use the ID.
  532                 for arg in params:
  533                     if isinstance(arg, dict):
  534                         if 'name' in arg:
  535                             field = 'name'
  536                             name = arg['name']
  537                         elif 'id' in arg:
  538                             field = 'ID'
  539                             name = arg['id']
  540                         if 'domain_id' in arg:
  541                             domain_id = arg['domain_id']
  542                 msg = _('Duplicate entry')
  543                 if name and domain_id:
  544                     msg = _('Duplicate entry found with %(field)s %(name)s '
  545                             'at domain ID %(domain_id)s') % {
  546                         'field': field, 'name': name, 'domain_id': domain_id}
  547                 elif name:
  548                     msg = _('Duplicate entry found with %(field)s '
  549                             '%(name)s') % {'field': field, 'name': name}
  550                 elif domain_id:
  551                     msg = (_('Duplicate entry at domain ID %s') % domain_id)
  552                 raise exception.Conflict(type=conflict_type,
  553                                          details=msg)
  554             except db_exception.DBError as e:
  555                 # TODO(blk-u): inspecting inner_exception breaks encapsulation;
  556                 # oslo_db should provide the exception we need.
  557                 if isinstance(e.inner_exception, IntegrityError):
  558                     # LOG the exception for debug purposes, do not send the
  559                     # exception details out with the raised Conflict exception
  560                     # as it can contain raw SQL.
  561                     LOG.debug(_conflict_msg, {'conflict_type': conflict_type,
  562                                               'details': e})
  563                     # NOTE(morganfainberg): This is really a case where the SQL
  564                     # failed to store the data. This is not something that the
  565                     # user has done wrong. An example would be a missing
  566                     # ForeignKey; the code executed before the SQL write
  567                     # to the DB should have caught the issue.
  568                     raise exception.UnexpectedError(
  569                         _('An unexpected error occurred when trying to '
  570                           'store %s') % conflict_type)
  571                 raise
  572 
  573         return wrapper
  574     return decorator
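
# Illustrative sketch (editorial addition): backend create/update methods wrap
# themselves with this decorator so duplicate-entry errors surface to the API
# layer as HTTP 409 Conflict.  The create_user() method and UserModel below
# are hypothetical.
#
#     @handle_conflicts(conflict_type='user')
#     def create_user(self, user_id, user):
#         with session_for_write() as session:
#             session.add(UserModel.from_dict(user))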