"Fossies" - the Fresh Open Source Software Archive

Member "cloudkitty-13.0.0/cloudkitty/tests/storage/v2/test_influxdb.py" (14 Oct 2020, 7806 Bytes) of package /linux/misc/openstack/cloudkitty-13.0.0.tar.gz:


As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) Python source code syntax highlighting (style: standard) with prefixed line numbers. Alternatively, you can view or download the uninterpreted source code file here. See also the latest Fossies "Diffs" side-by-side code changes report for "test_influxdb.py": 12.1.0_vs_13.0.0.

    1 # Copyright 2019 Objectif Libre
    2 #
    3 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
    4 #    not use this file except in compliance with the License. You may obtain
    5 #    a copy of the License at
    6 #
    7 #         http://www.apache.org/licenses/LICENSE-2.0
    8 #
    9 #    Unless required by applicable law or agreed to in writing, software
   10 #    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
   11 #    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
   12 #    License for the specific language governing permissions and limitations
   13 #    under the License.
   14 #
   15 import collections
   16 import copy
   17 from datetime import datetime
   18 from datetime import timedelta
   19 import unittest
   20 from unittest import mock
   21 
   22 from dateutil import tz
   23 
   24 from cloudkitty import dataframe
   25 from cloudkitty.storage.v2 import influx
   26 from cloudkitty.tests import TestCase
   27 from cloudkitty.utils import tz as tzutils
   28 
   29 
   30 class TestInfluxDBStorage(TestCase):
   31 
   32     def setUp(self):
   33         super(TestInfluxDBStorage, self).setUp()
   34         self.point = {
   35             'type': 'amazing_type',
   36             'unit': 'banana',
   37             'qty': 42,
   38             'price': 1.0,
   39             'groupby': 'one|two',
   40             'metadata': '1|2',
   41             'one': '1',
   42             'two': '2',
   43             '1': 'one',
   44             '2': 'two',
   45             'time': datetime(2019, 1, 1, tzinfo=tz.tzutc()).isoformat(),
   46         }
   47 
   48     def test_point_to_dataframe_entry_valid_point(self):
   49         self.assertEqual(
   50             influx.InfluxStorage._point_to_dataframe_entry(self.point),
   51             dataframe.DataPoint(
   52                 'banana',
   53                 42,
   54                 1,
   55                 {'one': '1', 'two': '2'},
   56                 {'1': 'one', '2': 'two'},
   57             ),
   58         )
   59 
   60     def test_point_to_dataframe_entry_invalid_groupby_metadata(self):
   61         point = copy.deepcopy(self.point)
   62         point['groupby'] = 'a'
   63         point['metadata'] = None
   64         self.assertEqual(
   65             influx.InfluxStorage._point_to_dataframe_entry(point),
   66             dataframe.DataPoint(
   67                 'banana',
   68                 42,
   69                 1,
   70                 {'a': ''},
   71                 {},
   72             ),
   73         )
   74 
   75     def test_build_dataframes_differenciates_periods(self):
   76         points = [copy.deepcopy(self.point) for _ in range(3)]
   77         for idx, point in enumerate(points):
   78             point[influx.PERIOD_FIELD_NAME] = 100 * (idx + 1)
   79 
   80         dataframes = influx.InfluxStorage()._build_dataframes(points)
   81         self.assertEqual(len(dataframes), 3)
   82 
   83         for idx, frame in enumerate(dataframes):
   84             self.assertEqual(
   85                 frame.start, datetime(2019, 1, 1, tzinfo=tz.tzutc()))
   86             delta = timedelta(seconds=(idx + 1) * 100)
   87             self.assertEqual(frame.end,
   88                              datetime(2019, 1, 1, tzinfo=tz.tzutc()) + delta)
   89             typelist = list(frame.itertypes())
   90             self.assertEqual(len(typelist), 1)
   91             type_, points = typelist[0]
   92             self.assertEqual(len(points), 1)
   93             self.assertEqual(type_, 'amazing_type')
   94 
   95 
   96 class FakeResultSet(object):
   97     def __init__(self, points=[], items=[]):
   98         self._points = points
   99         self._items = items
  100 
  101     def get_points(self):
  102         return self._points
  103 
  104     def items(self):
  105         return self._items
  106 
  107 
  108 class TestInfluxClient(unittest.TestCase):
  109     def setUp(self):
  110         self.period_begin = tzutils.local_to_utc(
  111             tzutils.get_month_start()).isoformat()
  112         self.period_end = tzutils.local_to_utc(
  113             tzutils.get_next_month()).isoformat()
  114         self.client = influx.InfluxClient()
  115         self._storage = influx.InfluxStorage()
  116 
  117     def test_get_filter_query(self):
  118         filters = collections.OrderedDict(
  119             (('str_filter', 'one'), ('float_filter', 2.0)))
  120         self.assertEqual(
  121             self.client._get_filter_query(filters),
  122             """ AND "str_filter"='one' AND "float_filter"=2.0"""
  123         )
  124 
  125     def test_get_filter_query_no_filters(self):
  126         self.assertEqual(self.client._get_filter_query({}), '')
  127 
  128     def test_retrieve_format_with_pagination(self):
  129         self._storage._conn._conn.query = m = mock.MagicMock()
  130         m.return_value = (FakeResultSet(), FakeResultSet())
  131 
  132         self._storage.retrieve()
  133         m.assert_called_once_with(
  134             "SELECT COUNT(groupby) FROM \"dataframes\""
  135             " WHERE time >= '{0}'"
  136             " AND time < '{1}';"
  137             "SELECT * FROM \"dataframes\""
  138             " WHERE time >= '{0}'"
  139             " AND time < '{1}'"
  140             " LIMIT 1000 OFFSET 0;".format(
  141                 self.period_begin, self.period_end,
  142             ))
  143 
  144     def test_retrieve_format_with_types(self):
  145         self._storage._conn._conn.query = m = mock.MagicMock()
  146         m.return_value = (FakeResultSet(), FakeResultSet())
  147 
  148         self._storage.retrieve(metric_types=['foo', 'bar'])
  149         m.assert_called_once_with(
  150             "SELECT COUNT(groupby) FROM \"dataframes\""
  151             " WHERE time >= '{0}'"
  152             " AND time < '{1}'"
  153             " AND (type='foo' OR type='bar');"
  154             "SELECT * FROM \"dataframes\""
  155             " WHERE time >= '{0}'"
  156             " AND time < '{1}'"
  157             " AND (type='foo' OR type='bar')"
  158             " LIMIT 1000 OFFSET 0;".format(
  159                 self.period_begin, self.period_end,
  160             ))
  161 
  162     def test_delete_no_parameters(self):
  163         self._storage._conn._conn.query = m = mock.MagicMock()
  164         self._storage.delete()
  165         m.assert_called_once_with('DELETE FROM "dataframes";')
  166 
  167     def test_delete_begin_end(self):
  168         self._storage._conn._conn.query = m = mock.MagicMock()
  169         self._storage.delete(begin=datetime(2019, 1, 1),
  170                              end=datetime(2019, 1, 2))
  171         m.assert_called_once_with(
  172             """DELETE FROM "dataframes" WHERE time >= '2019-01-01T00:00:00'"""
  173             """ AND time < '2019-01-02T00:00:00';""")
  174 
  175     def test_delete_begin_end_filters(self):
  176         self._storage._conn._conn.query = m = mock.MagicMock()
  177         self._storage.delete(
  178             begin=datetime(2019, 1, 1), end=datetime(2019, 1, 2),
  179             filters={'project_id': 'foobar'})
  180         m.assert_called_once_with(
  181             """DELETE FROM "dataframes" WHERE time >= '2019-01-01T00:00:00'"""
  182             """ AND time < '2019-01-02T00:00:00' AND "project_id"='foobar';"""
  183         )
  184 
  185     def test_delete_end_filters(self):
  186         self._storage._conn._conn.query = m = mock.MagicMock()
  187         self._storage.delete(end=datetime(2019, 1, 2),
  188                              filters={'project_id': 'foobar'})
  189         m.assert_called_once_with(
  190             """DELETE FROM "dataframes" WHERE time < '2019-01-02T00:00:00' """
  191             """AND "project_id"='foobar';""")
  192 
  193     def test_delete_begin_filters(self):
  194         self._storage._conn._conn.query = m = mock.MagicMock()
  195         self._storage.delete(begin=datetime(2019, 1, 2),
  196                              filters={'project_id': 'foobar'})
  197         m.assert_called_once_with(
  198             """DELETE FROM "dataframes" WHERE time >= '2019-01-02T00:00:00'"""
  199             """ AND "project_id"='foobar';""")
  200 
  201     def test_delete_begin(self):
  202         self._storage._conn._conn.query = m = mock.MagicMock()
  203         self._storage.delete(begin=datetime(2019, 1, 2))
  204         m.assert_called_once_with("""DELETE FROM "dataframes" WHERE """
  205                                   """time >= '2019-01-02T00:00:00';""")
  206 
  207     def test_delete_end(self):
  208         self._storage._conn._conn.query = m = mock.MagicMock()
  209         self._storage.delete(end=datetime(2019, 1, 2))
  210         m.assert_called_once_with("""DELETE FROM "dataframes" WHERE """
  211                                   """time < '2019-01-02T00:00:00';""")