"Fossies" - the Fresh Open Source Software Archive 
Member "manila-11.0.1/manila/tests/db/sqlalchemy/test_api.py" (1 Feb 2021, 184128 Bytes) of package /linux/misc/openstack/manila-11.0.1.tar.gz:
As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) Python source code syntax highlighting (style:
standard) with prefixed line numbers.
Alternatively you can here
view or
download the uninterpreted source code file.
See also the latest
Fossies "Diffs" side-by-side code changes report for "test_api.py":
11.0.0_vs_11.0.1.
1 # Copyright 2013 OpenStack Foundation
2 # Copyright (c) 2014 NetApp, Inc.
3 # Copyright (c) 2015 Rushil Chugh
4 # All Rights Reserved.
5 #
6 # Licensed under the Apache License, Version 2.0 (the "License"); you may
7 # not use this file except in compliance with the License. You may obtain
8 # a copy of the License at
9 #
10 # http://www.apache.org/licenses/LICENSE-2.0
11 #
12 # Unless required by applicable law or agreed to in writing, software
13 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
15 # License for the specific language governing permissions and limitations
16 # under the License.
17
18 """Testing of SQLAlchemy backend."""
19
20 import copy
21 import datetime
22 import random
23 from unittest import mock
24
25 import ddt
26 from oslo_db import exception as db_exception
27 from oslo_utils import timeutils
28 from oslo_utils import uuidutils
29 import six
30
31 from manila.common import constants
32 from manila import context
33 from manila.db.sqlalchemy import api as db_api
34 from manila.db.sqlalchemy import models
35 from manila import exception
36 from manila import quota
37 from manila import test
38 from manila.tests import db_utils
39
40 QUOTAS = quota.QUOTAS
41
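# Module-level fixture of fake security service values, shared by the
# security service DB API tests in this module.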
42 security_service_dict = {
43 'id': 'fake id',
44 'project_id': 'fake project',
45 'type': 'ldap',
46 'dns_ip': 'fake dns',
47 'server': 'fake ldap server',
48 'domain': 'fake ldap domain',
49 'ou': 'fake ldap ou',
50 'user': 'fake user',
51 'password': 'fake password',
52 'name': 'whatever',
53 'description': 'nevermind',
54 }
55
56
57 class BaseDatabaseAPITestCase(test.TestCase):
58 def _check_fields(self, expected, actual):
59 for key in expected:
60 self.assertEqual(expected[key], actual[key])
61
62
63 @ddt.ddt
64 class GenericDatabaseAPITestCase(test.TestCase):
65
66 def setUp(self):
67 """Run before each test."""
68 super(GenericDatabaseAPITestCase, self).setUp()
69 self.ctxt = context.get_admin_context()
70
71 @ddt.unpack
72 @ddt.data(
73 {'values': {'test': 'fake'}, 'call_count': 1},
74 {'values': {'test': 'fake', 'id': 'fake'}, 'call_count': 0},
75 {'values': {'test': 'fake', 'fooid': 'fake'}, 'call_count': 1},
76 {'values': {'test': 'fake', 'idfoo': 'fake'}, 'call_count': 1},
77 )
78 def test_ensure_model_values_has_id(self, values, call_count):
79 self.mock_object(uuidutils, 'generate_uuid')
80
81 db_api.ensure_model_dict_has_id(values)
82
83 self.assertEqual(call_count, uuidutils.generate_uuid.call_count)
84 self.assertIn('id', values)
85
86 def test_custom_query(self):
87 share = db_utils.create_share()
88 share_access = db_utils.create_access(share_id=share['id'])
89
90 db_api.share_instance_access_delete(
91 self.ctxt, share_access.instance_mappings[0].id)
92 self.assertRaises(exception.NotFound, db_api.share_access_get,
93 self.ctxt, share_access.id)
94
95
96 @ddt.ddt
97 class ShareAccessDatabaseAPITestCase(test.TestCase):
98
99 def setUp(self):
100 """Run before each test."""
101 super(ShareAccessDatabaseAPITestCase, self).setUp()
102 self.ctxt = context.get_admin_context()
103
104 @ddt.data(0, 3)
105 def test_share_access_get_all_for_share(self, len_rules):
106 share = db_utils.create_share()
107 rules = [db_utils.create_access(share_id=share['id'])
108 for i in range(0, len_rules)]
109 rule_ids = [r['id'] for r in rules]
110
111 result = db_api.share_access_get_all_for_share(self.ctxt, share['id'])
112
113 self.assertEqual(len_rules, len(result))
114 result_ids = [r['id'] for r in result]
115 self.assertEqual(rule_ids, result_ids)
116
117 def test_share_access_get_all_for_share_no_instance_mappings(self):
118 share = db_utils.create_share()
119 share_instance = share['instance']
120 rule = db_utils.create_access(share_id=share['id'])
121 # Mark instance mapping soft deleted
122 db_api.share_instance_access_update(
123 self.ctxt, rule['id'], share_instance['id'], {'deleted': "True"})
124
125 result = db_api.share_access_get_all_for_share(self.ctxt, share['id'])
126
127 self.assertEqual([], result)
128
129 def test_share_instance_access_update(self):
130 share = db_utils.create_share()
131 access = db_utils.create_access(share_id=share['id'])
132
133 instance_access_mapping = db_api.share_instance_access_get(
134 self.ctxt, access['id'], share.instance['id'])
135 self.assertEqual(constants.ACCESS_STATE_QUEUED_TO_APPLY,
136 access['state'])
137 self.assertIsNone(access['access_key'])
138
139 db_api.share_instance_access_update(
140 self.ctxt, access['id'], share.instance['id'],
141 {'state': constants.STATUS_ERROR, 'access_key': 'watson4heisman'})
142
143 instance_access_mapping = db_api.share_instance_access_get(
144 self.ctxt, access['id'], share.instance['id'])
145 access = db_api.share_access_get(self.ctxt, access['id'])
146 self.assertEqual(constants.STATUS_ERROR,
147 instance_access_mapping['state'])
148 self.assertEqual('watson4heisman', access['access_key'])
149
150 @ddt.data(True, False)
151 def test_share_access_get_all_for_instance_with_share_access_data(
152 self, with_share_access_data):
153 share = db_utils.create_share()
154 access_1 = db_utils.create_access(share_id=share['id'])
155 access_2 = db_utils.create_access(share_id=share['id'])
156 share_access_keys = ('access_to', 'access_type', 'access_level',
157 'share_id')
158
159 rules = db_api.share_access_get_all_for_instance(
160 self.ctxt, share.instance['id'],
161 with_share_access_data=with_share_access_data)
162
163 share_access_keys_present = bool(with_share_access_data)
164 actual_access_ids = [r['access_id'] for r in rules]
165 self.assertIsInstance(actual_access_ids, list)
166 expected = [access_1['id'], access_2['id']]
167 self.assertEqual(len(expected), len(actual_access_ids))
168 for access_id in expected:
169 self.assertIn(access_id, actual_access_ids)
170 for rule in rules:
171 for key in share_access_keys:
172 self.assertEqual(share_access_keys_present, key in rule)
173 self.assertIn('state', rule)
174
175 def test_share_access_get_all_for_instance_with_filters(self):
176 share = db_utils.create_share()
177 new_share_instance = db_utils.create_share_instance(
178 share_id=share['id'])
179 access_1 = db_utils.create_access(share_id=share['id'])
180 access_2 = db_utils.create_access(share_id=share['id'])
181 share_access_keys = ('access_to', 'access_type', 'access_level',
182 'share_id')
183 db_api.share_instance_access_update(
184 self.ctxt, access_1['id'], new_share_instance['id'],
185 {'state': constants.STATUS_ACTIVE})
186
187 rules = db_api.share_access_get_all_for_instance(
188 self.ctxt, new_share_instance['id'],
189 filters={'state': constants.ACCESS_STATE_QUEUED_TO_APPLY})
190
191 self.assertEqual(1, len(rules))
192 self.assertEqual(access_2['id'], rules[0]['access_id'])
193
194 for rule in rules:
195 for key in share_access_keys:
196 self.assertIn(key, rule)
197
198 def test_share_instance_access_delete(self):
199 share = db_utils.create_share()
200 access = db_utils.create_access(share_id=share['id'],
201 metadata={'key1': 'v1'})
202 instance_access_mapping = db_api.share_instance_access_get(
203 self.ctxt, access['id'], share.instance['id'])
204
205 db_api.share_instance_access_delete(
206 self.ctxt, instance_access_mapping['id'])
207
208 rules = db_api.share_access_get_all_for_instance(
209 self.ctxt, share.instance['id'])
210 self.assertEqual([], rules)
211
212 self.assertRaises(exception.NotFound, db_api.share_instance_access_get,
213 self.ctxt, access['id'], share['instance']['id'])
214
215 def test_one_share_with_two_share_instance_access_delete(self):
216 metadata = {'key2': 'v2', 'key3': 'v3'}
217 share = db_utils.create_share()
218 instance = db_utils.create_share_instance(share_id=share['id'])
219 access = db_utils.create_access(share_id=share['id'],
220 metadata=metadata)
221 instance_access_mapping1 = db_api.share_instance_access_get(
222 self.ctxt, access['id'], share.instance['id'])
223 instance_access_mapping2 = db_api.share_instance_access_get(
224 self.ctxt, access['id'], instance['id'])
225 self.assertEqual(instance_access_mapping1['access_id'],
226 instance_access_mapping2['access_id'])
227 db_api.share_instance_delete(self.ctxt, instance['id'])
228
229 get_accesses = db_api.share_access_get_all_for_share(self.ctxt,
230 share['id'])
231 self.assertEqual(1, len(get_accesses))
232 get_metadata = (
233 get_accesses[0].get('share_access_rules_metadata') or {})
234 get_metadata = {item['key']: item['value'] for item in get_metadata}
235 self.assertEqual(metadata, get_metadata)
236 self.assertEqual(access['id'], get_accesses[0]['id'])
237
238 db_api.share_instance_delete(self.ctxt, share['instance']['id'])
239 self.assertRaises(exception.NotFound,
240 db_api.share_instance_access_get,
241 self.ctxt, access['id'], share['instance']['id'])
242
243 get_accesses = db_api.share_access_get_all_for_share(self.ctxt,
244 share['id'])
245 self.assertEqual(0, len(get_accesses))
246
247 @ddt.data(True, False)
248 def test_share_instance_access_get_with_share_access_data(
249 self, with_share_access_data):
250 share = db_utils.create_share()
251 access = db_utils.create_access(share_id=share['id'])
252
253 instance_access = db_api.share_instance_access_get(
254 self.ctxt, access['id'], share['instance']['id'],
255 with_share_access_data=with_share_access_data)
256
257 for key in ('share_id', 'access_type', 'access_to', 'access_level',
258 'access_key'):
259 self.assertEqual(with_share_access_data, key in instance_access)
260
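# The (existing, new, result) triples below exercise duplicate detection:
# 'ip' rules are compared as networks, so an address and its /32 (or /128)
# form count as the same rule, while different subnets or access types do not.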
261 @ddt.data({'existing': {'access_type': 'cephx', 'access_to': 'alice'},
262 'new': {'access_type': 'user', 'access_to': 'alice'},
263 'result': False},
264 {'existing': {'access_type': 'user', 'access_to': 'bob'},
265 'new': {'access_type': 'user', 'access_to': 'bob'},
266 'result': True},
267 {'existing': {'access_type': 'ip', 'access_to': '10.0.0.10/32'},
268 'new': {'access_type': 'ip', 'access_to': '10.0.0.10'},
269 'result': True},
270 {'existing': {'access_type': 'ip', 'access_to': '10.10.0.11'},
271 'new': {'access_type': 'ip', 'access_to': '10.10.0.11'},
272 'result': True},
273 {'existing': {'access_type': 'ip', 'access_to': 'fd21::11'},
274 'new': {'access_type': 'ip', 'access_to': 'fd21::11'},
275 'result': True},
276 {'existing': {'access_type': 'ip', 'access_to': 'fd21::10'},
277 'new': {'access_type': 'ip', 'access_to': 'fd21::10/128'},
278 'result': True},
279 {'existing': {'access_type': 'ip', 'access_to': '10.10.0.0/22'},
280 'new': {'access_type': 'ip', 'access_to': '10.10.0.0/24'},
281 'result': False},
282 {'existing': {'access_type': 'ip', 'access_to': '2620:52::/48'},
283 'new': {'access_type': 'ip',
284 'access_to': '2620:52:0:13b8::/64'},
285 'result': False})
286 @ddt.unpack
287 def test_share_access_check_for_existing_access(self, existing, new,
288 result):
289 share = db_utils.create_share()
290 db_utils.create_access(share_id=share['id'],
291 access_type=existing['access_type'],
292 access_to=existing['access_to'])
293
294 rule_exists = db_api.share_access_check_for_existing_access(
295 self.ctxt, share['id'], new['access_type'], new['access_to'])
296
297 self.assertEqual(result, rule_exists)
298
299 def test_share_access_get_all_for_share_with_metadata(self):
300 share = db_utils.create_share()
301 rules = [db_utils.create_access(
302 share_id=share['id'], metadata={'key1': i})
303 for i in range(0, 3)]
304 rule_ids = [r['id'] for r in rules]
305
306 result = db_api.share_access_get_all_for_share(self.ctxt, share['id'])
307
308 self.assertEqual(3, len(result))
309 result_ids = [r['id'] for r in result]
310 self.assertEqual(rule_ids, result_ids)
311
312 result = db_api.share_access_get_all_for_share(
313 self.ctxt, share['id'], {'metadata': {'key1': '2'}})
314 self.assertEqual(1, len(result))
315 self.assertEqual(rules[2]['id'], result[0]['id'])
316
317 def test_share_access_metadata_update(self):
318 share = db_utils.create_share()
319 new_metadata = {'key1': 'test_update', 'key2': 'v2'}
320 rule = db_utils.create_access(share_id=share['id'],
321 metadata={'key1': 'v1'})
322 result_metadata = db_api.share_access_metadata_update(
323 self.ctxt, rule['id'], metadata=new_metadata)
324 result = db_api.share_access_get(self.ctxt, rule['id'])
325 self.assertEqual(new_metadata, result_metadata)
326 metadata = result.get('share_access_rules_metadata')
327 if metadata:
328 metadata = {item['key']: item['value'] for item in metadata}
329 else:
330 metadata = {}
331 self.assertEqual(new_metadata, metadata)
332
333
334 @ddt.ddt
335 class ShareDatabaseAPITestCase(test.TestCase):
336
337 def setUp(self):
338 """Run before each test."""
339 super(ShareDatabaseAPITestCase, self).setUp()
340 self.ctxt = context.get_admin_context()
341
342 def test_share_filter_by_host_with_pools(self):
343 share_instances = [
344 db_api.share_create(self.ctxt, {'host': value}).instance
345 for value in ('foo', 'foo#pool0')]
346
347 db_utils.create_share()
348 self._assertEqualListsOfObjects(share_instances,
349 db_api.share_instances_get_all_by_host(
350 self.ctxt, 'foo'),
351 ignored_keys=['share_type',
352 'share_type_id',
353 'export_locations'])
354
355 def test_share_filter_all_by_host_with_pools_multiple_hosts(self):
356 share_instances = [
357 db_api.share_create(self.ctxt, {'host': value}).instance
358 for value in ('foo', 'foo#pool0', 'foo', 'foo#pool1')]
359
360 db_utils.create_share()
361 self._assertEqualListsOfObjects(share_instances,
362 db_api.share_instances_get_all_by_host(
363 self.ctxt, 'foo'),
364 ignored_keys=['share_type',
365 'share_type_id',
366 'export_locations'])
367
368 def test_share_filter_all_by_share_server(self):
369 share_network = db_utils.create_share_network()
370 share_server = db_utils.create_share_server(
371 share_network_id=share_network['id'])
372 share = db_utils.create_share(share_server_id=share_server['id'],
373 share_network_id=share_network['id'])
374
375 actual_result = db_api.share_get_all_by_share_server(
376 self.ctxt, share_server['id'])
377
378 self.assertEqual(1, len(actual_result))
379 self.assertEqual(share['id'], actual_result[0].id)
380
381 def test_share_filter_all_by_share_group(self):
382 group = db_utils.create_share_group()
383 share = db_utils.create_share(share_group_id=group['id'])
384
385 actual_result = db_api.share_get_all_by_share_group_id(
386 self.ctxt, group['id'])
387
388 self.assertEqual(1, len(actual_result))
389 self.assertEqual(share['id'], actual_result[0].id)
390
391 def test_share_instance_delete_with_share(self):
392 share = db_utils.create_share()
393
394 self.assertIsNotNone(db_api.share_get(self.ctxt, share['id']))
395 self.assertIsNotNone(db_api.share_metadata_get(self.ctxt, share['id']))
396
397 db_api.share_instance_delete(self.ctxt, share.instance['id'])
398
399 self.assertRaises(exception.NotFound, db_api.share_get,
400 self.ctxt, share['id'])
401 self.assertRaises(exception.NotFound, db_api.share_metadata_get,
402 self.ctxt, share['id'])
403
404 def test_share_instance_delete_with_share_need_to_update_usages(self):
405 share = db_utils.create_share()
406
407 self.assertIsNotNone(db_api.share_get(self.ctxt, share['id']))
408 self.assertIsNotNone(db_api.share_metadata_get(self.ctxt, share['id']))
409
410 self.mock_object(quota.QUOTAS, 'reserve',
411 mock.Mock(return_value='reservation'))
412 self.mock_object(quota.QUOTAS, 'commit')
413
414 db_api.share_instance_delete(
415 self.ctxt, share.instance['id'], need_to_update_usages=True)
416
417 self.assertRaises(exception.NotFound, db_api.share_get,
418 self.ctxt, share['id'])
419 self.assertRaises(exception.NotFound, db_api.share_metadata_get,
420 self.ctxt, share['id'])
421 quota.QUOTAS.reserve.assert_called_once_with(
422 self.ctxt,
423 project_id=share['project_id'],
424 shares=-1,
425 gigabytes=-share['size'],
426 share_type_id=None,
427 user_id=share['user_id']
428 )
429 quota.QUOTAS.commit.assert_called_once_with(
430 self.ctxt,
431 mock.ANY,
432 project_id=share['project_id'],
433 share_type_id=None,
434 user_id=share['user_id']
435 )
436
437 def test_share_instance_get(self):
438 share = db_utils.create_share()
439
440 instance = db_api.share_instance_get(self.ctxt, share.instance['id'])
441
442 self.assertEqual('share-%s' % instance['id'], instance['name'])
443
444 @ddt.data({'with_share_data': True, 'status': constants.STATUS_AVAILABLE},
445 {'with_share_data': False, 'status': None})
446 @ddt.unpack
447 def test_share_instance_get_all_by_host(self, with_share_data, status):
448 kwargs = {'status': status} if status else {}
449 db_utils.create_share(**kwargs)
450 instances = db_api.share_instances_get_all_by_host(
451 self.ctxt, 'fake_host', with_share_data=with_share_data,
452 status=status)
453
454 self.assertEqual(1, len(instances))
455 instance = instances[0]
456
457 self.assertEqual('share-%s' % instance['id'], instance['name'])
458
459 if with_share_data:
460 self.assertEqual('NFS', instance['share_proto'])
461 self.assertEqual(0, instance['size'])
462 else:
463 self.assertNotIn('share_proto', instance)
464
465 def test_share_instance_get_all_by_host_not_found_exception(self):
466 db_utils.create_share()
467 self.mock_object(db_api, 'share_get', mock.Mock(
468 side_effect=exception.NotFound))
469 instances = db_api.share_instances_get_all_by_host(
470 self.ctxt, 'fake_host', True)
471
472 self.assertEqual(0, len(instances))
473
474 def test_share_instance_get_all_by_share_group(self):
475 group = db_utils.create_share_group()
476 db_utils.create_share(share_group_id=group['id'])
477 db_utils.create_share()
478
479 instances = db_api.share_instances_get_all_by_share_group_id(
480 self.ctxt, group['id'])
481
482 self.assertEqual(1, len(instances))
483 instance = instances[0]
484
485 self.assertEqual('share-%s' % instance['id'], instance['name'])
486
487 @ddt.data('id', 'path')
488 def test_share_instance_get_all_by_export_location(self, type):
489 share = db_utils.create_share()
490 initial_location = ['fake_export_location']
491 db_api.share_export_locations_update(self.ctxt, share.instance['id'],
492 initial_location, False)
493
494 if type == 'id':
495 export_location = (
496 db_api.share_export_locations_get_by_share_id(self.ctxt,
497 share['id']))
498 value = export_location[0]['uuid']
499 else:
500 value = 'fake_export_location'
501
502 instances = db_api.share_instances_get_all(
503 self.ctxt, filters={'export_location_' + type: value})
504
505 self.assertEqual(1, len(instances))
506 instance = instances[0]
507
508 self.assertEqual('share-%s' % instance['id'], instance['name'])
509
510 def test_share_instance_get_all_by_ids(self):
511 fake_share = db_utils.create_share()
512 expected_share_instance = db_utils.create_share_instance(
513 share_id=fake_share['id'])
514
515 # Populate the db with an extra share instance that should be filtered out
516 db_utils.create_share_instance(share_id=fake_share['id'])
517
518 instances = db_api.share_instances_get_all(
519 self.ctxt,
520 filters={'instance_ids': [expected_share_instance['id']]})
521
522 self.assertEqual(1, len(instances))
523 instance = instances[0]
524
525 self.assertEqual('share-%s' % instance['id'], instance['name'])
526
527 @ddt.data('host', 'share_group_id')
528 def test_share_get_all_sort_by_share_instance_fields(self, sort_key):
529 shares = [db_utils.create_share(**{sort_key: n, 'size': 1})
530 for n in ('test1', 'test2')]
531
532 actual_result = db_api.share_get_all(
533 self.ctxt, sort_key=sort_key, sort_dir='desc')
534
535 self.assertEqual(2, len(actual_result))
536 self.assertEqual(shares[0]['id'], actual_result[1]['id'])
537
538 @ddt.data('id', 'path')
539 def test_share_get_all_by_export_location(self, type):
540 share = db_utils.create_share()
541 initial_location = ['fake_export_location']
542 db_api.share_export_locations_update(self.ctxt, share.instance['id'],
543 initial_location, False)
544 if type == 'id':
545 export_location = db_api.share_export_locations_get_by_share_id(
546 self.ctxt, share['id'])
547 value = export_location[0]['uuid']
548 else:
549 value = 'fake_export_location'
550
551 actual_result = db_api.share_get_all(
552 self.ctxt, filters={'export_location_' + type: value})
553
554 self.assertEqual(1, len(actual_result))
555 self.assertEqual(share['id'], actual_result[0]['id'])
556
557 @ddt.data('id', 'path')
558 def test_share_get_all_by_export_location_not_exist(self, type):
559 share = db_utils.create_share()
560 initial_location = ['fake_export_location']
561 db_api.share_export_locations_update(self.ctxt, share.instance['id'],
562 initial_location, False)
563 filter = {'export_location_' + type: 'export_location_not_exist'}
564 actual_result = db_api.share_get_all(self.ctxt, filters=filter)
565
566 self.assertEqual(0, len(actual_result))
567
568 @ddt.data((10, 5), (20, 5))
569 @ddt.unpack
570 def test_share_get_all_with_limit(self, limit, offset):
571 for i in range(limit + 5):
572 db_utils.create_share()
573
574 filters = {'limit': offset, 'offset': 0}
575 shares_not_requested = db_api.share_get_all(
576 self.ctxt, filters=filters)
577
578 filters = {'limit': limit, 'offset': offset}
579 shares_requested = db_api.share_get_all(self.ctxt, filters=filters)
580
581 shares_not_requested_ids = [s['id'] for s in shares_not_requested]
582 shares_requested_ids = [s['id'] for s in shares_requested]
583
584 self.assertEqual(offset, len(shares_not_requested_ids))
585 self.assertEqual(limit, len(shares_requested_ids))
586 self.assertEqual(0, len(
587 set(shares_requested_ids) & set(shares_not_requested_ids)))
588
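# Each entry: (filter passed to share_get_all, the share field it targets,
# two values to create shares with -- one matching the filter, one not).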
589 @ddt.data(
590 ({'status': constants.STATUS_AVAILABLE}, 'status',
591 [constants.STATUS_AVAILABLE, constants.STATUS_ERROR]),
592 ({'share_group_id': 'fake_group_id'}, 'share_group_id',
593 ['fake_group_id', 'group_id']),
594 ({'snapshot_id': 'fake_snapshot_id'}, 'snapshot_id',
595 ['fake_snapshot_id', 'snapshot_id']),
596 ({'share_type_id': 'fake_type_id'}, 'share_type_id',
597 ['fake_type_id', 'type_id']),
598 ({'host': 'fakehost@fakebackend#fakepool'}, 'host',
599 ['fakehost@fakebackend#fakepool', 'foo@bar#test']),
600 ({'share_network_id': 'fake_net_id'}, 'share_network_id',
601 ['fake_net_id', 'net_id']),
602 ({'display_name': 'fake_share_name'}, 'display_name',
603 ['fake_share_name', 'share_name']),
604 ({'display_description': 'fake description'}, 'display_description',
605 ['fake description', 'description'])
606 )
607 @ddt.unpack
608 def test_share_get_all_with_filters(self, filters, key, share_values):
609 for value in share_values:
610 kwargs = {key: value}
611 db_utils.create_share(**kwargs)
612
613 results = db_api.share_get_all(self.ctxt, filters=filters)
614
615 for share in results:
616 self.assertEqual(share[key], filters[key])
617
618 @ddt.data(
619 ('display_name~', 'display_name',
620 ['fake_name_1', 'fake_name_2', 'fake_name_3'], 'fake_name'),
621 ('display_description~', 'display_description',
622 ['fake desc 1', 'fake desc 2', 'fake desc 3'], 'fake desc')
623 )
624 @ddt.unpack
625 def test_share_get_all_like_filters(
626 self, filter_name, key, share_values, like_value):
627 for value in share_values:
628 kwargs = {key: value}
629 db_utils.create_share(**kwargs)
630 db_utils.create_share(
631 display_name='irrelevant_name',
632 display_description='should not be queried')
633
634 filters = {filter_name: like_value}
635
636 results = db_api.share_get_all(self.ctxt, filters=filters)
637
638 self.assertEqual(len(share_values), len(results))
639
640 @ddt.data(None, 'writable')
641 def test_share_get_has_replicas_field(self, replication_type):
642 share = db_utils.create_share(replication_type=replication_type)
643
644 db_share = db_api.share_get(self.ctxt, share['id'])
645
646 self.assertIn('has_replicas', db_share)
647
648 @ddt.data({'with_share_data': False, 'with_share_server': False},
649 {'with_share_data': False, 'with_share_server': True},
650 {'with_share_data': True, 'with_share_server': False},
651 {'with_share_data': True, 'with_share_server': True})
652 @ddt.unpack
653 def test_share_replicas_get_all(self, with_share_data,
654 with_share_server):
655 share_server = db_utils.create_share_server()
656 share_1 = db_utils.create_share()
657 share_2 = db_utils.create_share()
658 db_utils.create_share_replica(
659 replica_state=constants.REPLICA_STATE_ACTIVE,
660 share_id=share_1['id'],
661 share_server_id=share_server['id'])
662 db_utils.create_share_replica(
663 replica_state=constants.REPLICA_STATE_IN_SYNC,
664 share_id=share_1['id'],
665 share_server_id=share_server['id'])
666 db_utils.create_share_replica(
667 replica_state=constants.REPLICA_STATE_OUT_OF_SYNC,
668 share_id=share_2['id'],
669 share_server_id=share_server['id'])
670 db_utils.create_share_replica(share_id=share_2['id'])
671 expected_ss_keys = {
672 'backend_details', 'host', 'id',
673 'share_network_subnet_id', 'status',
674 }
675 expected_share_keys = {
676 'project_id', 'share_type_id', 'display_name',
677 'name', 'share_proto', 'is_public',
678 'source_share_group_snapshot_member_id',
679 }
680 session = db_api.get_session()
681
682 with session.begin():
683 share_replicas = db_api.share_replicas_get_all(
684 self.ctxt, with_share_server=with_share_server,
685 with_share_data=with_share_data, session=session)
686
687 self.assertEqual(3, len(share_replicas))
688 for replica in share_replicas:
689 if with_share_server:
690 self.assertTrue(expected_ss_keys.issubset(
691 replica['share_server'].keys()))
692 else:
693 self.assertNotIn('share_server', replica.keys())
694 self.assertEqual(
695 with_share_data,
696 expected_share_keys.issubset(replica.keys()))
697
698 @ddt.data({'with_share_data': False, 'with_share_server': False},
699 {'with_share_data': False, 'with_share_server': True},
700 {'with_share_data': True, 'with_share_server': False},
701 {'with_share_data': True, 'with_share_server': True})
702 @ddt.unpack
703 def test_share_replicas_get_all_by_share(self, with_share_data,
704 with_share_server):
705 share_server = db_utils.create_share_server()
706 share = db_utils.create_share()
707 db_utils.create_share_replica(
708 replica_state=constants.REPLICA_STATE_ACTIVE,
709 share_id=share['id'],
710 share_server_id=share_server['id'])
711 db_utils.create_share_replica(
712 replica_state=constants.REPLICA_STATE_IN_SYNC,
713 share_id=share['id'],
714 share_server_id=share_server['id'])
715 db_utils.create_share_replica(
716 replica_state=constants.REPLICA_STATE_OUT_OF_SYNC,
717 share_id=share['id'],
718 share_server_id=share_server['id'])
719 expected_ss_keys = {
720 'backend_details', 'host', 'id',
721 'share_network_subnet_id', 'status',
722 }
723 expected_share_keys = {
724 'project_id', 'share_type_id', 'display_name',
725 'name', 'share_proto', 'is_public',
726 'source_share_group_snapshot_member_id',
727 }
728 session = db_api.get_session()
729
730 with session.begin():
731 share_replicas = db_api.share_replicas_get_all_by_share(
732 self.ctxt, share['id'],
733 with_share_server=with_share_server,
734 with_share_data=with_share_data, session=session)
735
736 self.assertEqual(3, len(share_replicas))
737 for replica in share_replicas:
738 if with_share_server:
739 self.assertTrue(expected_ss_keys.issubset(
740 replica['share_server'].keys()))
741 else:
742 self.assertNotIn('share_server', replica.keys())
743 self.assertEqual(with_share_data,
744 expected_share_keys.issubset(replica.keys()))
745
746 def test_share_replicas_get_available_active_replica(self):
747 share_server = db_utils.create_share_server()
748 share_1 = db_utils.create_share()
749 share_2 = db_utils.create_share()
750 share_3 = db_utils.create_share()
751 db_utils.create_share_replica(
752 id='Replica1',
753 share_id=share_1['id'],
754 status=constants.STATUS_AVAILABLE,
755 replica_state=constants.REPLICA_STATE_ACTIVE,
756 share_server_id=share_server['id'])
757 db_utils.create_share_replica(
758 id='Replica2',
759 status=constants.STATUS_AVAILABLE,
760 share_id=share_1['id'],
761 replica_state=constants.REPLICA_STATE_ACTIVE,
762 share_server_id=share_server['id'])
763 db_utils.create_share_replica(
764 id='Replica3',
765 status=constants.STATUS_AVAILABLE,
766 share_id=share_2['id'],
767 replica_state=constants.REPLICA_STATE_ACTIVE)
768 db_utils.create_share_replica(
769 id='Replica4',
770 status=constants.STATUS_ERROR,
771 share_id=share_2['id'],
772 replica_state=constants.REPLICA_STATE_ACTIVE)
773 db_utils.create_share_replica(
774 id='Replica5',
775 status=constants.STATUS_AVAILABLE,
776 share_id=share_2['id'],
777 replica_state=constants.REPLICA_STATE_IN_SYNC)
778 db_utils.create_share_replica(
779 id='Replica6',
780 share_id=share_3['id'],
781 status=constants.STATUS_AVAILABLE,
782 replica_state=constants.REPLICA_STATE_IN_SYNC)
783 session = db_api.get_session()
784 expected_ss_keys = {
785 'backend_details', 'host', 'id',
786 'share_network_subnet_id', 'status',
787 }
788 expected_share_keys = {
789 'project_id', 'share_type_id', 'display_name',
790 'name', 'share_proto', 'is_public',
791 'source_share_group_snapshot_member_id',
792 }
793
794 with session.begin():
795 replica_share_1 = (
796 db_api.share_replicas_get_available_active_replica(
797 self.ctxt, share_1['id'], with_share_server=True,
798 session=session)
799 )
800 replica_share_2 = (
801 db_api.share_replicas_get_available_active_replica(
802 self.ctxt, share_2['id'], with_share_data=True,
803 session=session)
804 )
805 replica_share_3 = (
806 db_api.share_replicas_get_available_active_replica(
807 self.ctxt, share_3['id'], session=session)
808 )
809
810 self.assertIn(replica_share_1.get('id'), ['Replica1', 'Replica2'])
811 self.assertTrue(expected_ss_keys.issubset(
812 replica_share_1['share_server'].keys()))
813 self.assertFalse(
814 expected_share_keys.issubset(replica_share_1.keys()))
815 self.assertEqual('Replica3', replica_share_2.get('id'))
816 self.assertFalse(replica_share_2['share_server'])
817 self.assertTrue(
818 expected_share_keys.issubset(replica_share_2.keys()))
819 self.assertIsNone(replica_share_3)
820
821 def test_share_replica_get_exception(self):
822 replica = db_utils.create_share_replica(share_id='FAKE_SHARE_ID')
823
824 self.assertRaises(exception.ShareReplicaNotFound,
825 db_api.share_replica_get,
826 self.ctxt, replica['id'])
827
828 def test_share_replica_get_without_share_data(self):
829 share = db_utils.create_share()
830 replica = db_utils.create_share_replica(
831 share_id=share['id'],
832 replica_state=constants.REPLICA_STATE_ACTIVE)
833 expected_extra_keys = {
834 'project_id', 'share_type_id', 'display_name',
835 'name', 'share_proto', 'is_public',
836 'source_share_group_snapshot_member_id',
837 }
838
839 share_replica = db_api.share_replica_get(self.ctxt, replica['id'])
840
841 self.assertIsNotNone(share_replica['replica_state'])
842 self.assertEqual(share['id'], share_replica['share_id'])
843 self.assertFalse(expected_extra_keys.issubset(share_replica.keys()))
844
845 def test_share_replica_get_with_share_data(self):
846 share = db_utils.create_share()
847 replica = db_utils.create_share_replica(
848 share_id=share['id'],
849 replica_state=constants.REPLICA_STATE_ACTIVE)
850 expected_extra_keys = {
851 'project_id', 'share_type_id', 'display_name',
852 'name', 'share_proto', 'is_public',
853 'source_share_group_snapshot_member_id',
854 }
855
856 share_replica = db_api.share_replica_get(
857 self.ctxt, replica['id'], with_share_data=True)
858
859 self.assertIsNotNone(share_replica['replica_state'])
860 self.assertEqual(share['id'], share_replica['share_id'])
861 self.assertTrue(expected_extra_keys.issubset(share_replica.keys()))
862
863 def test_share_replica_get_with_share_server(self):
864 session = db_api.get_session()
865 share_server = db_utils.create_share_server()
866 share = db_utils.create_share()
867 replica = db_utils.create_share_replica(
868 share_id=share['id'],
869 replica_state=constants.REPLICA_STATE_ACTIVE,
870 share_server_id=share_server['id']
871 )
872 expected_extra_keys = {
873 'backend_details', 'host', 'id',
874 'share_network_subnet_id', 'status',
875 }
876 with session.begin():
877 share_replica = db_api.share_replica_get(
878 self.ctxt, replica['id'], with_share_server=True,
879 session=session)
880
881 self.assertIsNotNone(share_replica['replica_state'])
882 self.assertEqual(
883 share_server['id'], share_replica['share_server_id'])
884 self.assertTrue(expected_extra_keys.issubset(
885 share_replica['share_server'].keys()))
886
887 def test_share_replica_update(self):
888 share = db_utils.create_share()
889 replica = db_utils.create_share_replica(
890 share_id=share['id'], replica_state=constants.REPLICA_STATE_ACTIVE)
891
892 updated_replica = db_api.share_replica_update(
893 self.ctxt, replica['id'],
894 {'replica_state': constants.REPLICA_STATE_OUT_OF_SYNC})
895
896 self.assertEqual(constants.REPLICA_STATE_OUT_OF_SYNC,
897 updated_replica['replica_state'])
898
899 def test_share_replica_delete(self):
900 share = db_utils.create_share()
901 share = db_api.share_get(self.ctxt, share['id'])
902 self.mock_object(quota.QUOTAS, 'reserve',
903 mock.Mock(return_value='reservation'))
904 self.mock_object(quota.QUOTAS, 'commit')
905 replica = db_utils.create_share_replica(
906 share_id=share['id'], replica_state=constants.REPLICA_STATE_ACTIVE)
907
908 self.assertEqual(1, len(
909 db_api.share_replicas_get_all_by_share(self.ctxt, share['id'])))
910
911 db_api.share_replica_delete(self.ctxt, replica['id'])
912
913 self.assertEqual(
914 [], db_api.share_replicas_get_all_by_share(self.ctxt, share['id']))
915 share_type_id = share['instances'][0].get('share_type_id', None)
916 quota.QUOTAS.reserve.assert_called_once_with(
917 self.ctxt, project_id=share['project_id'],
918 user_id=share['user_id'], share_type_id=share_type_id,
919 share_replicas=-1, replica_gigabytes=share['size'])
920 quota.QUOTAS.commit.assert_called_once_with(
921 self.ctxt, 'reservation', project_id=share['project_id'],
922 user_id=share['user_id'], share_type_id=share_type_id)
923
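# Expected quota deltas: deleting a replica releases only replica quotas;
# deleting a non-replicated share instance releases only share quotas; a
# share instance that is itself an active replica releases both.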
924 @ddt.data(
925 (True, {"share_replicas": -1, "replica_gigabytes": 0}, 'active'),
926 (False, {"shares": -1, "gigabytes": 0}, None),
927 (False, {"shares": -1, "gigabytes": 0,
928 "share_replicas": -1, "replica_gigabytes": 0}, 'active')
929 )
930 @ddt.unpack
931 def test_share_instance_delete_quota_error(self, is_replica, deltas,
932 replica_state):
933 share = db_utils.create_share(replica_state=replica_state)
934 share = db_api.share_get(self.ctxt, share['id'])
935 instance_id = share['instances'][0]['id']
936
937 if is_replica:
938 replica = db_utils.create_share_replica(
939 share_id=share['id'],
940 replica_state=constants.REPLICA_STATE_ACTIVE)
941 instance_id = replica['id']
942 reservation = 'fake'
943 share_type_id = share['instances'][0]['share_type_id']
944
945 self.mock_object(quota.QUOTAS, 'reserve',
946 mock.Mock(return_value=reservation))
947 self.mock_object(quota.QUOTAS, 'commit', mock.Mock(
948 side_effect=exception.QuotaError('fake')))
949 self.mock_object(quota.QUOTAS, 'rollback')
950
951 # NOTE(silvacarlose): not asserting with assertRaises since the
952 # _update_share_instance_usages method does not raise an exception
953 db_api.share_instance_delete(
954 self.ctxt, instance_id, session=None, need_to_update_usages=True)
955
956 quota.QUOTAS.reserve.assert_called_once_with(
957 self.ctxt, project_id=share['project_id'],
958 user_id=share['user_id'], share_type_id=share_type_id, **deltas)
959 quota.QUOTAS.commit.assert_called_once_with(
960 self.ctxt, reservation, project_id=share['project_id'],
961 user_id=share['user_id'], share_type_id=share_type_id)
962 quota.QUOTAS.rollback.assert_called_once_with(
963 self.ctxt, reservation, share_type_id=share_type_id)
964
965 def test_share_instance_access_copy(self):
966 share = db_utils.create_share()
967 rules = []
968 for i in range(0, 5):
969 rules.append(db_utils.create_access(share_id=share['id']))
970
971 instance = db_utils.create_share_instance(share_id=share['id'])
972
973 share_access_rules = db_api.share_instance_access_copy(
974 self.ctxt, share['id'], instance['id'])
975 share_access_rule_ids = [a['id'] for a in share_access_rules]
976
977 self.assertEqual(5, len(share_access_rules))
978 for rule_id in share_access_rule_ids:
979 self.assertIsNotNone(
980 db_api.share_instance_access_get(
981 self.ctxt, rule_id, instance['id']))
982
983
984 @ddt.ddt
985 class ShareGroupDatabaseAPITestCase(test.TestCase):
986 def setUp(self):
987 """Run before each test."""
988 super(ShareGroupDatabaseAPITestCase, self).setUp()
989 self.ctxt = context.get_admin_context()
990
991 def test_share_group_create_with_share_type(self):
992 fake_share_types = ["fake_share_type"]
993 share_group = db_utils.create_share_group(share_types=fake_share_types)
994 share_group = db_api.share_group_get(self.ctxt, share_group['id'])
995
996 self.assertEqual(1, len(share_group['share_types']))
997
998 def test_share_group_get(self):
999 share_group = db_utils.create_share_group()
1000
1001 self.assertDictMatch(
1002 dict(share_group),
1003 dict(db_api.share_group_get(self.ctxt, share_group['id'])))
1004
1005 def test_count_share_groups_in_share_network(self):
1006 share_network = db_utils.create_share_network()
1007 db_utils.create_share_group()
1008 db_utils.create_share_group(share_network_id=share_network['id'])
1009
1010 count = db_api.count_share_groups_in_share_network(
1011 self.ctxt, share_network_id=share_network['id'])
1012
1013 self.assertEqual(1, count)
1014
1015 def test_share_group_get_all(self):
1016 expected_share_group = db_utils.create_share_group()
1017
1018 share_groups = db_api.share_group_get_all(self.ctxt, detailed=False)
1019
1020 self.assertEqual(1, len(share_groups))
1021 share_group = share_groups[0]
1022 self.assertEqual(2, len(dict(share_group).keys()))
1023 self.assertEqual(expected_share_group['id'], share_group['id'])
1024 self.assertEqual(expected_share_group['name'], share_group['name'])
1025
1026 def test_share_group_get_all_with_detail(self):
1027 expected_share_group = db_utils.create_share_group()
1028
1029 share_groups = db_api.share_group_get_all(self.ctxt, detailed=True)
1030
1031 self.assertEqual(1, len(share_groups))
1032 self.assertDictMatch(dict(expected_share_group), dict(share_groups[0]))
1033
1034 def test_share_group_get_all_by_host(self):
1035 fake_host = 'my_fake_host'
1036 expected_share_group = db_utils.create_share_group(host=fake_host)
1037 db_utils.create_share_group()
1038
1039 share_groups = db_api.share_group_get_all_by_host(
1040 self.ctxt, fake_host, detailed=False)
1041
1042 self.assertEqual(1, len(share_groups))
1043 share_group = share_groups[0]
1044 self.assertEqual(2, len(dict(share_group).keys()))
1045 self.assertEqual(expected_share_group['id'], share_group['id'])
1046 self.assertEqual(expected_share_group['name'], share_group['name'])
1047
1048 def test_share_group_get_all_by_host_with_details(self):
1049 fake_host = 'my_fake_host'
1050 expected_share_group = db_utils.create_share_group(host=fake_host)
1051 db_utils.create_share_group()
1052
1053 share_groups = db_api.share_group_get_all_by_host(
1054 self.ctxt, fake_host, detailed=True)
1055
1056 self.assertEqual(1, len(share_groups))
1057 share_group = share_groups[0]
1058 self.assertDictMatch(dict(expected_share_group), dict(share_group))
1059 self.assertEqual(fake_host, share_group['host'])
1060
1061 def test_share_group_get_all_by_project(self):
1062 fake_project = 'fake_project'
1063 expected_group = db_utils.create_share_group(
1064 project_id=fake_project)
1065 db_utils.create_share_group()
1066
1067 groups = db_api.share_group_get_all_by_project(self.ctxt,
1068 fake_project,
1069 detailed=False)
1070
1071 self.assertEqual(1, len(groups))
1072 group = groups[0]
1073 self.assertEqual(2, len(dict(group).keys()))
1074 self.assertEqual(expected_group['id'], group['id'])
1075 self.assertEqual(expected_group['name'], group['name'])
1076
1077 def test_share_group_get_all_by_share_server(self):
1078 fake_server = 123
1079 expected_group = db_utils.create_share_group(
1080 share_server_id=fake_server)
1081 db_utils.create_share_group()
1082
1083 groups = db_api.share_group_get_all_by_share_server(self.ctxt,
1084 fake_server)
1085
1086 self.assertEqual(1, len(groups))
1087 group = groups[0]
1088 self.assertEqual(expected_group['id'], group['id'])
1089 self.assertEqual(expected_group['name'], group['name'])
1090
1091 def test_share_group_get_all_by_project_with_details(self):
1092 fake_project = 'fake_project'
1093 expected_group = db_utils.create_share_group(
1094 project_id=fake_project)
1095 db_utils.create_share_group()
1096
1097 groups = db_api.share_group_get_all_by_project(self.ctxt,
1098 fake_project,
1099 detailed=True)
1100
1101 self.assertEqual(1, len(groups))
1102 group = groups[0]
1103 self.assertDictMatch(dict(expected_group), dict(group))
1104 self.assertEqual(fake_project, group['project_id'])
1105
1106 @ddt.data(({'name': 'fo'}, 0), ({'description': 'd'}, 0),
1107 ({'name': 'foo', 'description': 'd'}, 0),
1108 ({'name': 'foo'}, 1), ({'description': 'ds'}, 1),
1109 ({'name~': 'foo', 'description~': 'ds'}, 2),
1110 ({'name': 'foo', 'description~': 'ds'}, 1),
1111 ({'name~': 'foo', 'description': 'ds'}, 1))
1112 @ddt.unpack
1113 def test_share_group_get_all_by_name_and_description(
1114 self, search_opts, group_number):
1115 db_utils.create_share_group(name='fo1', description='d1')
1116 expected_group1 = db_utils.create_share_group(name='foo',
1117 description='ds')
1118 expected_group2 = db_utils.create_share_group(name='foo1',
1119 description='ds2')
1120
1121 groups = db_api.share_group_get_all(
1122 self.ctxt, detailed=True,
1123 filters=search_opts)
1124
1125 self.assertEqual(group_number, len(groups))
1126 if group_number == 1:
1127 self.assertDictMatch(dict(expected_group1), dict(groups[0]))
1128 elif group_number == 2:
1129 self.assertDictMatch(dict(expected_group1), dict(groups[1]))
1130 self.assertDictMatch(dict(expected_group2), dict(groups[0]))
1131
1132 def test_share_group_update(self):
1133 fake_name = "my_fake_name"
1134 expected_group = db_utils.create_share_group()
1135 expected_group['name'] = fake_name
1136
1137 db_api.share_group_update(self.ctxt,
1138 expected_group['id'],
1139 {'name': fake_name})
1140
1141 group = db_api.share_group_get(self.ctxt, expected_group['id'])
1142 self.assertEqual(fake_name, group['name'])
1143
1144 def test_share_group_destroy(self):
1145 group = db_utils.create_share_group()
1146 db_api.share_group_get(self.ctxt, group['id'])
1147
1148 db_api.share_group_destroy(self.ctxt, group['id'])
1149
1150 self.assertRaises(exception.NotFound, db_api.share_group_get,
1151 self.ctxt, group['id'])
1152
1153 def test_count_shares_in_share_group(self):
1154 sg = db_utils.create_share_group()
1155 db_utils.create_share(share_group_id=sg['id'])
1156 db_utils.create_share()
1157
1158 count = db_api.count_shares_in_share_group(self.ctxt, sg['id'])
1159
1160 self.assertEqual(1, count)
1161
1162 def test_count_sg_snapshots_in_share_group(self):
1163 sg = db_utils.create_share_group()
1164 db_utils.create_share_group_snapshot(sg['id'])
1165 db_utils.create_share_group_snapshot(sg['id'])
1166
1167 count = db_api.count_share_group_snapshots_in_share_group(
1168 self.ctxt, sg['id'])
1169
1170 self.assertEqual(2, count)
1171
1172 def test_share_group_snapshot_get(self):
1173 sg = db_utils.create_share_group()
1174 sg_snap = db_utils.create_share_group_snapshot(sg['id'])
1175
1176 self.assertDictMatch(
1177 dict(sg_snap),
1178 dict(db_api.share_group_snapshot_get(self.ctxt, sg_snap['id'])))
1179
1180 def test_share_group_snapshot_get_all(self):
1181 sg = db_utils.create_share_group()
1182 expected_sg_snap = db_utils.create_share_group_snapshot(sg['id'])
1183
1184 snaps = db_api.share_group_snapshot_get_all(self.ctxt, detailed=False)
1185
1186 self.assertEqual(1, len(snaps))
1187 snap = snaps[0]
1188 self.assertEqual(2, len(dict(snap).keys()))
1189 self.assertEqual(expected_sg_snap['id'], snap['id'])
1190 self.assertEqual(expected_sg_snap['name'], snap['name'])
1191
1192 def test_share_group_snapshot_get_all_with_detail(self):
1193 sg = db_utils.create_share_group()
1194 expected_sg_snap = db_utils.create_share_group_snapshot(sg['id'])
1195
1196 snaps = db_api.share_group_snapshot_get_all(self.ctxt, detailed=True)
1197
1198 self.assertEqual(1, len(snaps))
1199 snap = snaps[0]
1200 self.assertDictMatch(dict(expected_sg_snap), dict(snap))
1201
1202 def test_share_group_snapshot_get_all_by_project(self):
1203 fake_project = uuidutils.generate_uuid()
1204 sg = db_utils.create_share_group()
1205 expected_sg_snap = db_utils.create_share_group_snapshot(
1206 sg['id'], project_id=fake_project)
1207
1208 snaps = db_api.share_group_snapshot_get_all_by_project(
1209 self.ctxt, fake_project, detailed=False)
1210
1211 self.assertEqual(1, len(snaps))
1212 snap = snaps[0]
1213 self.assertEqual(2, len(dict(snap).keys()))
1214 self.assertEqual(expected_sg_snap['id'], snap['id'])
1215 self.assertEqual(expected_sg_snap['name'], snap['name'])
1216
1217 def test_share_group_snapshot_get_all_by_project_with_details(self):
1218 fake_project = uuidutils.generate_uuid()
1219 sg = db_utils.create_share_group()
1220 expected_sg_snap = db_utils.create_share_group_snapshot(
1221 sg['id'], project_id=fake_project)
1222
1223 snaps = db_api.share_group_snapshot_get_all_by_project(
1224 self.ctxt, fake_project, detailed=True)
1225
1226 self.assertEqual(1, len(snaps))
1227 snap = snaps[0]
1228 self.assertDictMatch(dict(expected_sg_snap), dict(snap))
1229 self.assertEqual(fake_project, snap['project_id'])
1230
1231 def test_share_group_snapshot_update(self):
1232 fake_name = "my_fake_name"
1233 sg = db_utils.create_share_group()
1234 expected_sg_snap = db_utils.create_share_group_snapshot(sg['id'])
1235 expected_sg_snap['name'] = fake_name
1236
1237 db_api.share_group_snapshot_update(
1238 self.ctxt, expected_sg_snap['id'], {'name': fake_name})
1239
1240 sg_snap = db_api.share_group_snapshot_get(
1241 self.ctxt, expected_sg_snap['id'])
1242 self.assertEqual(fake_name, sg_snap['name'])
1243
1244 def test_share_group_snapshot_destroy(self):
1245 sg = db_utils.create_share_group()
1246 sg_snap = db_utils.create_share_group_snapshot(sg['id'])
1247 db_api.share_group_snapshot_get(self.ctxt, sg_snap['id'])
1248
1249 db_api.share_group_snapshot_destroy(self.ctxt, sg_snap['id'])
1250
1251 self.assertRaises(
1252 exception.NotFound,
1253 db_api.share_group_snapshot_get, self.ctxt, sg_snap['id'])
1254
1255 def test_share_group_snapshot_members_get_all(self):
1256 sg = db_utils.create_share_group()
1257 share = db_utils.create_share(share_group_id=sg['id'])
1258 si = db_utils.create_share_instance(share_id=share['id'])
1259 sg_snap = db_utils.create_share_group_snapshot(sg['id'])
1260 expected_member = db_utils.create_share_group_snapshot_member(
1261 sg_snap['id'], share_instance_id=si['id'])
1262
1263 members = db_api.share_group_snapshot_members_get_all(
1264 self.ctxt, sg_snap['id'])
1265
1266 self.assertEqual(1, len(members))
1267 self.assertDictMatch(dict(expected_member), dict(members[0]))
1268
1269 def test_count_share_group_snapshot_members_in_share(self):
1270 sg = db_utils.create_share_group()
1271 share = db_utils.create_share(share_group_id=sg['id'])
1272 si = db_utils.create_share_instance(share_id=share['id'])
1273 share2 = db_utils.create_share(share_group_id=sg['id'])
1274 si2 = db_utils.create_share_instance(share_id=share2['id'])
1275 sg_snap = db_utils.create_share_group_snapshot(sg['id'])
1276 db_utils.create_share_group_snapshot_member(
1277 sg_snap['id'], share_instance_id=si['id'])
1278 db_utils.create_share_group_snapshot_member(
1279 sg_snap['id'], share_instance_id=si2['id'])
1280
1281 count = db_api.count_share_group_snapshot_members_in_share(
1282 self.ctxt, share['id'])
1283
1284 self.assertEqual(1, count)
1285
1286 def test_share_group_snapshot_members_get(self):
1287 sg = db_utils.create_share_group()
1288 share = db_utils.create_share(share_group_id=sg['id'])
1289 si = db_utils.create_share_instance(share_id=share['id'])
1290 sg_snap = db_utils.create_share_group_snapshot(sg['id'])
1291 expected_member = db_utils.create_share_group_snapshot_member(
1292 sg_snap['id'], share_instance_id=si['id'])
1293
1294 member = db_api.share_group_snapshot_member_get(
1295 self.ctxt, expected_member['id'])
1296
1297 self.assertDictMatch(dict(expected_member), dict(member))
1298
1299 def test_share_group_snapshot_members_get_not_found(self):
1300 self.assertRaises(
1301 exception.ShareGroupSnapshotMemberNotFound,
1302 db_api.share_group_snapshot_member_get, self.ctxt, 'fake_id')
1303
1304 def test_share_group_snapshot_member_update(self):
1305 sg = db_utils.create_share_group()
1306 share = db_utils.create_share(share_group_id=sg['id'])
1307 si = db_utils.create_share_instance(share_id=share['id'])
1308 sg_snap = db_utils.create_share_group_snapshot(sg['id'])
1309 expected_member = db_utils.create_share_group_snapshot_member(
1310 sg_snap['id'], share_instance_id=si['id'])
1311
1312 db_api.share_group_snapshot_member_update(
1313 self.ctxt, expected_member['id'],
1314 {'status': constants.STATUS_AVAILABLE})
1315
1316 member = db_api.share_group_snapshot_member_get(
1317 self.ctxt, expected_member['id'])
1318 self.assertEqual(constants.STATUS_AVAILABLE, member['status'])
1319
1320
1321 @ddt.ddt
1322 class ShareGroupTypeAPITestCase(test.TestCase):
1323
1324 def setUp(self):
1325 super(ShareGroupTypeAPITestCase, self).setUp()
1326 self.ctxt = context.RequestContext(
1327 user_id='user_id', project_id='project_id', is_admin=True)
1328
1329 @ddt.data(True, False)
1330 def test_share_type_destroy_in_use(self, used_by_groups):
1331 share_type_1 = db_utils.create_share_type(name='fike')
1332 share_type_2 = db_utils.create_share_type(name='bowman')
1333 share_group_type_1 = db_utils.create_share_group_type(
1334 name='orange', is_public=False, share_types=[share_type_1['id']],
1335 group_specs={'dabo': 'allin', 'cadence': 'count'},
1336 override_defaults=True)
1337 db_api.share_group_type_access_add(self.ctxt,
1338 share_group_type_1['id'],
1339 "2018ndaetfigovnsaslcahfavmrpions")
1340 db_api.share_group_type_access_add(self.ctxt,
1341 share_group_type_1['id'],
1342 "2016ndaetfigovnsaslcahfavmrpions")
1343 share_group_type_2 = db_utils.create_share_group_type(
1344 name='regalia', share_types=[share_type_2['id']])
1345 if used_by_groups:
1346 share_group_1 = db_utils.create_share_group(
1347 share_group_type_id=share_group_type_1['id'],
1348 share_types=[share_type_1['id']])
1349 share_group_2 = db_utils.create_share_group(
1350 share_group_type_id=share_group_type_2['id'],
1351 share_types=[share_type_2['id']])
1352 self.assertRaises(exception.ShareGroupTypeInUse,
1353 db_api.share_group_type_destroy,
1354 self.ctxt, share_group_type_1['id'])
1355 self.assertRaises(exception.ShareGroupTypeInUse,
1356 db_api.share_group_type_destroy,
1357 self.ctxt, share_group_type_2['id'])
1358 # Cleanup share groups
1359 db_api.share_group_destroy(self.ctxt, share_group_1['id'])
1360 db_api.share_group_destroy(self.ctxt, share_group_2['id'])
1361
1362 # Clean up share_group_type_1 and verify it is gone
1363 self.assertIsNone(db_api.share_group_type_destroy(
1364 self.ctxt, share_group_type_1['id']))
1365 self.assertDictMatch(
1366 {}, db_api.share_group_type_specs_get(
1367 self.ctxt, share_group_type_1['id']))
1368 self.assertRaises(exception.ShareGroupTypeNotFound,
1369 db_api.share_group_type_access_get_all,
1370 self.ctxt, share_group_type_1['id'])
1371 self.assertRaises(exception.ShareGroupTypeNotFound,
1372 db_api.share_group_type_get,
1373 self.ctxt, share_group_type_1['id'])
1374
1375 # share_group_type_2 must still be around
1376 self.assertEqual(share_group_type_2['id'],
1377 db_api.share_group_type_get(
1378 self.ctxt, share_group_type_2['id'])['id'])
1379
1380
1381 @ddt.ddt
1382 class ShareSnapshotDatabaseAPITestCase(test.TestCase):
1383
1384 def setUp(self):
1385 """Run before each test."""
1386 super(ShareSnapshotDatabaseAPITestCase, self).setUp()
1387 self.ctxt = context.get_admin_context()
1388
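# Fixture: two fake shares backed by four share instances, a snapshot on
# each share ('fake_snapshot_id_1' carries three explicit snapshot
# instances, 'fake_snapshot_id_2' one), and an export location per
# snapshot instance. Several tests below depend on this layout.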
1389 self.share_instances = [
1390 db_utils.create_share_instance(
1391 status=constants.STATUS_REPLICATION_CHANGE,
1392 share_id='fake_share_id_1'),
1393 db_utils.create_share_instance(
1394 status=constants.STATUS_AVAILABLE,
1395 share_id='fake_share_id_1'),
1396 db_utils.create_share_instance(
1397 status=constants.STATUS_ERROR_DELETING,
1398 share_id='fake_share_id_2'),
1399 db_utils.create_share_instance(
1400 status=constants.STATUS_MANAGING,
1401 share_id='fake_share_id_2'),
1402 ]
1403 self.share_1 = db_utils.create_share(
1404 id='fake_share_id_1', instances=self.share_instances[0:2])
1405 self.share_2 = db_utils.create_share(
1406 id='fake_share_id_2', instances=self.share_instances[2:-1])
1407 self.snapshot_instances = [
1408 db_utils.create_snapshot_instance(
1409 'fake_snapshot_id_1',
1410 status=constants.STATUS_CREATING,
1411 share_instance_id=self.share_instances[0]['id']),
1412 db_utils.create_snapshot_instance(
1413 'fake_snapshot_id_1',
1414 status=constants.STATUS_ERROR,
1415 share_instance_id=self.share_instances[1]['id']),
1416 db_utils.create_snapshot_instance(
1417 'fake_snapshot_id_1',
1418 status=constants.STATUS_DELETING,
1419 share_instance_id=self.share_instances[2]['id']),
1420 db_utils.create_snapshot_instance(
1421 'fake_snapshot_id_2',
1422 status=constants.STATUS_AVAILABLE,
1423 id='fake_snapshot_instance_id',
1424 provider_location='hogsmeade:snapshot1',
1425 progress='87%',
1426 share_instance_id=self.share_instances[3]['id']),
1427 ]
1428 self.snapshot_1 = db_utils.create_snapshot(
1429 id='fake_snapshot_id_1', share_id=self.share_1['id'],
1430 instances=self.snapshot_instances[0:3])
1431 self.snapshot_2 = db_utils.create_snapshot(
1432 id='fake_snapshot_id_2', share_id=self.share_2['id'],
1433 instances=self.snapshot_instances[3:4])
1434
1435 self.snapshot_instance_export_locations = [
1436 db_utils.create_snapshot_instance_export_locations(
1437 self.snapshot_instances[0].id,
1438 path='1.1.1.1:/fake_path',
1439 is_admin_only=True),
1440 db_utils.create_snapshot_instance_export_locations(
1441 self.snapshot_instances[1].id,
1442 path='2.2.2.2:/fake_path',
1443 is_admin_only=True),
1444 db_utils.create_snapshot_instance_export_locations(
1445 self.snapshot_instances[2].id,
1446 path='3.3.3.3:/fake_path',
1447 is_admin_only=True),
1448 db_utils.create_snapshot_instance_export_locations(
1449 self.snapshot_instances[3].id,
1450 path='4.4.4.4:/fake_path',
1451 is_admin_only=True)
1452 ]
1453
1454 def test_create(self):
1455 share = db_utils.create_share(size=1)
1456 values = {
1457 'share_id': share['id'],
1458 'size': share['size'],
1459 'user_id': share['user_id'],
1460 'project_id': share['project_id'],
1461 'status': constants.STATUS_CREATING,
1462 'progress': '0%',
1463 'share_size': share['size'],
1464 'display_name': 'fake',
1465 'display_description': 'fake',
1466 'share_proto': share['share_proto']
1467 }
1468
1469 actual_result = db_api.share_snapshot_create(
1470 self.ctxt, values, create_snapshot_instance=True)
1471
1472 self.assertEqual(1, len(actual_result.instances))
1473 self.assertSubDictMatch(values, actual_result.to_dict())
1474
1475 def test_share_snapshot_get_latest_for_share(self):
1476
1477 share = db_utils.create_share(size=1)
1478 values = {
1479 'share_id': share['id'],
1480 'size': share['size'],
1481 'user_id': share['user_id'],
1482 'project_id': share['project_id'],
1483 'status': constants.STATUS_CREATING,
1484 'progress': '0%',
1485 'share_size': share['size'],
1486 'display_description': 'fake',
1487 'share_proto': share['share_proto'],
1488 }
1489 values1 = copy.deepcopy(values)
1490 values1['display_name'] = 'snap1'
1491 db_api.share_snapshot_create(self.ctxt, values1)
1492 values2 = copy.deepcopy(values)
1493 values2['display_name'] = 'snap2'
1494 db_api.share_snapshot_create(self.ctxt, values2)
1495 values3 = copy.deepcopy(values)
1496 values3['display_name'] = 'snap3'
1497 db_api.share_snapshot_create(self.ctxt, values3)
1498
1499 result = db_api.share_snapshot_get_latest_for_share(self.ctxt,
1500 share['id'])
1501
1502 self.assertSubDictMatch(values3, result.to_dict())
1503
1504 def test_get_instance(self):
1505 snapshot = db_utils.create_snapshot(with_share=True)
1506
1507 instance = db_api.share_snapshot_instance_get(
1508 self.ctxt, snapshot.instance['id'], with_share_data=True)
1509 instance_dict = instance.to_dict()
1510
1511 self.assertTrue(hasattr(instance, 'name'))
1512 self.assertTrue(hasattr(instance, 'share_name'))
1513 self.assertTrue(hasattr(instance, 'share_id'))
1514 self.assertIn('name', instance_dict)
1515 self.assertIn('share_name', instance_dict)
1516
1517 @ddt.data(None, constants.STATUS_ERROR)
1518 def test_share_snapshot_instance_get_all_with_filters_some(self, status):
1519 expected_status = status or (constants.STATUS_CREATING,
1520 constants.STATUS_DELETING)
1521 expected_number = 1 if status else 3
1522 filters = {
1523 'snapshot_ids': 'fake_snapshot_id_1',
1524 'statuses': expected_status
1525 }
1526 instances = db_api.share_snapshot_instance_get_all_with_filters(
1527 self.ctxt, filters)
1528
1529 for instance in instances:
1530 self.assertEqual('fake_snapshot_id_1', instance['snapshot_id'])
1531 self.assertIn(instance['status'], filters['statuses'])
1532
1533 self.assertEqual(expected_number, len(instances))
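# Reading the parametrization above: STATUS_ERROR narrows the result to the
# single errored instance of 'fake_snapshot_id_1', while the default
# creating/deleting tuple matches three instances, so 'snapshot_ids' and
# 'statuses' act as a combined AND filter.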
1534
1535 def test_share_snapshot_instance_get_all_with_filters_all_filters(self):
1536 filters = {
1537 'snapshot_ids': 'fake_snapshot_id_2',
1538 'instance_ids': 'fake_snapshot_instance_id',
1539 'statuses': constants.STATUS_AVAILABLE,
1540 'share_instance_ids': self.share_instances[3]['id'],
1541 }
1542 instances = db_api.share_snapshot_instance_get_all_with_filters(
1543 self.ctxt, filters, with_share_data=True)
1544 self.assertEqual(1, len(instances))
1545 self.assertEqual('fake_snapshot_instance_id', instances[0]['id'])
1546 self.assertEqual(
1547 self.share_2['id'], instances[0]['share_instance']['share_id'])
1548
1549 def test_share_snapshot_instance_get_all_with_filters_wrong_filters(self):
1550 filters = {
1551 'some_key': 'some_value',
1552 'some_other_key': 'some_other_value',
1553 }
1554 instances = db_api.share_snapshot_instance_get_all_with_filters(
1555 self.ctxt, filters)
1556 self.assertEqual(6, len(instances))
1557
1558 def test_share_snapshot_instance_create(self):
1559 snapshot = db_utils.create_snapshot(with_share=True)
1560 share = snapshot['share']
1561 share_instance = db_utils.create_share_instance(share_id=share['id'])
1562 values = {
1563 'snapshot_id': snapshot['id'],
1564 'share_instance_id': share_instance['id'],
1565 'status': constants.STATUS_MANAGING,
1566 'progress': '88%',
1567 'provider_location': 'whomping_willow',
1568 }
1569
1570 actual_result = db_api.share_snapshot_instance_create(
1571 self.ctxt, snapshot['id'], values)
1572
1573 snapshot = db_api.share_snapshot_get(self.ctxt, snapshot['id'])
1574
1575 self.assertSubDictMatch(values, actual_result.to_dict())
1576 self.assertEqual(2, len(snapshot['instances']))
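# db_utils.create_snapshot(with_share=True) already yields one snapshot
# instance, so creating a second one through the API is exactly what the
# len == 2 assertion above verifies.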
1577
1578 def test_share_snapshot_instance_update(self):
1579 snapshot = db_utils.create_snapshot(with_share=True)
1580
1581 values = {
1582 'snapshot_id': snapshot['id'],
1583 'status': constants.STATUS_ERROR,
1584 'progress': '18%',
1585 'provider_location': 'godrics_hollow',
1586 }
1587
1588 actual_result = db_api.share_snapshot_instance_update(
1589 self.ctxt, snapshot['instance']['id'], values)
1590
1591 self.assertSubDictMatch(values, actual_result.to_dict())
1592
1593 @ddt.data(2, 1)
1594 def test_share_snapshot_instance_delete(self, instances):
1595 snapshot = db_utils.create_snapshot(with_share=True)
1596 first_instance_id = snapshot['instance']['id']
1597 if instances > 1:
1598 instance = db_utils.create_snapshot_instance(
1599 snapshot['id'],
1600 share_instance_id=snapshot['share']['instance']['id'])
1601 else:
1602 instance = snapshot['instance']
1603
1604 retval = db_api.share_snapshot_instance_delete(
1605 self.ctxt, instance['id'])
1606
1607 self.assertIsNone(retval)
1608 if instances == 1:
1609 self.assertRaises(exception.ShareSnapshotNotFound,
1610 db_api.share_snapshot_get,
1611 self.ctxt, snapshot['id'])
1612 else:
1613 snapshot = db_api.share_snapshot_get(self.ctxt, snapshot['id'])
1614 self.assertEqual(1, len(snapshot['instances']))
1615 self.assertEqual(first_instance_id, snapshot['instance']['id'])
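# Deleting an instance while siblings remain leaves the parent snapshot in
# place; deleting the only remaining instance apparently removes the snapshot
# itself, hence the ShareSnapshotNotFound expectation in the single-instance
# case.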
1616
1617 def test_share_snapshot_access_create(self):
1618 values = {
1619 'share_snapshot_id': self.snapshot_1['id'],
1620 }
1621 actual_result = db_api.share_snapshot_access_create(self.ctxt,
1622 values)
1623
1624 self.assertSubDictMatch(values, actual_result.to_dict())
1625
1626 def test_share_snapshot_instance_access_get_all(self):
1627 access = db_utils.create_snapshot_access(
1628 share_snapshot_id=self.snapshot_1['id'])
1629 session = db_api.get_session()
1630 values = {'share_snapshot_instance_id': self.snapshot_instances[0].id,
1631 'access_id': access['id']}
1632
1633 rules = db_api.share_snapshot_instance_access_get_all(
1634 self.ctxt, access['id'], session)
1635
1636 self.assertSubDictMatch(values, rules[0].to_dict())
1637
1638 def test_share_snapshot_access_get(self):
1639 access = db_utils.create_snapshot_access(
1640 share_snapshot_id=self.snapshot_1['id'])
1641 values = {'share_snapshot_id': self.snapshot_1['id']}
1642
1643 actual_value = db_api.share_snapshot_access_get(
1644 self.ctxt, access['id'])
1645
1646 self.assertSubDictMatch(values, actual_value.to_dict())
1647
1648 def test_share_snapshot_access_get_all_for_share_snapshot(self):
1649 access = db_utils.create_snapshot_access(
1650 share_snapshot_id=self.snapshot_1['id'])
1651 values = {'access_type': access['access_type'],
1652 'access_to': access['access_to'],
1653 'share_snapshot_id': self.snapshot_1['id']}
1654
1655 actual_value = db_api.share_snapshot_access_get_all_for_share_snapshot(
1656 self.ctxt, self.snapshot_1['id'], {})
1657
1658 self.assertSubDictMatch(values, actual_value[0].to_dict())
1659
1660 @ddt.data({'existing': {'access_type': 'cephx', 'access_to': 'alice'},
1661 'new': {'access_type': 'user', 'access_to': 'alice'},
1662 'result': False},
1663 {'existing': {'access_type': 'user', 'access_to': 'bob'},
1664 'new': {'access_type': 'user', 'access_to': 'bob'},
1665 'result': True},
1666 {'existing': {'access_type': 'ip', 'access_to': '10.0.0.10/32'},
1667 'new': {'access_type': 'ip', 'access_to': '10.0.0.10'},
1668 'result': True},
1669 {'existing': {'access_type': 'ip', 'access_to': '10.10.0.11'},
1670 'new': {'access_type': 'ip', 'access_to': '10.10.0.11'},
1671 'result': True},
1672 {'existing': {'access_type': 'ip', 'access_to': 'fd21::11'},
1673 'new': {'access_type': 'ip', 'access_to': 'fd21::11'},
1674 'result': True},
1675 {'existing': {'access_type': 'ip', 'access_to': 'fd21::10'},
1676 'new': {'access_type': 'ip', 'access_to': 'fd21::10/128'},
1677 'result': True},
1678 {'existing': {'access_type': 'ip', 'access_to': '10.10.0.0/22'},
1679 'new': {'access_type': 'ip', 'access_to': '10.10.0.0/24'},
1680 'result': False},
1681 {'existing': {'access_type': 'ip', 'access_to': '2620:52::/48'},
1682 'new': {'access_type': 'ip',
1683 'access_to': '2620:52:0:13b8::/64'},
1684 'result': False})
1685 @ddt.unpack
1686 def test_share_snapshot_check_for_existing_access(self, existing, new,
1687 result):
1688 db_utils.create_snapshot_access(
1689 share_snapshot_id=self.snapshot_1['id'],
1690 access_type=existing['access_type'],
1691 access_to=existing['access_to'])
1692
1693 rule_exists = db_api.share_snapshot_check_for_existing_access(
1694 self.ctxt, self.snapshot_1['id'], new['access_type'],
1695 new['access_to'])
1696
1697 self.assertEqual(result, rule_exists)
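# A minimal sketch of the network comparison the IP cases above imply
# (illustrative only, assuming ipaddress-style semantics rather than quoting
# the real duplicate check): a bare address equals its single-host CIDR, while
# different prefix lengths on the same base address do not match.
#
#     import ipaddress
#     ipaddress.ip_network('10.0.0.10') == ipaddress.ip_network('10.0.0.10/32')
#     # -> True, so the rule counts as a duplicate
#     ipaddress.ip_network('10.10.0.0/22') == ipaddress.ip_network('10.10.0.0/24')
#     # -> False, so a new rule may be created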
1698
1699 def test_share_snapshot_access_get_all_for_snapshot_instance(self):
1700 access = db_utils.create_snapshot_access(
1701 share_snapshot_id=self.snapshot_1['id'])
1702 values = {'access_type': access['access_type'],
1703 'access_to': access['access_to'],
1704 'share_snapshot_id': self.snapshot_1['id']}
1705
1706 out = db_api.share_snapshot_access_get_all_for_snapshot_instance(
1707 self.ctxt, self.snapshot_instances[0].id)
1708
1709 self.assertSubDictMatch(values, out[0].to_dict())
1710
1711 def test_share_snapshot_instance_access_update_state(self):
1712 access = db_utils.create_snapshot_access(
1713 share_snapshot_id=self.snapshot_1['id'])
1714 values = {'state': constants.STATUS_ACTIVE,
1715 'access_id': access['id'],
1716 'share_snapshot_instance_id': self.snapshot_instances[0].id}
1717
1718 actual_result = db_api.share_snapshot_instance_access_update(
1719 self.ctxt, access['id'], self.snapshot_1.instance['id'],
1720 {'state': constants.STATUS_ACTIVE})
1721
1722 self.assertSubDictMatch(values, actual_result.to_dict())
1723
1724 def test_share_snapshot_instance_access_get(self):
1725 access = db_utils.create_snapshot_access(
1726 share_snapshot_id=self.snapshot_1['id'])
1727 values = {'access_id': access['id'],
1728 'share_snapshot_instance_id': self.snapshot_instances[0].id}
1729
1730 actual_result = db_api.share_snapshot_instance_access_get(
1731 self.ctxt, access['id'], self.snapshot_instances[0].id)
1732
1733 self.assertSubDictMatch(values, actual_result.to_dict())
1734
1735 def test_share_snapshot_instance_access_delete(self):
1736 access = db_utils.create_snapshot_access(
1737 share_snapshot_id=self.snapshot_1['id'])
1738
1739 db_api.share_snapshot_instance_access_delete(
1740 self.ctxt, access['id'], self.snapshot_1.instance['id'])
1741
1742 def test_share_snapshot_instance_export_location_create(self):
1743 values = {
1744 'share_snapshot_instance_id': self.snapshot_instances[0].id,
1745 }
1746
1747 actual_result = db_api.share_snapshot_instance_export_location_create(
1748 self.ctxt, values)
1749
1750 self.assertSubDictMatch(values, actual_result.to_dict())
1751
1752 def test_share_snapshot_export_locations_get(self):
1753 out = db_api.share_snapshot_export_locations_get(
1754 self.ctxt, self.snapshot_1['id'])
1755
1756 keys = ['share_snapshot_instance_id', 'path', 'is_admin_only']
1757 for expected, actual in zip(self.snapshot_instance_export_locations,
1758 out):
1759 [self.assertEqual(expected[k], actual[k]) for k in keys]
1760
1761 def test_share_snapshot_instance_export_locations_get(self):
1762 out = db_api.share_snapshot_instance_export_locations_get_all(
1763 self.ctxt, self.snapshot_instances[0].id)
1764
1765 keys = ['share_snapshot_instance_id', 'path', 'is_admin_only']
1766 for key in keys:
1767 self.assertEqual(self.snapshot_instance_export_locations[0][key],
1768 out[0][key])
1769
1770 def test_share_snapshot_instance_export_locations_update(self):
1771 snapshot = db_utils.create_snapshot(with_share=True)
1772 initial_locations = ['fake1/1/', 'fake2/2', 'fake3/3']
1773 update_locations = ['fake4/4', 'fake2/2', 'fake3/3']
1774
1775 # add initial locations
1776 db_api.share_snapshot_instance_export_locations_update(
1777 self.ctxt, snapshot.instance['id'], initial_locations, False)
1778 # update locations
1779 db_api.share_snapshot_instance_export_locations_update(
1780 self.ctxt, snapshot.instance['id'], update_locations, True)
1781
1782 get_result = db_api.share_snapshot_instance_export_locations_get_all(
1783 self.ctxt, snapshot.instance['id'])
1784 result_locations = [el['path'] for el in get_result]
1785
1786 self.assertEqual(sorted(result_locations), sorted(update_locations))
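# The update is called twice on purpose: the first call (delete=False) seeds
# the initial paths, and the second call with delete=True presumably replaces
# the stored set, so 'fake1/1/' disappears and only the paths in
# update_locations survive the final lookup.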
1787
1788 def test_share_snapshot_instance_export_locations_update_wrong_type(self):
1789 snapshot = db_utils.create_snapshot(with_share=True)
1790 new_export_locations = [1]
1791
1792 self.assertRaises(
1793 exception.ManilaException,
1794 db_api.share_snapshot_instance_export_locations_update,
1795 self.ctxt, snapshot.instance['id'], new_export_locations, False)
1796
1797
1798 class ShareExportLocationsDatabaseAPITestCase(test.TestCase):
1799
1800 def setUp(self):
1801 super(ShareExportLocationsDatabaseAPITestCase, self).setUp()
1802 self.ctxt = context.get_admin_context()
1803
1804 def test_update_valid_order(self):
1805 share = db_utils.create_share()
1806 initial_locations = ['fake1/1/', 'fake2/2', 'fake3/3']
1807 update_locations = ['fake4/4', 'fake2/2', 'fake3/3']
1808
1809 # add initial locations
1810 db_api.share_export_locations_update(self.ctxt, share.instance['id'],
1811 initial_locations, False)
1812 # update locations
1813 db_api.share_export_locations_update(self.ctxt, share.instance['id'],
1814 update_locations, True)
1815 actual_result = db_api.share_export_locations_get(self.ctxt,
1816 share['id'])
1817
1818 # actual result should contain the locations in exactly the same order
1819 self.assertEqual(actual_result, update_locations)
1820
1821 def test_update_string(self):
1822 share = db_utils.create_share()
1823 initial_location = 'fake1/1/'
1824
1825 db_api.share_export_locations_update(self.ctxt, share.instance['id'],
1826 initial_location, False)
1827 actual_result = db_api.share_export_locations_get(self.ctxt,
1828 share['id'])
1829
1830 self.assertEqual(actual_result, [initial_location])
1831
1832 def test_get_admin_export_locations(self):
1833 ctxt_user = context.RequestContext(
1834 user_id='fake user', project_id='fake project', is_admin=False)
1835 share = db_utils.create_share()
1836 locations = [
1837 {'path': 'fake1/1/', 'is_admin_only': True},
1838 {'path': 'fake2/2/', 'is_admin_only': True},
1839 {'path': 'fake3/3/', 'is_admin_only': True},
1840 ]
1841
1842 db_api.share_export_locations_update(
1843 self.ctxt, share.instance['id'], locations, delete=False)
1844
1845 user_result = db_api.share_export_locations_get(ctxt_user, share['id'])
1846 self.assertEqual([], user_result)
1847
1848 admin_result = db_api.share_export_locations_get(
1849 self.ctxt, share['id'])
1850 self.assertEqual(3, len(admin_result))
1851 for location in locations:
1852 self.assertIn(location['path'], admin_result)
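# Export locations flagged is_admin_only are hidden from the non-admin request
# context, hence the empty user_result, while the admin context still sees all
# three paths.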
1853
1854 def test_get_user_export_locations(self):
1855 ctxt_user = context.RequestContext(
1856 user_id='fake user', project_id='fake project', is_admin=False)
1857 share = db_utils.create_share()
1858 locations = [
1859 {'path': 'fake1/1/', 'is_admin_only': False},
1860 {'path': 'fake2/2/', 'is_admin_only': False},
1861 {'path': 'fake3/3/', 'is_admin_only': False},
1862 ]
1863
1864 db_api.share_export_locations_update(
1865 self.ctxt, share.instance['id'], locations, delete=False)
1866
1867 user_result = db_api.share_export_locations_get(ctxt_user, share['id'])
1868 self.assertEqual(3, len(user_result))
1869 for location in locations:
1870 self.assertIn(location['path'], user_result)
1871
1872 admin_result = db_api.share_export_locations_get(
1873 self.ctxt, share['id'])
1874 self.assertEqual(3, len(admin_result))
1875 for location in locations:
1876 self.assertIn(location['path'], admin_result)
1877
1878 def test_get_user_export_locations_old_view(self):
1879 ctxt_user = context.RequestContext(
1880 user_id='fake user', project_id='fake project', is_admin=False)
1881 share = db_utils.create_share()
1882 locations = ['fake1/1/', 'fake2/2', 'fake3/3']
1883
1884 db_api.share_export_locations_update(
1885 self.ctxt, share.instance['id'], locations, delete=False)
1886
1887 user_result = db_api.share_export_locations_get(ctxt_user, share['id'])
1888 self.assertEqual(locations, user_result)
1889
1890 admin_result = db_api.share_export_locations_get(
1891 self.ctxt, share['id'])
1892 self.assertEqual(locations, admin_result)
1893
1894
1895 @ddt.ddt
1896 class ShareInstanceExportLocationsMetadataDatabaseAPITestCase(test.TestCase):
1897
1898 def setUp(self):
1899 clname = ShareInstanceExportLocationsMetadataDatabaseAPITestCase
1900 super(clname, self).setUp()
1901 self.ctxt = context.get_admin_context()
1902 share_id = 'fake_share_id'
1903 instances = [
1904 db_utils.create_share_instance(
1905 share_id=share_id,
1906 status=constants.STATUS_AVAILABLE),
1907 db_utils.create_share_instance(
1908 share_id=share_id,
1909 status=constants.STATUS_MIGRATING),
1910 db_utils.create_share_instance(
1911 share_id=share_id,
1912 status=constants.STATUS_MIGRATING_TO),
1913 ]
1914 self.share = db_utils.create_share(
1915 id=share_id,
1916 instances=instances)
1917 self.initial_locations = ['/fake/foo/', '/fake/bar', '/fake/quuz']
1918 self.shown_locations = ['/fake/foo/', '/fake/bar']
1919 for i in range(0, 3):
1920 db_api.share_export_locations_update(
1921 self.ctxt, instances[i]['id'], self.initial_locations[i],
1922 delete=False)
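# Fixture layout for the tests below: one share with three instances
# (available, migrating, migrating-to), each holding a single path from
# initial_locations; shown_locations deliberately omits the migration
# destination's path.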
1923
1924 def _get_export_location_uuid_by_path(self, path):
1925 els = db_api.share_export_locations_get_by_share_id(
1926 self.ctxt, self.share.id)
1927 export_location_uuid = None
1928 for el in els:
1929 if el.path == path:
1930 export_location_uuid = el.uuid
1931 self.assertIsNotNone(export_location_uuid)
1932 return export_location_uuid
1933
1934 def test_get_export_locations_by_share_id(self):
1935 els = db_api.share_export_locations_get_by_share_id(
1936 self.ctxt, self.share.id)
1937 self.assertEqual(3, len(els))
1938 for path in self.shown_locations:
1939 self.assertTrue(any(path in el.path for el in els))
1940
1941 def test_get_export_locations_by_share_id_ignore_migration_dest(self):
1942 els = db_api.share_export_locations_get_by_share_id(
1943 self.ctxt, self.share.id, ignore_migration_destination=True)
1944 self.assertEqual(2, len(els))
1945 for path in self.shown_locations:
1946 self.assertTrue(any(path in el.path for el in els))
1947
1948 def test_get_export_locations_by_share_instance_id(self):
1949 els = db_api.share_export_locations_get_by_share_instance_id(
1950 self.ctxt, self.share.instance.id)
1951 self.assertEqual(1, len(els))
1952 for path in [self.shown_locations[1]]:
1953 self.assertTrue(any(path in el.path for el in els))
1954
1955 def test_export_location_metadata_update_delete(self):
1956 export_location_uuid = self._get_export_location_uuid_by_path(
1957 self.initial_locations[0])
1958 metadata = {
1959 'foo_key': 'foo_value',
1960 'bar_key': 'bar_value',
1961 'quuz_key': 'quuz_value',
1962 }
1963
1964 db_api.export_location_metadata_update(
1965 self.ctxt, export_location_uuid, metadata, False)
1966
1967 db_api.export_location_metadata_delete(
1968 self.ctxt, export_location_uuid, list(metadata.keys())[0:-1])
1969
1970 result = db_api.export_location_metadata_get(
1971 self.ctxt, export_location_uuid)
1972
1973 key = list(metadata.keys())[-1]
1974 self.assertEqual({key: metadata[key]}, result)
1975
1976 db_api.export_location_metadata_delete(
1977 self.ctxt, export_location_uuid)
1978
1979 result = db_api.export_location_metadata_get(
1980 self.ctxt, export_location_uuid)
1981 self.assertEqual({}, result)
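# Recap of the two delete shapes exercised above (the uuid variable simply
# mirrors export_location_uuid from this test):
#
#     db_api.export_location_metadata_delete(self.ctxt, uuid, ['foo_key'])
#     # removes only the listed keys
#     db_api.export_location_metadata_delete(self.ctxt, uuid)
#     # removes whatever metadata is left, hence the final {} result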
1982
1983 def test_export_location_metadata_update_get(self):
1984
1985 # Write metadata for the target export location
1986 export_location_uuid = self._get_export_location_uuid_by_path(
1987 self.initial_locations[0])
1988 metadata = {'foo_key': 'foo_value', 'bar_key': 'bar_value'}
1989 db_api.export_location_metadata_update(
1990 self.ctxt, export_location_uuid, metadata, False)
1991
1992 # Write metadata for a second, unrelated export location
1993 other_export_location_uuid = self._get_export_location_uuid_by_path(
1994 self.initial_locations[1])
1995 other_metadata = {'key_from_other_el': 'value_of_key_from_other_el'}
1996 db_api.export_location_metadata_update(
1997 self.ctxt, other_export_location_uuid, other_metadata, False)
1998
1999 result = db_api.export_location_metadata_get(
2000 self.ctxt, export_location_uuid)
2001
2002 self.assertEqual(metadata, result)
2003
2004 updated_metadata = {
2005 'foo_key': metadata['foo_key'],
2006 'quuz_key': 'quuz_value',
2007 }
2008
2009 db_api.export_location_metadata_update(
2010 self.ctxt, export_location_uuid, updated_metadata, True)
2011
2012 result = db_api.export_location_metadata_get(
2013 self.ctxt, export_location_uuid)
2014
2015 self.assertEqual(updated_metadata, result)
2016
2017 @ddt.data(
2018 ("k", "v"),
2019 ("k" * 256, "v"),
2020 ("k", "v" * 1024),
2021 ("k" * 256, "v" * 1024),
2022 )
2023 @ddt.unpack
2024 def test_set_metadata_with_different_length(self, key, value):
2025 export_location_uuid = self._get_export_location_uuid_by_path(
2026 self.initial_locations[1])
2027 metadata = {key: value}
2028
2029 db_api.export_location_metadata_update(
2030 self.ctxt, export_location_uuid, metadata, False)
2031
2032 result = db_api.export_location_metadata_get(
2033 self.ctxt, export_location_uuid)
2034
2035 self.assertEqual(metadata, result)
2036
2037
2038 @ddt.ddt
2039 class DriverPrivateDataDatabaseAPITestCase(test.TestCase):
2040
2041 def setUp(self):
2042 """Run before each test."""
2043 super(DriverPrivateDataDatabaseAPITestCase, self).setUp()
2044 self.ctxt = context.get_admin_context()
2045
2046 def _get_driver_test_data(self):
2047 return uuidutils.generate_uuid()
2048
2049 @ddt.data({"details": {"foo": "bar", "tee": "too"},
2050 "valid": {"foo": "bar", "tee": "too"}},
2051 {"details": {"foo": "bar", "tee": ["test"]},
2052 "valid": {"foo": "bar", "tee": six.text_type(["test"])}})
2053 @ddt.unpack
2054 def test_update(self, details, valid):
2055 test_id = self._get_driver_test_data()
2056
2057 initial_data = db_api.driver_private_data_get(self.ctxt, test_id)
2058 db_api.driver_private_data_update(self.ctxt, test_id, details)
2059 actual_data = db_api.driver_private_data_get(self.ctxt, test_id)
2060
2061 self.assertEqual({}, initial_data)
2062 self.assertEqual(valid, actual_data)
2063
2064 @ddt.data({'with_deleted': True, 'append': False},
2065 {'with_deleted': True, 'append': True},
2066 {'with_deleted': False, 'append': False},
2067 {'with_deleted': False, 'append': True})
2068 @ddt.unpack
2069 def test_update_with_more_values(self, with_deleted, append):
2070 test_id = self._get_driver_test_data()
2071 details = {"tee": "too"}
2072 more_details = {"foo": "bar"}
2073 result = {"tee": "too", "foo": "bar"}
2074
2075 db_api.driver_private_data_update(self.ctxt, test_id, details)
2076 if with_deleted:
2077 db_api.driver_private_data_delete(self.ctxt, test_id)
2078 if append:
2079 more_details.update(details)
2080 if with_deleted and not append:
2081 result.pop("tee")
2082 db_api.driver_private_data_update(self.ctxt, test_id, more_details)
2083
2084 actual_result = db_api.driver_private_data_get(self.ctxt,
2085 test_id)
2086
2087 self.assertEqual(result, actual_result)
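# Reading the matrix above: a prior driver_private_data_delete presumably
# soft-deletes the stored keys, so "tee" only survives the second update when
# it is sent again (append=True); without a delete, old and new keys are simply
# merged.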
2088
2089 @ddt.data(True, False)
2090 def test_update_with_duplicate(self, with_deleted):
2091 test_id = self._get_driver_test_data()
2092 details = {"tee": "too"}
2093
2094 db_api.driver_private_data_update(self.ctxt, test_id, details)
2095 if with_deleted:
2096 db_api.driver_private_data_delete(self.ctxt, test_id)
2097 db_api.driver_private_data_update(self.ctxt, test_id, details)
2098
2099 actual_result = db_api.driver_private_data_get(self.ctxt,
2100 test_id)
2101
2102 self.assertEqual(details, actual_result)
2103
2104 def test_update_with_delete_existing(self):
2105 test_id = self._get_driver_test_data()
2106 details = {"key1": "val1", "key2": "val2", "key3": "val3"}
2107 details_update = {"key1": "val1_upd", "key4": "new_val"}
2108
2109 # Create initial details, then replace them using delete_existing=True
2110 db_api.driver_private_data_update(self.ctxt, test_id, details)
2111 db_api.driver_private_data_update(self.ctxt, test_id,
2112 details_update, delete_existing=True)
2113
2114 actual_result = db_api.driver_private_data_get(
2115 self.ctxt, test_id)
2116
2117 self.assertEqual(details_update, actual_result)
2118
2119 def test_get(self):
2120 test_id = self._get_driver_test_data()
2121 test_key = "foo"
2122 test_keys = [test_key, "tee"]
2123 details = {test_keys[0]: "val", test_keys[1]: "val", "mee": "foo"}
2124 db_api.driver_private_data_update(self.ctxt, test_id, details)
2125
2126 actual_result_all = db_api.driver_private_data_get(
2127 self.ctxt, test_id)
2128 actual_result_single_key = db_api.driver_private_data_get(
2129 self.ctxt, test_id, test_key)
2130 actual_result_list = db_api.driver_private_data_get(
2131 self.ctxt, test_id, test_keys)
2132
2133 self.assertEqual(details, actual_result_all)
2134 self.assertEqual(details[test_key], actual_result_single_key)
2135 self.assertEqual(dict.fromkeys(test_keys, "val"), actual_result_list)
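# driver_private_data_get supports three lookup shapes, as exercised above
# (entity_id stands in for the generated UUID used by this test):
#
#     db_api.driver_private_data_get(self.ctxt, entity_id)            # full dict
#     db_api.driver_private_data_get(self.ctxt, entity_id, "foo")     # one value
#     db_api.driver_private_data_get(self.ctxt, entity_id, ["foo", "tee"])  # sub-dict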
2136
2137 def test_delete_single(self):
2138 test_id = self._get_driver_test_data()
2139 test_key = "foo"
2140 details = {test_key: "bar", "tee": "too"}
2141 valid_result = {"tee": "too"}
2142 db_api.driver_private_data_update(self.ctxt, test_id, details)
2143
2144 db_api.driver_private_data_delete(self.ctxt, test_id, test_key)
2145
2146 actual_result = db_api.driver_private_data_get(
2147 self.ctxt, test_id)
2148
2149 self.assertEqual(valid_result, actual_result)
2150
2151 def test_delete_all(self):
2152 test_id = self._get_driver_test_data()
2153 details = {"foo": "bar", "tee": "too"}
2154 db_api.driver_private_data_update(self.ctxt, test_id, details)
2155
2156 db_api.driver_private_data_delete(self.ctxt, test_id)
2157
2158 actual_result = db_api.driver_private_data_get(
2159 self.ctxt, test_id)
2160
2161 self.assertEqual({}, actual_result)
2162
2163
2164 @ddt.ddt
2165 class ShareNetworkDatabaseAPITestCase(BaseDatabaseAPITestCase):
2166
2167 def __init__(self, *args, **kwargs):
2168 super(ShareNetworkDatabaseAPITestCase, self).__init__(*args, **kwargs)
2169 self.fake_context = context.RequestContext(user_id='fake user',
2170 project_id='fake project',
2171 is_admin=False)
2172
2173 def setUp(self):
2174 super(ShareNetworkDatabaseAPITestCase, self).setUp()
2175 self.share_nw_dict = {'id': 'fake network id',
2176 'project_id': self.fake_context.project_id,
2177 'user_id': 'fake_user_id',
2178 'name': 'whatever',
2179 'description': 'fake description'}
2180
2181 def test_create_one_network(self):
2182 result = db_api.share_network_create(self.fake_context,
2183 self.share_nw_dict)
2184
2185 self._check_fields(expected=self.share_nw_dict, actual=result)
2186 self.assertEqual(0, len(result['share_instances']))
2187 self.assertEqual(0, len(result['security_services']))
2188
2189 def test_create_two_networks_in_different_tenants(self):
2190 share_nw_dict2 = self.share_nw_dict.copy()
2191 share_nw_dict2['id'] = None
2192 share_nw_dict2['project_id'] = 'fake project 2'
2193 result1 = db_api.share_network_create(self.fake_context,
2194 self.share_nw_dict)
2195 result2 = db_api.share_network_create(self.fake_context.elevated(),
2196 share_nw_dict2)
2197
2198 self._check_fields(expected=self.share_nw_dict, actual=result1)
2199 self._check_fields(expected=share_nw_dict2, actual=result2)
2200
2201 def test_create_two_networks_in_one_tenant(self):
2202 share_nw_dict2 = self.share_nw_dict.copy()
2203 share_nw_dict2['id'] += "suffix"
2204 result1 = db_api.share_network_create(self.fake_context,
2205 self.share_nw_dict)
2206 result2 = db_api.share_network_create(self.fake_context,
2207 share_nw_dict2)
2208 self._check_fields(expected=self.share_nw_dict, actual=result1)
2209 self._check_fields(expected=share_nw_dict2, actual=result2)
2210
2211 def test_create_with_duplicated_id(self):
2212 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2213
2214 self.assertRaises(db_exception.DBDuplicateEntry,
2215 db_api.share_network_create,
2216 self.fake_context,
2217 self.share_nw_dict)
2218
2219 def test_get(self):
2220 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2221 result = db_api.share_network_get(self.fake_context,
2222 self.share_nw_dict['id'])
2223
2224 self._check_fields(expected=self.share_nw_dict, actual=result)
2225 self.assertEqual(0, len(result['share_instances']))
2226 self.assertEqual(0, len(result['security_services']))
2227
2228 def _create_share_network_for_project(self, project_id):
2229 ctx = context.RequestContext(user_id='fake user',
2230 project_id=project_id,
2231 is_admin=False)
2232
2233 share_data = self.share_nw_dict.copy()
2234 share_data['project_id'] = project_id
2235
2236 db_api.share_network_create(ctx, share_data)
2237 return share_data
2238
2239 def test_get_other_tenant_as_admin(self):
2240 expected = self._create_share_network_for_project('fake project 2')
2241 result = db_api.share_network_get(self.fake_context.elevated(),
2242 self.share_nw_dict['id'])
2243
2244 self._check_fields(expected=expected, actual=result)
2245 self.assertEqual(0, len(result['share_instances']))
2246 self.assertEqual(0, len(result['security_services']))
2247
2248 def test_get_other_tenant(self):
2249 self._create_share_network_for_project('fake project 2')
2250 self.assertRaises(exception.ShareNetworkNotFound,
2251 db_api.share_network_get,
2252 self.fake_context,
2253 self.share_nw_dict['id'])
2254
2255 @ddt.data([{'id': 'fake share id1'}],
2256 [{'id': 'fake share id1'}, {'id': 'fake share id2'}],)
2257 def test_get_with_shares(self, shares):
2258 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2259 share_instances = []
2260 for share in shares:
2261 share.update({'share_network_id': self.share_nw_dict['id']})
2262 share_instances.append(
2263 db_api.share_create(self.fake_context, share).instance
2264 )
2265
2266 result = db_api.share_network_get(self.fake_context,
2267 self.share_nw_dict['id'])
2268
2269 self.assertEqual(len(shares), len(result['share_instances']))
2270 for index, share_instance in enumerate(share_instances):
2271 self.assertEqual(
2272 share_instance['share_network_id'],
2273 result['share_instances'][index]['share_network_id']
2274 )
2275
2276 @ddt.data([{'id': 'fake security service id1', 'type': 'fake type'}],
2277 [{'id': 'fake security service id1', 'type': 'fake type'},
2278 {'id': 'fake security service id2', 'type': 'fake type'}])
2279 def test_get_with_security_services(self, security_services):
2280 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2281
2282 for service in security_services:
2283 service.update({'project_id': self.fake_context.project_id})
2284 db_api.security_service_create(self.fake_context, service)
2285 db_api.share_network_add_security_service(
2286 self.fake_context, self.share_nw_dict['id'], service['id'])
2287
2288 result = db_api.share_network_get(self.fake_context,
2289 self.share_nw_dict['id'])
2290
2291 self.assertEqual(len(security_services),
2292 len(result['security_services']))
2293
2294 for index, service in enumerate(security_services):
2295 self._check_fields(expected=service,
2296 actual=result['security_services'][index])
2297
2298 @ddt.data([{'id': 'fake_id_1', 'availability_zone_id': 'None'}],
2299 [{'id': 'fake_id_2', 'availability_zone_id': 'None'},
2300 {'id': 'fake_id_3', 'availability_zone_id': 'fake_az_id'}])
2301 def test_get_with_subnets(self, subnets):
2302 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2303
2304 for subnet in subnets:
2305 subnet['share_network_id'] = self.share_nw_dict['id']
2306 db_api.share_network_subnet_create(self.fake_context, subnet)
2307
2308 result = db_api.share_network_get(self.fake_context,
2309 self.share_nw_dict['id'])
2310
2311 self.assertEqual(len(subnets),
2312 len(result['share_network_subnets']))
2313
2314 for index, subnet in enumerate(subnets):
2315 self._check_fields(expected=subnet,
2316 actual=result['share_network_subnets'][index])
2317
2318 def test_get_not_found(self):
2319 self.assertRaises(exception.ShareNetworkNotFound,
2320 db_api.share_network_get,
2321 self.fake_context,
2322 'fake id')
2323
2324 def test_delete(self):
2325 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2326 db_api.share_network_delete(self.fake_context,
2327 self.share_nw_dict['id'])
2328
2329 self.assertRaises(exception.ShareNetworkNotFound,
2330 db_api.share_network_get,
2331 self.fake_context,
2332 self.share_nw_dict['id'])
2333
2334 def test_delete_not_found(self):
2335 self.assertRaises(exception.ShareNetworkNotFound,
2336 db_api.share_network_delete,
2337 self.fake_context,
2338 'fake id')
2339
2340 def test_update(self):
2341 new_name = 'fake_new_name'
2342 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2343 result_update = db_api.share_network_update(self.fake_context,
2344 self.share_nw_dict['id'],
2345 {'name': new_name})
2346 result_get = db_api.share_network_get(self.fake_context,
2347 self.share_nw_dict['id'])
2348
2349 self.assertEqual(new_name, result_update['name'])
2350 self._check_fields(expected=dict(result_update.items()),
2351 actual=dict(result_get.items()))
2352
2353 def test_update_not_found(self):
2354 self.assertRaises(exception.ShareNetworkNotFound,
2355 db_api.share_network_update,
2356 self.fake_context,
2357 'fake id',
2358 {})
2359
2360 @ddt.data(1, 2)
2361 def test_get_all_one_record(self, records_count):
2362 index = 0
2363 share_networks = []
2364 while index < records_count:
2365 share_network_dict = dict(self.share_nw_dict)
2366 fake_id = 'fake_id%s' % index
2367 share_network_dict.update({'id': fake_id,
2368 'project_id': fake_id})
2369 share_networks.append(share_network_dict)
2370 db_api.share_network_create(self.fake_context.elevated(),
2371 share_network_dict)
2372 index += 1
2373
2374 result = db_api.share_network_get_all(self.fake_context.elevated())
2375
2376 self.assertEqual(len(share_networks), len(result))
2377 for index, net in enumerate(share_networks):
2378 self._check_fields(expected=net, actual=result[index])
2379
2380 def test_get_all_by_project(self):
2381 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2382
2383 share_nw_dict2 = dict(self.share_nw_dict)
2384 share_nw_dict2['id'] = 'fake share nw id2'
2385 share_nw_dict2['project_id'] = 'fake project 2'
2386 new_context = context.RequestContext(user_id='fake user 2',
2387 project_id='fake project 2',
2388 is_admin=False)
2389 db_api.share_network_create(new_context, share_nw_dict2)
2390
2391 result = db_api.share_network_get_all_by_project(
2392 self.fake_context.elevated(),
2393 share_nw_dict2['project_id'])
2394
2395 self.assertEqual(1, len(result))
2396 self._check_fields(expected=share_nw_dict2, actual=result[0])
2397
2398 def test_add_security_service(self):
2399 security_dict1 = {'id': 'fake security service id1',
2400 'project_id': self.fake_context.project_id,
2401 'type': 'fake type'}
2402
2403 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2404 db_api.security_service_create(self.fake_context, security_dict1)
2405 db_api.share_network_add_security_service(self.fake_context,
2406 self.share_nw_dict['id'],
2407 security_dict1['id'])
2408
2409 result = (db_api.model_query(
2410 self.fake_context,
2411 models.ShareNetworkSecurityServiceAssociation).
2412 filter_by(security_service_id=security_dict1['id']).
2413 filter_by(share_network_id=self.share_nw_dict['id']).
2414 first())
2415
2416 self.assertIsNotNone(result)
2417
2418 def test_add_security_service_not_found_01(self):
2419 security_service_id = 'unknown security service'
2420 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2421
2422 self.assertRaises(exception.SecurityServiceNotFound,
2423 db_api.share_network_add_security_service,
2424 self.fake_context,
2425 self.share_nw_dict['id'],
2426 security_service_id)
2427
2428 def test_add_security_service_not_found_02(self):
2429 security_dict1 = {'id': 'fake security service id1',
2430 'project_id': self.fake_context.project_id,
2431 'type': 'fake type'}
2432 share_nw_id = 'unknown share network'
2433 db_api.security_service_create(self.fake_context, security_dict1)
2434
2435 self.assertRaises(exception.ShareNetworkNotFound,
2436 db_api.share_network_add_security_service,
2437 self.fake_context,
2438 share_nw_id,
2439 security_dict1['id'])
2440
2441 def test_add_security_service_association_error_already_associated(self):
2442 security_dict1 = {'id': 'fake security service id1',
2443 'project_id': self.fake_context.project_id,
2444 'type': 'fake type'}
2445
2446 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2447 db_api.security_service_create(self.fake_context, security_dict1)
2448 db_api.share_network_add_security_service(self.fake_context,
2449 self.share_nw_dict['id'],
2450 security_dict1['id'])
2451
2452 self.assertRaises(
2453 exception.ShareNetworkSecurityServiceAssociationError,
2454 db_api.share_network_add_security_service,
2455 self.fake_context,
2456 self.share_nw_dict['id'],
2457 security_dict1['id'])
2458
2459 def test_remove_security_service(self):
2460 security_dict1 = {'id': 'fake security service id1',
2461 'project_id': self.fake_context.project_id,
2462 'type': 'fake type'}
2463
2464 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2465 db_api.security_service_create(self.fake_context, security_dict1)
2466 db_api.share_network_add_security_service(self.fake_context,
2467 self.share_nw_dict['id'],
2468 security_dict1['id'])
2469
2470 db_api.share_network_remove_security_service(self.fake_context,
2471 self.share_nw_dict['id'],
2472 security_dict1['id'])
2473
2474 result = (db_api.model_query(
2475 self.fake_context,
2476 models.ShareNetworkSecurityServiceAssociation).
2477 filter_by(security_service_id=security_dict1['id']).
2478 filter_by(share_network_id=self.share_nw_dict['id']).first())
2479
2480 self.assertIsNone(result)
2481
2482 share_nw_ref = db_api.share_network_get(self.fake_context,
2483 self.share_nw_dict['id'])
2484 self.assertEqual(0, len(share_nw_ref['security_services']))
2485
2486 def test_remove_security_service_not_found_01(self):
2487 security_service_id = 'unknown security service'
2488 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2489
2490 self.assertRaises(exception.SecurityServiceNotFound,
2491 db_api.share_network_remove_security_service,
2492 self.fake_context,
2493 self.share_nw_dict['id'],
2494 security_service_id)
2495
2496 def test_remove_security_service_not_found_02(self):
2497 security_dict1 = {'id': 'fake security service id1',
2498 'project_id': self.fake_context.project_id,
2499 'type': 'fake type'}
2500 share_nw_id = 'unknown share network'
2501 db_api.security_service_create(self.fake_context, security_dict1)
2502
2503 self.assertRaises(exception.ShareNetworkNotFound,
2504 db_api.share_network_remove_security_service,
2505 self.fake_context,
2506 share_nw_id,
2507 security_dict1['id'])
2508
2509 def test_remove_security_service_dissociation_error(self):
2510 security_dict1 = {'id': 'fake security service id1',
2511 'project_id': self.fake_context.project_id,
2512 'type': 'fake type'}
2513
2514 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2515 db_api.security_service_create(self.fake_context, security_dict1)
2516
2517 self.assertRaises(
2518 exception.ShareNetworkSecurityServiceDissociationError,
2519 db_api.share_network_remove_security_service,
2520 self.fake_context,
2521 self.share_nw_dict['id'],
2522 security_dict1['id'])
2523
2524 def test_security_services_relation(self):
2525 security_dict1 = {'id': 'fake security service id1',
2526 'project_id': self.fake_context.project_id,
2527 'type': 'fake type'}
2528
2529 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2530 db_api.security_service_create(self.fake_context, security_dict1)
2531
2532 result = db_api.share_network_get(self.fake_context,
2533 self.share_nw_dict['id'])
2534
2535 self.assertEqual(0, len(result['security_services']))
2536
2537 def test_shares_relation(self):
2538 share_dict = {'id': 'fake share id1'}
2539
2540 db_api.share_network_create(self.fake_context, self.share_nw_dict)
2541 db_api.share_create(self.fake_context, share_dict)
2542
2543 result = db_api.share_network_get(self.fake_context,
2544 self.share_nw_dict['id'])
2545
2546 self.assertEqual(0, len(result['share_instances']))
2547
2548
2549 @ddt.ddt
2550 class ShareNetworkSubnetDatabaseAPITestCase(BaseDatabaseAPITestCase):
2551
2552 def __init__(self, *args, **kwargs):
2553 super(ShareNetworkSubnetDatabaseAPITestCase, self).__init__(
2554 *args, **kwargs)
2555 self.fake_context = context.RequestContext(user_id='fake user',
2556 project_id='fake project',
2557 is_admin=False)
2558
2559 def setUp(self):
2560 super(ShareNetworkSubnetDatabaseAPITestCase, self).setUp()
2561 self.subnet_dict = {'id': 'fake network id',
2562 'neutron_net_id': 'fake net id',
2563 'neutron_subnet_id': 'fake subnet id',
2564 'network_type': 'vlan',
2565 'segmentation_id': 1000,
2566 'share_network_id': 'fake_id',
2567 'cidr': '10.0.0.0/24',
2568 'ip_version': 4,
2569 'availability_zone_id': None}
2570
2571 def test_create(self):
2572 result = db_api.share_network_subnet_create(
2573 self.fake_context, self.subnet_dict)
2574 self._check_fields(expected=self.subnet_dict, actual=result)
2575
2576 def test_create_duplicated_id(self):
2577 db_api.share_network_subnet_create(self.fake_context, self.subnet_dict)
2578
2579 self.assertRaises(db_exception.DBDuplicateEntry,
2580 db_api.share_network_subnet_create,
2581 self.fake_context,
2582 self.subnet_dict)
2583
2584 def test_get(self):
2585 db_api.share_network_subnet_create(self.fake_context, self.subnet_dict)
2586
2587 result = db_api.share_network_subnet_get(self.fake_context,
2588 self.subnet_dict['id'])
2589 self._check_fields(expected=self.subnet_dict, actual=result)
2590
2591 @ddt.data([{'id': 'fake_id_1', 'identifier': 'fake_identifier',
2592 'host': 'fake_host'}],
2593 [{'id': 'fake_id_2', 'identifier': 'fake_identifier',
2594 'host': 'fake_host'},
2595 {'id': 'fake_id_3', 'identifier': 'fake_identifier',
2596 'host': 'fake_host'}])
2597 def test_get_with_share_servers(self, share_servers):
2598 db_api.share_network_subnet_create(self.fake_context,
2599 self.subnet_dict)
2600
2601 for share_server in share_servers:
2602 share_server['share_network_subnet_id'] = self.subnet_dict['id']
2603 db_api.share_server_create(self.fake_context, share_server)
2604
2605 result = db_api.share_network_subnet_get(self.fake_context,
2606 self.subnet_dict['id'])
2607
2608 self.assertEqual(len(share_servers),
2609 len(result['share_servers']))
2610
2611 for index, share_server in enumerate(share_servers):
2612 self._check_fields(expected=share_server,
2613 actual=result['share_servers'][index])
2614
2615 def test_get_not_found(self):
2616 db_api.share_network_subnet_create(self.fake_context, self.subnet_dict)
2617
2618 self.assertRaises(exception.ShareNetworkSubnetNotFound,
2619 db_api.share_network_subnet_get,
2620 self.fake_context,
2621 'fake_id')
2622
2623 def test_delete(self):
2624 db_api.share_network_subnet_create(self.fake_context, self.subnet_dict)
2625 db_api.share_network_subnet_delete(self.fake_context,
2626 self.subnet_dict['id'])
2627
2628 self.assertRaises(exception.ShareNetworkSubnetNotFound,
2629 db_api.share_network_subnet_delete,
2630 self.fake_context,
2631 self.subnet_dict['id'])
2632
2633 def test_delete_not_found(self):
2634 self.assertRaises(exception.ShareNetworkSubnetNotFound,
2635 db_api.share_network_subnet_delete,
2636 self.fake_context,
2637 'fake_id')
2638
2639 def test_update(self):
2640 update_dict = {
2641 'gateway': 'fake_gateway',
2642 'ip_version': 6,
2643 'mtu': ''
2644 }
2645
2646 db_api.share_network_subnet_create(self.fake_context, self.subnet_dict)
2647 db_api.share_network_subnet_update(
2648 self.fake_context, self.subnet_dict['id'], update_dict)
2649
2650 result = db_api.share_network_subnet_get(self.fake_context,
2651 self.subnet_dict['id'])
2652 self._check_fields(expected=update_dict, actual=result)
2653
2654 def test_update_not_found(self):
2655 self.assertRaises(exception.ShareNetworkSubnetNotFound,
2656 db_api.share_network_subnet_update,
2657 self.fake_context,
2658 self.subnet_dict['id'],
2659 {})
2660
2661 @ddt.data([
2662 {
2663 'id': 'sn_id1',
2664 'project_id': 'fake project',
2665 'user_id': 'fake'
2666 }
2667 ], [
2668 {
2669 'id': 'fake_id',
2670 'project_id': 'fake project',
2671 'user_id': 'fake'
2672 },
2673 {
2674 'id': 'sn_id2',
2675 'project_id': 'fake project',
2676 'user_id': 'fake'
2677 }
2678 ])
2679 def test_get_all_by_share_network(self, share_networks):
2680
2681 for idx, share_network in enumerate(share_networks):
2682 self.subnet_dict['share_network_id'] = share_network['id']
2683 self.subnet_dict['id'] = 'fake_id%s' % idx
2684
2685 db_api.share_network_create(self.fake_context, share_network)
2686 db_api.share_network_subnet_create(self.fake_context,
2687 self.subnet_dict)
2688 for share_network in share_networks:
2689 subnets = db_api.share_network_subnet_get_all_by_share_network(
2690 self.fake_context, share_network['id'])
2691 self.assertEqual(1, len(subnets))
2692
2693 def test_get_by_availability_zone_id(self):
2694 az = db_api.availability_zone_create_if_not_exist(self.fake_context,
2695 'fake_zone_id')
2696 self.subnet_dict['availability_zone_id'] = az['id']
2697 db_api.share_network_subnet_create(self.fake_context, self.subnet_dict)
2698
2699 result = db_api.share_network_subnet_get_by_availability_zone_id(
2700 self.fake_context, self.subnet_dict['share_network_id'], az['id'])
2701
2702 self._check_fields(expected=self.subnet_dict, actual=result)
2703
2704 def test_get_default_subnet(self):
2705 db_api.share_network_subnet_create(self.fake_context, self.subnet_dict)
2706
2707 result = db_api.share_network_subnet_get_default_subnet(
2708 self.fake_context, self.subnet_dict['share_network_id'])
2709
2710 self._check_fields(expected=self.subnet_dict, actual=result)
2711
2712
2713 @ddt.ddt
2714 class SecurityServiceDatabaseAPITestCase(BaseDatabaseAPITestCase):
2715
2716 def __init__(self, *args, **kwargs):
2717 super(SecurityServiceDatabaseAPITestCase, self).__init__(*args,
2718 **kwargs)
2719
2720 self.fake_context = context.RequestContext(user_id='fake user',
2721 project_id='fake project',
2722 is_admin=False)
2723
2724 def _check_expected_fields(self, result, expected):
2725 for key in expected:
2726 self.assertEqual(expected[key], result[key])
2727
2728 def test_create(self):
2729 result = db_api.security_service_create(self.fake_context,
2730 security_service_dict)
2731
2732 self._check_expected_fields(result, security_service_dict)
2733
2734 def test_create_with_duplicated_id(self):
2735 db_api.security_service_create(self.fake_context,
2736 security_service_dict)
2737
2738 self.assertRaises(db_exception.DBDuplicateEntry,
2739 db_api.security_service_create,
2740 self.fake_context,
2741 security_service_dict)
2742
2743 def test_get(self):
2744 db_api.security_service_create(self.fake_context,
2745 security_service_dict)
2746
2747 result = db_api.security_service_get(self.fake_context,
2748 security_service_dict['id'])
2749
2750 self._check_expected_fields(result, security_service_dict)
2751
2752 def test_get_not_found(self):
2753 self.assertRaises(exception.SecurityServiceNotFound,
2754 db_api.security_service_get,
2755 self.fake_context,
2756 'wrong id')
2757
2758 def test_delete(self):
2759 db_api.security_service_create(self.fake_context,
2760 security_service_dict)
2761
2762 db_api.security_service_delete(self.fake_context,
2763 security_service_dict['id'])
2764
2765 self.assertRaises(exception.SecurityServiceNotFound,
2766 db_api.security_service_get,
2767 self.fake_context,
2768 security_service_dict['id'])
2769
2770 def test_update(self):
2771 update_dict = {
2772 'dns_ip': 'new dns',
2773 'server': 'new ldap server',
2774 'domain': 'new ldap domain',
2775 'ou': 'new ldap ou',
2776 'user': 'new user',
2777 'password': 'new password',
2778 'name': 'new whatever',
2779 'description': 'new nevermind',
2780 }
2781
2782 db_api.security_service_create(self.fake_context,
2783 security_service_dict)
2784
2785 result = db_api.security_service_update(self.fake_context,
2786 security_service_dict['id'],
2787 update_dict)
2788
2789 self._check_expected_fields(result, update_dict)
2790
2791 def test_update_no_updates(self):
2792 db_api.security_service_create(self.fake_context,
2793 security_service_dict)
2794
2795 result = db_api.security_service_update(self.fake_context,
2796 security_service_dict['id'],
2797 {})
2798
2799 self._check_expected_fields(result, security_service_dict)
2800
2801 def test_update_not_found(self):
2802 self.assertRaises(exception.SecurityServiceNotFound,
2803 db_api.security_service_update,
2804 self.fake_context,
2805 'wrong id',
2806 {})
2807
2808 def test_get_all_no_records(self):
2809 result = db_api.security_service_get_all(self.fake_context)
2810
2811 self.assertEqual(0, len(result))
2812
2813 @ddt.data(1, 2)
2814 def test_get_all(self, records_count):
2815 index = 0
2816 services = []
2817 while index < records_count:
2818 service_dict = dict(security_service_dict)
2819 service_dict.update({'id': 'fake_id%s' % index})
2820 services.append(service_dict)
2821 db_api.security_service_create(self.fake_context, service_dict)
2822 index += 1
2823
2824 result = db_api.security_service_get_all(self.fake_context)
2825
2826 self.assertEqual(len(services), len(result))
2827 for index, service in enumerate(services):
2828 self._check_fields(expected=service, actual=result[index])
2829
2830 def test_get_all_two_records(self):
2831 dict1 = security_service_dict
2832 dict2 = security_service_dict.copy()
2833 dict2['id'] = 'fake id 2'
2834 db_api.security_service_create(self.fake_context,
2835 dict1)
2836 db_api.security_service_create(self.fake_context,
2837 dict2)
2838
2839 result = db_api.security_service_get_all(self.fake_context)
2840
2841 self.assertEqual(2, len(result))
2842
2843 def test_get_all_by_project(self):
2844 dict1 = security_service_dict
2845 dict2 = security_service_dict.copy()
2846 dict2['id'] = 'fake id 2'
2847 dict2['project_id'] = 'fake project 2'
2848 db_api.security_service_create(self.fake_context,
2849 dict1)
2850 db_api.security_service_create(self.fake_context,
2851 dict2)
2852
2853 result1 = db_api.security_service_get_all_by_project(
2854 self.fake_context,
2855 dict1['project_id'])
2856
2857 self.assertEqual(1, len(result1))
2858 self._check_expected_fields(result1[0], dict1)
2859
2860 result2 = db_api.security_service_get_all_by_project(
2861 self.fake_context,
2862 dict2['project_id'])
2863
2864 self.assertEqual(1, len(result2))
2865 self._check_expected_fields(result2[0], dict2)
2866
2867
2868 @ddt.ddt
2869 class ShareServerDatabaseAPITestCase(test.TestCase):
2870
2871 def setUp(self):
2872 super(ShareServerDatabaseAPITestCase, self).setUp()
2873 self.ctxt = context.RequestContext(user_id='user_id',
2874 project_id='project_id',
2875 is_admin=True)
2876
2877 def test_share_server_get(self):
2878 expected = db_utils.create_share_server()
2879 server = db_api.share_server_get(self.ctxt, expected['id'])
2880 self.assertEqual(expected['id'], server['id'])
2881 self.assertEqual(expected.share_network_subnet_id,
2882 server.share_network_subnet_id)
2883 self.assertEqual(expected.host, server.host)
2884 self.assertEqual(expected.status, server.status)
2885
2886 def test_get_not_found(self):
2887 fake_id = 'FAKE_UUID'
2888 self.assertRaises(exception.ShareServerNotFound,
2889 db_api.share_server_get, self.ctxt, fake_id)
2890
2891 def test_create(self):
2892 server = db_utils.create_share_server()
2893 self.assertTrue(server['id'])
2894 self.assertEqual(server.share_network_subnet_id,
2895 server['share_network_subnet_id'])
2896 self.assertEqual(server.host, server['host'])
2897 self.assertEqual(server.status, server['status'])
2898
2899 def test_delete(self):
2900 server = db_utils.create_share_server()
2901 num_records = len(db_api.share_server_get_all(self.ctxt))
2902 db_api.share_server_delete(self.ctxt, server['id'])
2903 self.assertEqual(num_records - 1,
2904 len(db_api.share_server_get_all(self.ctxt)))
2905
2906 def test_delete_not_found(self):
2907 fake_id = 'FAKE_UUID'
2908 self.assertRaises(exception.ShareServerNotFound,
2909 db_api.share_server_delete,
2910 self.ctxt, fake_id)
2911
2912 def test_update(self):
2913 update = {
2914 'share_network_id': 'update_net',
2915 'host': 'update_host',
2916 'status': constants.STATUS_ACTIVE,
2917 }
2918 server = db_utils.create_share_server()
2919 updated_server = db_api.share_server_update(self.ctxt, server['id'],
2920 update)
2921 self.assertEqual(server['id'], updated_server['id'])
2922 self.assertEqual(update['share_network_id'],
2923 updated_server.share_network_id)
2924 self.assertEqual(update['host'], updated_server.host)
2925 self.assertEqual(update['status'], updated_server.status)
2926
2927 def test_update_not_found(self):
2928 fake_id = 'FAKE_UUID'
2929 self.assertRaises(exception.ShareServerNotFound,
2930 db_api.share_server_update,
2931 self.ctxt, fake_id, {})
2932
2933 def test_get_all_by_host_and_share_net_valid(self):
2934 subnet_1 = {
2935 'id': '1',
2936 'share_network_id': '1',
2937 }
2938 subnet_2 = {
2939 'id': '2',
2940 'share_network_id': '2',
2941 }
2942 valid = {
2943 'share_network_subnet_id': '1',
2944 'host': 'host1',
2945 'status': constants.STATUS_ACTIVE,
2946 }
2947 invalid = {
2948 'share_network_subnet_id': '2',
2949 'host': 'host1',
2950 'status': constants.STATUS_ERROR,
2951 }
2952 other = {
2953 'share_network_subnet_id': '1',
2954 'host': 'host2',
2955 'status': constants.STATUS_ACTIVE,
2956 }
2957 db_utils.create_share_network_subnet(**subnet_1)
2958 db_utils.create_share_network_subnet(**subnet_2)
2959 valid = db_utils.create_share_server(**valid)
2960 db_utils.create_share_server(**invalid)
2961 db_utils.create_share_server(**other)
2962
2963 servers = db_api.share_server_get_all_by_host_and_share_subnet_valid(
2964 self.ctxt,
2965 host='host1',
2966 share_subnet_id='1')
2967 self.assertEqual(valid['id'], servers[0]['id'])
2968
2969 def test_get_all_by_host_and_share_net_not_found(self):
2970 self.assertRaises(
2971 exception.ShareServerNotFound,
2972 db_api.share_server_get_all_by_host_and_share_subnet_valid,
2973 self.ctxt, host='fake', share_subnet_id='fake'
2974 )
2975
2976 def test_get_all(self):
2977 srv1 = {
2978 'share_network_id': '1',
2979 'host': 'host1',
2980 'status': constants.STATUS_ACTIVE,
2981 }
2982 srv2 = {
2983 'share_network_id': '1',
2984 'host': 'host1',
2985 'status': constants.STATUS_ERROR,
2986 }
2987 srv3 = {
2988 'share_network_id': '2',
2989 'host': 'host2',
2990 'status': constants.STATUS_ACTIVE,
2991 }
2992 servers = db_api.share_server_get_all(self.ctxt)
2993 self.assertEqual(0, len(servers))
2994
2995 to_delete = db_utils.create_share_server(**srv1)
2996 db_utils.create_share_server(**srv2)
2997 db_utils.create_share_server(**srv3)
2998
2999 servers = db_api.share_server_get_all(self.ctxt)
3000 self.assertEqual(3, len(servers))
3001
3002 db_api.share_server_delete(self.ctxt, to_delete['id'])
3003 servers = db_api.share_server_get_all(self.ctxt)
3004 self.assertEqual(2, len(servers))
3005
3006 def test_backend_details_set(self):
3007 details = {
3008 'value1': '1',
3009 'value2': '2',
3010 }
3011 server = db_utils.create_share_server()
3012 db_api.share_server_backend_details_set(self.ctxt, server['id'],
3013 details)
3014
3015 self.assertDictMatch(
3016 details,
3017 db_api.share_server_get(self.ctxt, server['id'])['backend_details']
3018 )
3019
3020 def test_backend_details_set_not_found(self):
3021 fake_id = 'FAKE_UUID'
3022 self.assertRaises(exception.ShareServerNotFound,
3023 db_api.share_server_backend_details_set,
3024 self.ctxt, fake_id, {})
3025
3026 def test_get_with_details(self):
3027 values = {
3028 'share_network_subnet_id': 'fake-share-net-id',
3029 'host': 'hostname',
3030 'status': constants.STATUS_ACTIVE,
3031 }
3032 details = {
3033 'value1': '1',
3034 'value2': '2',
3035 }
3036 srv_id = db_utils.create_share_server(**values)['id']
3037 db_api.share_server_backend_details_set(self.ctxt, srv_id, details)
3038 server = db_api.share_server_get(self.ctxt, srv_id)
3039 self.assertEqual(srv_id, server['id'])
3040 self.assertEqual(values['share_network_subnet_id'],
3041 server.share_network_subnet_id)
3042 self.assertEqual(values['host'], server.host)
3043 self.assertEqual(values['status'], server.status)
3044 self.assertDictMatch(server['backend_details'], details)
3045 self.assertIn('backend_details', server.to_dict())
3046
3047 def test_delete_with_details(self):
3048 server = db_utils.create_share_server(backend_details={
3049 'value1': '1',
3050 'value2': '2',
3051 })
3052
3053 num_records = len(db_api.share_server_get_all(self.ctxt))
3054 db_api.share_server_delete(self.ctxt, server['id'])
3055 self.assertEqual(num_records - 1,
3056 len(db_api.share_server_get_all(self.ctxt)))
3057
3058 @ddt.data('fake', '-fake-', 'foo_some_fake_identifier_bar',
3059 'foo-some-fake-identifier-bar', 'foobar')
3060 def test_share_server_search_by_identifier(self, identifier):
3061
3062 server = {
3063 'share_network_id': 'fake-share-net-id',
3064 'host': 'hostname',
3065 'status': constants.STATUS_ACTIVE,
3066 'is_auto_deletable': True,
3067 'updated_at': datetime.datetime(2018, 5, 1),
3068 'identifier': 'some_fake_identifier',
3069 }
3070
3071 server = db_utils.create_share_server(**server)
3072 if identifier == 'foobar':
3073 self.assertRaises(exception.ShareServerNotFound,
3074 db_api.share_server_search_by_identifier,
3075 self.ctxt, identifier)
3076 else:
3077 result = db_api.share_server_search_by_identifier(
3078 self.ctxt, identifier)
3079 self.assertEqual(server['id'], result[0]['id'])
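# The parametrization above suggests identifiers are matched by containment in
# either direction, with hyphens and underscores treated as interchangeable:
# 'fake' and '-fake-' are found inside 'some_fake_identifier', the padded
# variants contain it, and 'foobar' matches nothing.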
3080
3081 @ddt.data((True, True, True, 3),
3082 (True, True, False, 2),
3083 (True, False, False, 1),
3084 (False, False, False, 0))
3085 @ddt.unpack
3086 def test_share_server_get_all_unused_deletable(self,
3087 server_1_is_auto_deletable,
3088 server_2_is_auto_deletable,
3089 server_3_is_auto_deletable,
3090 expected_len):
3091 server1 = {
3092 'share_network_id': 'fake-share-net-id',
3093 'host': 'hostname',
3094 'status': constants.STATUS_ACTIVE,
3095 'is_auto_deletable': server_1_is_auto_deletable,
3096 'updated_at': datetime.datetime(2018, 5, 1)
3097 }
3098 server2 = {
3099 'share_network_id': 'fake-share-net-id',
3100 'host': 'hostname',
3101 'status': constants.STATUS_ACTIVE,
3102 'is_auto_deletable': server_2_is_auto_deletable,
3103 'updated_at': datetime.datetime(2018, 5, 1)
3104 }
3105 server3 = {
3106 'share_network_id': 'fake-share-net-id',
3107 'host': 'hostname',
3108 'status': constants.STATUS_ACTIVE,
3109 'is_auto_deletable': server_3_is_auto_deletable,
3110 'updated_at': datetime.datetime(2018, 5, 1)
3111 }
3112 db_utils.create_share_server(**server1)
3113 db_utils.create_share_server(**server2)
3114 db_utils.create_share_server(**server3)
3115 host = 'hostname'
3116 updated_before = datetime.datetime(2019, 5, 1)
3117
3118 unused_deletable = db_api.share_server_get_all_unused_deletable(
3119 self.ctxt, host, updated_before)
3120 self.assertEqual(expected_len, len(unused_deletable))
3121
3122 @ddt.data({'host': 'fakepool@fakehost'},
3123 {'status': constants.STATUS_SERVER_MIGRATING_TO},
3124 {'source_share_server_id': 'fake_ss_id'})
3125 def test_share_server_get_all_with_filters(self, filters):
3126 db_utils.create_share_server(**filters)
3127 db_utils.create_share_server()
3128 filter_keys = filters.keys()
3129
3130 results = db_api.share_server_get_all_with_filters(self.ctxt, filters)
3131
3132 self.assertEqual(1, len(results))
3133 for result in results:
3134 for key in filter_keys:
3135 self.assertEqual(result[key], filters[key])
3136
3137 @ddt.data('fake@fake', 'host1@backend1')
3138 def test_share_server_get_all_by_host(self, host):
3139 db_utils.create_share_server(host='fake@fake')
3140 db_utils.create_share_server(host='host1@backend1')
3141
3142 share_servers = db_api.share_server_get_all_by_host(self.ctxt, host)
3143
3144 self.assertEqual(1, len(share_servers))
3145 for share_server in share_servers:
3146 self.assertEqual(host, share_server['host'])
3147
3148
3149 class ServiceDatabaseAPITestCase(test.TestCase):
3150
3151 def setUp(self):
3152 super(ServiceDatabaseAPITestCase, self).setUp()
3153 self.ctxt = context.RequestContext(user_id='user_id',
3154 project_id='project_id',
3155 is_admin=True)
3156
3157 self.service_data = {'host': "fake_host",
3158 'binary': "fake_binary",
3159 'topic': "fake_topic",
3160 'report_count': 0,
3161 'availability_zone': "fake_zone"}
3162
3163 def test_create(self):
3164 service = db_api.service_create(self.ctxt, self.service_data)
3165 az = db_api.availability_zone_get(self.ctxt, "fake_zone")
3166
3167 self.assertEqual(az.id, service.availability_zone_id)
3168 self.assertSubDictMatch(self.service_data, service.to_dict())
3169
3170 def test_update(self):
3171 az_name = 'fake_zone2'
3172 update_data = {"availability_zone": az_name}
3173
3174 service = db_api.service_create(self.ctxt, self.service_data)
3175 db_api.service_update(self.ctxt, service['id'], update_data)
3176 service = db_api.service_get(self.ctxt, service['id'])
3177
3178 az = db_api.availability_zone_get(self.ctxt, az_name)
3179 self.assertEqual(az.id, service.availability_zone_id)
3180 valid_values = self.service_data
3181 valid_values.update(update_data)
3182 self.assertSubDictMatch(valid_values, service.to_dict())
3183
3184
3185 @ddt.ddt
3186 class AvailabilityZonesDatabaseAPITestCase(test.TestCase):
3187
3188 def setUp(self):
3189 super(AvailabilityZonesDatabaseAPITestCase, self).setUp()
3190 self.ctxt = context.RequestContext(user_id='user_id',
3191 project_id='project_id',
3192 is_admin=True)
3193
3194 @ddt.data({'fake': 'fake'}, {}, {'fakeavailability_zone': 'fake'},
3195 {'availability_zone': None}, {'availability_zone': ''})
3196 def test__ensure_availability_zone_exists_invalid(self, test_values):
3197 session = db_api.get_session()
3198
3199 self.assertRaises(ValueError, db_api._ensure_availability_zone_exists,
3200 self.ctxt, test_values, session)
3201
3202 def test_az_get(self):
3203 az_name = 'test_az'
3204 az = db_api.availability_zone_create_if_not_exist(self.ctxt, az_name)
3205
3206 az_by_id = db_api.availability_zone_get(self.ctxt, az['id'])
3207 az_by_name = db_api.availability_zone_get(self.ctxt, az_name)
3208
3209 self.assertEqual(az_name, az_by_id['name'])
3210 self.assertEqual(az_name, az_by_name['name'])
3211 self.assertEqual(az['id'], az_by_id['id'])
3212 self.assertEqual(az['id'], az_by_name['id'])
3213
3214 def test_az_get_all(self):
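# Only 'test2' has a service associated with it, so per the assertions below
# it is the only zone expected from availability_zone_get_all().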
3215 db_api.availability_zone_create_if_not_exist(self.ctxt, 'test1')
3216 db_api.availability_zone_create_if_not_exist(self.ctxt, 'test2')
3217 db_api.availability_zone_create_if_not_exist(self.ctxt, 'test3')
3218 db_api.service_create(self.ctxt, {'availability_zone': 'test2'})
3219
3220 actual_result = db_api.availability_zone_get_all(self.ctxt)
3221
3222 self.assertEqual(1, len(actual_result))
3223 self.assertEqual('test2', actual_result[0]['name'])
3224
3225
3226 @ddt.ddt
3227 class NetworkAllocationsDatabaseAPITestCase(test.TestCase):
3228
3229 def setUp(self):
3230 super(NetworkAllocationsDatabaseAPITestCase, self).setUp()
3231 self.user_id = 'user_id'
3232 self.project_id = 'project_id'
3233 self.share_server_id = 'foo_share_server_id'
3234 self.ctxt = context.RequestContext(
3235 user_id=self.user_id, project_id=self.project_id, is_admin=True)
3236 self.user_network_allocations = [
3237 {'share_server_id': self.share_server_id,
3238 'ip_address': '1.1.1.1',
3239 'status': constants.STATUS_ACTIVE,
3240 'label': None},
3241 {'share_server_id': self.share_server_id,
3242 'ip_address': '2.2.2.2',
3243 'status': constants.STATUS_ACTIVE,
3244 'label': 'user'},
3245 ]
3246 self.admin_network_allocations = [
3247 {'share_server_id': self.share_server_id,
3248 'ip_address': '3.3.3.3',
3249 'status': constants.STATUS_ACTIVE,
3250 'label': 'admin'},
3251 {'share_server_id': self.share_server_id,
3252 'ip_address': '4.4.4.4',
3253 'status': constants.STATUS_ACTIVE,
3254 'label': 'admin'},
3255 ]
3256
3257 def _setup_network_allocations_get_for_share_server(self):
3258 # Create share network
3259 share_network_data = {
3260 'id': 'foo_share_network_id',
3261 'user_id': self.user_id,
3262 'project_id': self.project_id,
3263 }
3264 db_api.share_network_create(self.ctxt, share_network_data)
3265
3266 # Create share server
3267 share_server_data = {
3268 'id': self.share_server_id,
3269 'share_network_id': share_network_data['id'],
3270 'host': 'fake_host',
3271 'status': 'active',
3272 }
3273 db_api.share_server_create(self.ctxt, share_server_data)
3274
3275 # Create user network allocations
3276 for user_network_allocation in self.user_network_allocations:
3277 db_api.network_allocation_create(
3278 self.ctxt, user_network_allocation)
3279
3280 # Create admin network allocations
3281 for admin_network_allocation in self.admin_network_allocations:
3282 db_api.network_allocation_create(
3283 self.ctxt, admin_network_allocation)
3284
3285 def test_get_only_user_network_allocations(self):
3286 self._setup_network_allocations_get_for_share_server()
3287
3288 result = db_api.network_allocations_get_for_share_server(
3289 self.ctxt, self.share_server_id, label='user')
3290
3291 self.assertEqual(
3292 len(self.user_network_allocations), len(result))
3293 for na in result:
3294 self.assertIn(na.label, (None, 'user'))
3295
3296 def test_get_only_admin_network_allocations(self):
3297 self._setup_network_allocations_get_for_share_server()
3298
3299 result = db_api.network_allocations_get_for_share_server(
3300 self.ctxt, self.share_server_id, label='admin')
3301
3302 self.assertEqual(
3303 len(self.admin_network_allocations), len(result))
3304 for na in result:
3305 self.assertEqual(na.label, 'admin')
3306
3307 def test_get_all_network_allocations(self):
3308 self._setup_network_allocations_get_for_share_server()
3309
3310 result = db_api.network_allocations_get_for_share_server(
3311 self.ctxt, self.share_server_id, label=None)
3312
3313 self.assertEqual(
3314 len(self.user_network_allocations +
3315 self.admin_network_allocations),
3316 len(result)
3317 )
3318 for na in result:
3319 self.assertIn(na.label, ('admin', 'user', None))
3320
3321 def test_network_allocation_get(self):
3322 self._setup_network_allocations_get_for_share_server()
3323
3324 for allocation in self.admin_network_allocations:
3325 result = db_api.network_allocation_get(self.ctxt, allocation['id'])
3326
3327 self.assertIsInstance(result, models.NetworkAllocation)
3328 self.assertEqual(allocation['id'], result.id)
3329
3330 for allocation in self.user_network_allocations:
3331 result = db_api.network_allocation_get(self.ctxt, allocation['id'])
3332
3333 self.assertIsInstance(result, models.NetworkAllocation)
3334 self.assertEqual(allocation['id'], result.id)
3335
3336 def test_network_allocation_get_no_result(self):
3337 self._setup_network_allocations_get_for_share_server()
3338
3339 self.assertRaises(exception.NotFound,
3340 db_api.network_allocation_get,
3341 self.ctxt,
3342 id='fake')
3343
3344 @ddt.data(True, False)
3345 def test_network_allocation_get_read_deleted(self, read_deleted):
3346 self._setup_network_allocations_get_for_share_server()
3347
3348 deleted_allocation = {
3349 'share_server_id': self.share_server_id,
3350 'ip_address': '1.1.1.1',
3351 'status': constants.STATUS_ACTIVE,
3352 'label': None,
3353 'deleted': True,
3354 }
3355
3356 new_obj = db_api.network_allocation_create(self.ctxt,
3357 deleted_allocation)
3358 if read_deleted:
3359 result = db_api.network_allocation_get(self.ctxt, new_obj.id,
3360 read_deleted=read_deleted)
3361 self.assertIsInstance(result, models.NetworkAllocation)
3362 self.assertEqual(new_obj.id, result.id)
3363 else:
3364 self.assertRaises(exception.NotFound,
3365 db_api.network_allocation_get,
3366 self.ctxt,
3367 id=self.share_server_id)
3368
3369 def test_network_allocation_update(self):
3370 self._setup_network_allocations_get_for_share_server()
3371
3372 for allocation in self.admin_network_allocations:
3373 old_obj = db_api.network_allocation_get(self.ctxt,
3374 allocation['id'])
3375 self.assertEqual('False', old_obj.deleted)
3376 updated_object = db_api.network_allocation_update(
3377 self.ctxt, allocation['id'], {'deleted': 'True'})
3378
3379 self.assertEqual('True', updated_object.deleted)
3380
3381 @ddt.data(True, False)
3382 def test_network_allocation_update_read_deleted(self, read_deleted):
3383 self._setup_network_allocations_get_for_share_server()
3384
3385 db_api.network_allocation_update(
3386 self.ctxt,
3387 self.admin_network_allocations[0]['id'],
3388 {'deleted': 'True'}
3389 )
3390
3391 if read_deleted:
3392 updated_object = db_api.network_allocation_update(
3393 self.ctxt, self.admin_network_allocations[0]['id'],
3394 {'deleted': 'False'}, read_deleted=read_deleted
3395 )
3396 self.assertEqual('False', updated_object.deleted)
3397 else:
3398 self.assertRaises(exception.NotFound,
3399 db_api.network_allocation_update,
3400 self.ctxt,
3401 id=self.share_server_id,
3402 values={'deleted': read_deleted},
3403 read_deleted=read_deleted)
3404
3405
3406 class ReservationDatabaseAPITest(test.TestCase):
3407
3408 def setUp(self):
3409 super(ReservationDatabaseAPITest, self).setUp()
3410 self.context = context.get_admin_context()
3411
3412 def test_reservation_expire(self):
3413 quota_usage = db_api.quota_usage_create(self.context, 'fake_project',
3414 'fake_user', 'fake_resource',
3415 0, 12, until_refresh=None)
3416 session = db_api.get_session()
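# Create one reservation that expired a day ago and one that expires a day
# from now; reservation_expire() should drop only the expired one, rolling
# its delta (10) back out of the usage's reserved count (12 -> 2).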
3417 for time_s in (-1, 1):
3418 reservation = db_api._reservation_create(
3419 self.context, 'fake_uuid',
3420 quota_usage, 'fake_project',
3421 'fake_user', 'fake_resource', 10,
3422 timeutils.utcnow() +
3423 datetime.timedelta(days=time_s),
3424 session=session)
3425
3426 db_api.reservation_expire(self.context)
3427
3428 reservations = db_api._quota_reservations_query(session, self.context,
3429 ['fake_uuid']).all()
3430 quota_usage = db_api.quota_usage_get(self.context, 'fake_project',
3431 'fake_resource')
3432 self.assertEqual(1, len(reservations))
3433 self.assertEqual(reservation['id'], reservations[0]['id'])
3434 self.assertEqual(2, quota_usage['reserved'])
3435
3436
3437 @ddt.ddt
3438 class PurgeDeletedTest(test.TestCase):
3439
3440 def setUp(self):
3441 super(PurgeDeletedTest, self).setUp()
3442 self.context = context.get_admin_context()
3443
3444 def _days_ago(self, begin, end):
3445 return timeutils.utcnow() - datetime.timedelta(
3446 days=random.randint(begin, end))
3447
3448 def _sqlite_has_fk_constraint(self):
3449 # SQLite enforces foreign keys only from 3.6.19 on; be strict and require >= 3.7
3450 import sqlite3
3451 tup = sqlite3.sqlite_version_info
3452 return tup[0] > 3 or (tup[0] == 3 and tup[1] >= 7)
3453
3454 def _turn_on_foreign_key(self):
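# SQLite only enforces foreign key constraints on a connection after
# "PRAGMA foreign_keys = ON" has been issued on it.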
3455 engine = db_api.get_engine()
3456 connection = engine.raw_connection()
3457 try:
3458 cursor = connection.cursor()
3459 cursor.execute("PRAGMA foreign_keys = ON")
3460 finally:
3461 connection.close()
3462
3463 @ddt.data({"del_days": 0, "num_left": 0},
3464 {"del_days": 10, "num_left": 2},
3465 {"del_days": 20, "num_left": 4})
3466 @ddt.unpack
3467 def test_purge_records_with_del_days(self, del_days, num_left):
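# Each ddt case purges soft-deleted rows older than del_days days: two rows
# per table fall into each age bucket (0-9 and 10-19 days ago), so num_left
# is how many rows per table should survive the purge.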
3468 fake_now = timeutils.utcnow()
3469 with mock.patch.object(timeutils, 'utcnow',
3470 mock.Mock(return_value=fake_now)):
3471 # create resources soft-deleted in 0~9, 10~19 days ago
3472 for start, end in ((0, 9), (10, 19)):
3473 for unused in range(2):
3474 # share type
3475 db_utils.create_share_type(id=uuidutils.generate_uuid(),
3476 deleted_at=self._days_ago(start,
3477 end))
3478 # share
3479 share = db_utils.create_share_without_instance(
3480 metadata={},
3481 deleted_at=self._days_ago(start, end))
3482 # create share network
3483 network = db_utils.create_share_network(
3484 id=uuidutils.generate_uuid(),
3485 deleted_at=self._days_ago(start, end))
3486 # create security service
3487 db_utils.create_security_service(
3488 id=uuidutils.generate_uuid(),
3489 share_network_id=network.id,
3490 deleted_at=self._days_ago(start, end))
3491 # create share instance
3492 s_instance = db_utils.create_share_instance(
3493 id=uuidutils.generate_uuid(),
3494 share_network_id=network.id,
3495 share_id=share.id)
3496 # share access
3497 db_utils.create_share_access(
3498 id=uuidutils.generate_uuid(),
3499 share_id=share['id'],
3500 deleted_at=self._days_ago(start, end))
3501 # create share server
3502 db_utils.create_share_server(
3503 id=uuidutils.generate_uuid(),
3504 deleted_at=self._days_ago(start, end),
3505 share_network_id=network.id)
3506 # create snapshot
3507 db_api.share_snapshot_create(
3508 self.context, {'share_id': share['id'],
3509 'deleted_at': self._days_ago(start,
3510 end)},
3511 create_snapshot_instance=False)
3512 # update share instance
3513 db_api.share_instance_update(
3514 self.context,
3515 s_instance.id,
3516 {'deleted_at': self._days_ago(start, end)})
3517
3518 db_api.purge_deleted_records(self.context, age_in_days=del_days)
3519
3520 for model in [models.ShareTypes, models.Share,
3521 models.ShareNetwork, models.ShareAccessMapping,
3522 models.ShareInstance, models.ShareServer,
3523 models.ShareSnapshot, models.SecurityService]:
3524 rows = db_api.model_query(self.context, model).count()
3525 self.assertEqual(num_left, rows)
3526
3527 def test_purge_records_with_illegal_args(self):
3528 self.assertRaises(TypeError, db_api.purge_deleted_records,
3529 self.context)
3530 self.assertRaises(exception.InvalidParameterValue,
3531 db_api.purge_deleted_records,
3532 self.context,
3533 age_in_days=-1)
3534
3535 def test_purge_records_with_constraint(self):
3536 if not self._sqlite_has_fk_constraint():
3537 self.skipTest(
3538 'sqlite is too old for reliable SQLA foreign_keys')
3539 self._turn_on_foreign_key()
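# With foreign keys enforced, the first purge below must leave share type1
# in place because an undeleted share still references it; once the share
# and its instance are deleted, the second purge clears the remaining rows.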
3540 type_id = uuidutils.generate_uuid()
3541 # create share type1
3542 db_utils.create_share_type(id=type_id,
3543 deleted_at=self._days_ago(1, 1))
3544 # create share type2
3545 db_utils.create_share_type(id=uuidutils.generate_uuid(),
3546 deleted_at=self._days_ago(1, 1))
3547 # create share
3548 share = db_utils.create_share(share_type_id=type_id)
3549
3550 db_api.purge_deleted_records(self.context, age_in_days=0)
3551 type_row = db_api.model_query(self.context,
3552 models.ShareTypes).count()
3553 # share type1 should not be deleted
3554 self.assertEqual(1, type_row)
3555 db_api.model_query(self.context, models.ShareInstance).delete()
3556 db_api.share_delete(self.context, share['id'])
3557
3558 db_api.purge_deleted_records(self.context, age_in_days=0)
3559 s_row = db_api.model_query(self.context, models.Share).count()
3560 type_row = db_api.model_query(self.context,
3561 models.ShareTypes).count()
3562 self.assertEqual(0, s_row + type_row)
3563
3564
3565 @ddt.ddt
3566 class ShareTypeAPITestCase(test.TestCase):
3567
3568 def setUp(self):
3569 super(ShareTypeAPITestCase, self).setUp()
3570 self.ctxt = context.RequestContext(
3571 user_id='user_id', project_id='project_id', is_admin=True)
3572
3573 @ddt.data({'used_by_shares': True, 'used_by_group_types': False},
3574 {'used_by_shares': False, 'used_by_group_types': True},
3575 {'used_by_shares': True, 'used_by_group_types': True})
3576 @ddt.unpack
3577 def test_share_type_destroy_in_use(self, used_by_shares,
3578 used_by_group_types):
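# share_type_destroy should refuse with ShareTypeInUse while any share or
# share group type still references the type; once the dependent resources
# are removed below, destroying share_type_1 should succeed and also drop
# its extra specs and project access entries.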
3579 share_type_1 = db_utils.create_share_type(
3580 name='orange', extra_specs={'somekey': 'someval'},
3581 is_public=False, override_defaults=True)
3582 share_type_2 = db_utils.create_share_type(
3583 name='regalia', override_defaults=True)
3584 db_api.share_type_access_add(self.ctxt,
3585 share_type_1['id'],
3586 "2018ndaetfigovnsaslcahfavmrpions")
3587 db_api.share_type_access_add(self.ctxt,
3588 share_type_1['id'],
3589 "2016ndaetfigovnsaslcahfavmrpions")
3590 if used_by_shares:
3591 share_1 = db_utils.create_share(share_type_id=share_type_1['id'])
3592 db_utils.create_share(share_type_id=share_type_2['id'])
3593 if used_by_group_types:
3594 group_type_1 = db_utils.create_share_group_type(
3595 name='crimson', share_types=[share_type_1['id']])
3596 db_utils.create_share_group_type(
3597 name='tide', share_types=[share_type_2['id']])
3598 share_group_1 = db_utils.create_share_group(
3599 share_group_type_id=group_type_1['id'],
3600 share_types=[share_type_1['id']])
3601
3602 self.assertRaises(exception.ShareTypeInUse,
3603 db_api.share_type_destroy,
3604 self.ctxt, share_type_1['id'])
3605 self.assertRaises(exception.ShareTypeInUse,
3606 db_api.share_type_destroy,
3607 self.ctxt, share_type_2['id'])
3608
3609 # Let's clean up share_type_1 and verify it is gone
3610 if used_by_shares:
3611 db_api.share_instance_delete(self.ctxt, share_1.instance.id)
3612 if used_by_group_types:
3613 db_api.share_group_destroy(self.ctxt, share_group_1['id'])
3614 db_api.share_group_type_destroy(self.ctxt,
3615 group_type_1['id'])
3616
3617 self.assertIsNone(
3618 db_api.share_type_destroy(self.ctxt, share_type_1['id']))
3619 self.assertDictMatch(
3620 {}, db_api.share_type_extra_specs_get(
3621 self.ctxt, share_type_1['id']))
3622 self.assertRaises(exception.ShareTypeNotFound,
3623 db_api.share_type_access_get_all,
3624 self.ctxt, share_type_1['id'])
3625 self.assertRaises(exception.ShareTypeNotFound,
3626 db_api.share_type_get,
3627 self.ctxt, share_type_1['id'])
3628
3629 # share_type_2 must still be around
3630 self.assertEqual(
3631 share_type_2['id'],
3632 db_api.share_type_get(self.ctxt, share_type_2['id'])['id'])
3633
3634 @ddt.data({'usages': False, 'reservations': False},
3635 {'usages': False, 'reservations': True},
3636 {'usages': True, 'reservations': False})
3637 @ddt.unpack
3638 def test_share_type_destroy_quotas_and_reservations(self, usages,
3639 reservations):
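# Destroying a share type should also clean up its per-share-type quotas,
# quota usages and quota reservations; per the assertions at the end, only
# the per-user reservations are expected to remain.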
3640 share_type = db_utils.create_share_type(name='clemsontigers')
3641 shares_quota = db_api.quota_create(
3642 self.ctxt, "fake-project-id", 'shares', 10,
3643 share_type_id=share_type['id'])
3644 snapshots_quota = db_api.quota_create(
3645 self.ctxt, "fake-project-id", 'snapshots', 30,
3646 share_type_id=share_type['id'])
3647
3648 if reservations:
3649 resources = {
3650 'shares': quota.ReservableResource('shares', '_sync_shares'),
3651 'snapshots': quota.ReservableResource(
3652 'snapshots', '_sync_snapshots'),
3653 }
3654 project_quotas = {
3655 'shares': shares_quota.hard_limit,
3656 'snapshots': snapshots_quota.hard_limit,
3657 }
3658 user_quotas = {
3659 'shares': shares_quota.hard_limit,
3660 'snapshots': snapshots_quota.hard_limit,
3661 }
3662 deltas = {'shares': 1, 'snapshots': 3}
3663 expire = timeutils.utcnow() + datetime.timedelta(seconds=86400)
3664 reservation_uuids = db_api.quota_reserve(
3665 self.ctxt, resources, project_quotas, user_quotas,
3666 project_quotas, deltas, expire, False, 30,
3667 project_id='fake-project-id', share_type_id=share_type['id'])
3668
3669 db_session = db_api.get_session()
3670 q_reservations = db_api._quota_reservations_query(
3671 db_session, self.ctxt, reservation_uuids).all()
3672 # There should be 2 "user" reservations and 2 "share-type"
3673 # quota reservations
3674 self.assertEqual(4, len(q_reservations))
3675 q_share_type_reservations = [qr for qr in q_reservations
3676 if qr['share_type_id'] is not None]
3677 # There should be exactly two "share type" quota reservations
3678 self.assertEqual(2, len(q_share_type_reservations))
3679 for q_reservation in q_share_type_reservations:
3680 self.assertEqual(q_reservation['share_type_id'],
3681 share_type['id'])
3682
3683 if usages:
3684 db_api.quota_usage_create(self.ctxt, 'fake-project-id',
3685 'fake-user-id', 'shares', 3, 2, False,
3686 share_type_id=share_type['id'])
3687 db_api.quota_usage_create(self.ctxt, 'fake-project-id',
3688 'fake-user-id', 'snapshots', 2, 2, False,
3689 share_type_id=share_type['id'])
3690 q_usages = db_api.quota_usage_get_all_by_project_and_share_type(
3691 self.ctxt, 'fake-project-id', share_type['id'])
3692 self.assertEqual(3, q_usages['shares']['in_use'])
3693 self.assertEqual(2, q_usages['shares']['reserved'])
3694 self.assertEqual(2, q_usages['snapshots']['in_use'])
3695 self.assertEqual(2, q_usages['snapshots']['reserved'])
3696
3697 # Validate that quotas exist
3698 share_type_quotas = db_api.quota_get_all_by_project_and_share_type(
3699 self.ctxt, 'fake-project-id', share_type['id'])
3700 expected_quotas = {
3701 'project_id': 'fake-project-id',
3702 'share_type_id': share_type['id'],
3703 'shares': 10,
3704 'snapshots': 30,
3705 }
3706 self.assertDictMatch(expected_quotas, share_type_quotas)
3707
3708 db_api.share_type_destroy(self.ctxt, share_type['id'])
3709
3710 self.assertRaises(exception.ShareTypeNotFound,
3711 db_api.share_type_get,
3712 self.ctxt, share_type['id'])
3713 # Quotas must be gone
3714 share_type_quotas = db_api.quota_get_all_by_project_and_share_type(
3715 self.ctxt, 'fake-project-id', share_type['id'])
3716 self.assertEqual({'project_id': 'fake-project-id',
3717 'share_type_id': share_type['id']},
3718 share_type_quotas)
3719
3720 # Check usages and reservations
3721 if usages:
3722 q_usages = db_api.quota_usage_get_all_by_project_and_share_type(
3723 self.ctxt, 'fake-project-id', share_type['id'])
3724 expected_q_usages = {'project_id': 'fake-project-id',
3725 'share_type_id': share_type['id']}
3726 self.assertDictMatch(expected_q_usages, q_usages)
3727 if reservations:
3728 q_reservations = db_api._quota_reservations_query(
3729 db_session, self.ctxt, reservation_uuids).all()
3730 # Only the "user" quota reservations should be left, since we didn't
3731 # clean them up.
3732 self.assertEqual(2, len(q_reservations))
3733 for q_reservation in q_reservations:
3734 self.assertIsNone(q_reservation['share_type_id'])
3735
3736 @ddt.data(
3737 (None, None, 5),
3738 ('fake2', None, 2),
3739 (None, 'fake', 3),
3740 )
3741 @ddt.unpack
3742 def test_share_replica_data_get_for_project(
3743 self, user_id, share_type_id, expected_result):
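# Five replicas are created in total: three for share_1 (user 'fake',
# share_type_1) and two for share_2 (user 'fake2', share_type_2), so
# expected_result reflects the optional user_id/share_type_id filters.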
3744 kwargs = {}
3745 if share_type_id:
3746 kwargs.update({'id': share_type_id})
3747 share_type_1 = db_utils.create_share_type(**kwargs)
3748 share_type_2 = db_utils.create_share_type()
3749
3750 share_1 = db_utils.create_share(size=1, user_id='fake',
3751 share_type_id=share_type_1['id'])
3752 share_2 = db_utils.create_share(size=1, user_id='fake2',
3753 share_type_id=share_type_2['id'])
3754 project_id = share_1['project_id']
3755 db_utils.create_share_replica(
3756 replica_state=constants.REPLICA_STATE_ACTIVE,
3757 share_id=share_1['id'], share_type_id=share_type_1['id'])
3758 db_utils.create_share_replica(
3759 replica_state=constants.REPLICA_STATE_IN_SYNC,
3760 share_id=share_1['id'], share_type_id=share_type_1['id'])
3761 db_utils.create_share_replica(
3762 replica_state=constants.REPLICA_STATE_IN_SYNC,
3763 share_id=share_1['id'], share_type_id=share_type_1['id'])
3764
3765 db_utils.create_share_replica(
3766 replica_state=constants.REPLICA_STATE_ACTIVE,
3767 share_id=share_2['id'], share_type_id=share_type_2['id'])
3768 db_utils.create_share_replica(
3769 replica_state=constants.REPLICA_STATE_IN_SYNC,
3770 share_id=share_2['id'], share_type_id=share_type_2['id'])
3771
3772 kwargs = {}
3773 if user_id:
3774 kwargs.update({'user_id': user_id})
3775 if share_type_id:
3776 kwargs.update({'share_type_id': share_type_id})
3777
3778 total_amount, total_size = db_api.share_replica_data_get_for_project(
3779 self.ctxt, project_id, **kwargs)
3780 self.assertEqual(expected_result, total_amount)
3781 self.assertEqual(expected_result, total_size)
3782
3783 def test_share_type_get_by_name_or_id_found_by_id(self):
3784 share_type = db_utils.create_share_type()
3785
3786 result = db_api.share_type_get_by_name_or_id(
3787 self.ctxt, share_type['id'])
3788
3789 self.assertIsNotNone(result)
3790 self.assertEqual(share_type['id'], result['id'])
3791
3792 def test_share_type_get_by_name_or_id_found_by_name(self):
3793 name = uuidutils.generate_uuid()
3794 db_utils.create_share_type(name=name)
3795
3796 result = db_api.share_type_get_by_name_or_id(self.ctxt, name)
3797
3798 self.assertIsNotNone(result)
3799 self.assertEqual(name, result['name'])
3800 self.assertNotEqual(name, result['id'])
3801
3802 def test_share_type_get_by_name_or_id_when_does_not_exist(self):
3803 fake_id = uuidutils.generate_uuid()
3804
3805 result = db_api.share_type_get_by_name_or_id(self.ctxt, fake_id)
3806
3807 self.assertIsNone(result)
3808
3809 def test_share_type_get_with_none_id(self):
3810 self.assertRaises(exception.DefaultShareTypeNotConfigured,
3811 db_api.share_type_get, self.ctxt, None)
3812
3813 @ddt.data(
3814 {'name': 'st_1', 'description': 'des_1', 'is_public': True},
3815 {'name': 'st_2', 'description': 'des_2', 'is_public': None},
3816 {'name': 'st_3', 'description': None, 'is_public': False},
3817 {'name': None, 'description': 'des_4', 'is_public': True},
3818 )
3819 @ddt.unpack
3820 def test_share_type_update(self, name, description, is_public):
3821 values = {}
3822 if name:
3823 values.update({'name': name})
3824 if description:
3825 values.update({'description': description})
3826 if is_public is not None:
3827 values.update({'is_public': is_public})
3828 share_type = db_utils.create_share_type(name='st_name')
3829 db_api.share_type_update(self.ctxt, share_type['id'], values)
3830 updated_st = db_api.share_type_get_by_name_or_id(self.ctxt,
3831 share_type['id'])
3832 if name:
3833 self.assertEqual(name, updated_st['name'])
3834 if description:
3835 self.assertEqual(description, updated_st['description'])
3836 if is_public is not None:
3837 self.assertEqual(is_public, updated_st['is_public'])
3838
3839 def test_share_type_update_not_found(self):
3840 share_type = db_utils.create_share_type(name='st_update_test')
3841 db_api.share_type_destroy(self.ctxt, share_type['id'])
3842 values = {"name": "not_exist"}
3843 self.assertRaises(exception.ShareTypeNotFound,
3844 db_api.share_type_update,
3845 self.ctxt, share_type['id'], values)
3846
3847
3848 class MessagesDatabaseAPITestCase(test.TestCase):
3849
3850 def setUp(self):
3851 super(MessagesDatabaseAPITestCase, self).setUp()
3852 self.user_id = uuidutils.generate_uuid()
3853 self.project_id = uuidutils.generate_uuid()
3854 self.ctxt = context.RequestContext(
3855 user_id=self.user_id, project_id=self.project_id, is_admin=False)
3856
3857 def test_message_create(self):
3858 result = db_utils.create_message(project_id=self.project_id,
3859 action_id='001')
3860
3861 self.assertIsNotNone(result['id'])
3862
3863 def test_message_delete(self):
3864 result = db_utils.create_message(project_id=self.project_id,
3865 action_id='001')
3866
3867 db_api.message_destroy(self.ctxt, result)
3868
3869 self.assertRaises(exception.NotFound, db_api.message_get,
3870 self.ctxt, result['id'])
3871
3872 def test_message_get(self):
3873 message = db_utils.create_message(project_id=self.project_id,
3874 action_id='001')
3875
3876 result = db_api.message_get(self.ctxt, message['id'])
3877
3878 self.assertEqual(message['id'], result['id'])
3879 self.assertEqual(message['action_id'], result['action_id'])
3880 self.assertEqual(message['detail_id'], result['detail_id'])
3881 self.assertEqual(message['project_id'], result['project_id'])
3882 self.assertEqual(message['message_level'], result['message_level'])
3883
3884 def test_message_get_not_found(self):
3885 self.assertRaises(exception.MessageNotFound, db_api.message_get,
3886 self.ctxt, 'fake_id')
3887
3888 def test_message_get_different_project(self):
3889 message = db_utils.create_message(project_id='another-project',
3890 action_id='001')
3891
3892 self.assertRaises(exception.MessageNotFound, db_api.message_get,
3893 self.ctxt, message['id'])
3894
3895 def test_message_get_all(self):
3896 db_utils.create_message(project_id=self.project_id, action_id='001')
3897 db_utils.create_message(project_id=self.project_id, action_id='001')
3898 db_utils.create_message(project_id='another-project', action_id='001')
3899
3900 result = db_api.message_get_all(self.ctxt)
3901
3902 self.assertEqual(2, len(result))
3903
3904 def test_message_get_all_as_admin(self):
3905 db_utils.create_message(project_id=self.project_id, action_id='001')
3906 db_utils.create_message(project_id=self.project_id, action_id='001')
3907 db_utils.create_message(project_id='another-project', action_id='001')
3908
3909 result = db_api.message_get_all(self.ctxt.elevated())
3910
3911 self.assertEqual(3, len(result))
3912
3913 def test_message_get_all_with_filter(self):
3914 for i in ['001', '002', '002']:
3915 db_utils.create_message(project_id=self.project_id, action_id=i)
3916
3917 result = db_api.message_get_all(self.ctxt,
3918 filters={'action_id': '002'})
3919
3920 self.assertEqual(2, len(result))
3921
3922 def test_message_get_all_with_created_since_or_before_filter(self):
3923 now = timeutils.utcnow()
3924 db_utils.create_message(project_id=self.project_id,
3925 action_id='001',
3926 created_at=now - datetime.timedelta(seconds=1))
3927 db_utils.create_message(project_id=self.project_id,
3928 action_id='001',
3929 created_at=now + datetime.timedelta(seconds=1))
3930 db_utils.create_message(project_id=self.project_id,
3931 action_id='001',
3932 created_at=now + datetime.timedelta(seconds=2))
3933 result1 = db_api.message_get_all(self.ctxt,
3934 filters={'created_before': now})
3935 result2 = db_api.message_get_all(self.ctxt,
3936 filters={'created_since': now})
3937 self.assertEqual(1, len(result1))
3938 self.assertEqual(2, len(result2))
3939
3940 def test_message_get_all_with_invalid_sort_key(self):
3941 self.assertRaises(exception.InvalidInput, db_api.message_get_all,
3942 self.ctxt, sort_key='invalid_key')
3943
3944 def test_message_get_all_sorted_asc(self):
3945 ids = []
3946 for i in ['001', '002', '003']:
3947 msg = db_utils.create_message(project_id=self.project_id,
3948 action_id=i)
3949 ids.append(msg.id)
3950
3951 result = db_api.message_get_all(self.ctxt,
3952 sort_key='action_id',
3953 sort_dir='asc')
3954 result_ids = [r.id for r in result]
3955 self.assertEqual(result_ids, ids)
3956
3957 def test_message_get_all_with_limit_and_offset(self):
3958 for i in ['001', '002']:
3959 db_utils.create_message(project_id=self.project_id,
3960 action_id=i)
3961
3962 result = db_api.message_get_all(self.ctxt, limit=1, offset=1)
3963 self.assertEqual(1, len(result))
3964
3965 def test_message_get_all_sorted(self):
3966 ids = []
3967 for i in ['003', '002', '001']:
3968 msg = db_utils.create_message(project_id=self.project_id,
3969 action_id=i)
3970 ids.append(msg.id)
3971
3972 # The sort direction defaults to descending
3973 result = db_api.message_get_all(self.ctxt, sort_key='action_id')
3974 result_ids = [r.id for r in result]
3975 self.assertEqual(result_ids, ids)
3976
3977 def test_cleanup_expired_messages(self):
3978 adm_context = self.ctxt.elevated()
3979
3980 now = timeutils.utcnow()
3981 db_utils.create_message(project_id=self.project_id,
3982 action_id='001',
3983 expires_at=now)
3984 db_utils.create_message(project_id=self.project_id,
3985 action_id='001',
3986 expires_at=now - datetime.timedelta(days=1))
3987 db_utils.create_message(project_id=self.project_id,
3988 action_id='001',
3989 expires_at=now + datetime.timedelta(days=1))
3990
3991 with mock.patch.object(timeutils, 'utcnow') as mock_time_now:
3992 mock_time_now.return_value = now
3993 db_api.cleanup_expired_messages(adm_context)
3994 messages = db_api.message_get_all(adm_context)
3995 self.assertEqual(2, len(messages))
3996
3997
3998 class BackendInfoDatabaseAPITestCase(test.TestCase):
3999
4000 def setUp(self):
4001 """Run before each test."""
4002 super(BackendInfoDatabaseAPITestCase, self).setUp()
4003 self.ctxt = context.get_admin_context()
4004
4005 def test_create(self):
4006 host = "fake_host"
4007 value = "fake_hash_value"
4008
4009 initial_data = db_api.backend_info_get(self.ctxt, host)
4010 db_api.backend_info_update(self.ctxt, host, value)
4011 actual_data = db_api.backend_info_get(self.ctxt, host)
4012
4013 self.assertIsNone(initial_data)
4014 self.assertEqual(value, actual_data['info_hash'])
4015 self.assertEqual(host, actual_data['host'])
4016
4017 def test_get(self):
4018 host = "fake_host"
4019 value = "fake_hash_value"
4020
4021 db_api.backend_info_update(self.ctxt, host, value, False)
4022 actual_result = db_api.backend_info_get(self.ctxt, host)
4023
4024 self.assertEqual(value, actual_result['info_hash'])
4025 self.assertEqual(host, actual_result['host'])
4026
4027 def test_delete(self):
4028 host = "fake_host"
4029 value = "fake_hash_value"
4030
4031 db_api.backend_info_update(self.ctxt, host, value)
4032 initial_data = db_api.backend_info_get(self.ctxt, host)
4033
4034 db_api.backend_info_update(self.ctxt, host, delete_existing=True)
4035 actual_data = db_api.backend_info_get(self.ctxt, host)
4036
4037 self.assertEqual(value, initial_data['info_hash'])
4038 self.assertEqual(host, initial_data['host'])
4039 self.assertIsNone(actual_data)
4040
4041 def test_double_update(self):
4042 host = "fake_host"
4043 value_1 = "fake_hash_value_1"
4044 value_2 = "fake_hash_value_2"
4045
4046 initial_data = db_api.backend_info_get(self.ctxt, host)
4047 db_api.backend_info_update(self.ctxt, host, value_1)
4048 db_api.backend_info_update(self.ctxt, host, value_2)
4049 actual_data = db_api.backend_info_get(self.ctxt, host)
4050
4051 self.assertIsNone(initial_data)
4052 self.assertEqual(value_2, actual_data['info_hash'])
4053 self.assertEqual(host, actual_data['host'])
4054
4055
4056 @ddt.ddt
4057 class ShareResourcesAPITestCase(test.TestCase):
4058
4059 def setUp(self):
4060 super(ShareResourcesAPITestCase, self).setUp()
4061 self.context = context.get_admin_context()
4062
4063 @ddt.data('controller-100', 'controller-0@otherstore03',
4064 'controller-0@otherstore01#pool200')
4065 def test_share_resources_host_update_no_matches(self, current_host):
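# None of the current_host values matches the hosts of the resources created
# below, so the update should report zero changes and leave every host
# untouched.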
4066 share_id = uuidutils.generate_uuid()
4067 share_network_id = uuidutils.generate_uuid()
4068 share_network_subnet_id = uuidutils.generate_uuid()
4069 if '@' in current_host:
4070 if '#' in current_host:
4071 new_host = 'new-controller-X@backendX#poolX'
4072 else:
4073 new_host = 'new-controller-X@backendX'
4074 else:
4075 new_host = 'new-controller-X'
4076 resources = [ # noqa
4077 # share instances
4078 db_utils.create_share_instance(
4079 share_id=share_id,
4080 host='controller-0@fancystore01#pool100',
4081 status=constants.STATUS_AVAILABLE),
4082 db_utils.create_share_instance(
4083 share_id=share_id,
4084 host='controller-0@otherstore02#pool100',
4085 status=constants.STATUS_ERROR),
4086 db_utils.create_share_instance(
4087 share_id=share_id,
4088 host='controller-2@beststore07#pool200',
4089 status=constants.STATUS_DELETING),
4090 # share groups
4091 db_utils.create_share_group(
4092 share_network_id=share_network_id,
4093 host='controller-0@fancystore01#pool200',
4094 status=constants.STATUS_AVAILABLE),
4095 db_utils.create_share_group(
4096 share_network_id=share_network_id,
4097 host='controller-0@otherstore02#pool100',
4098 status=constants.STATUS_ERROR),
4099 db_utils.create_share_group(
4100 share_network_id=share_network_id,
4101 host='controller-2@beststore07#pool100',
4102 status=constants.STATUS_DELETING),
4103 # share servers
4104 db_utils.create_share_server(
4105 share_network_subnet_id=share_network_subnet_id,
4106 host='controller-0@fancystore01',
4107 status=constants.STATUS_ACTIVE),
4108 db_utils.create_share_server(
4109 share_network_subnet_id=share_network_subnet_id,
4110 host='controller-0@otherstore02#pool100',
4111 status=constants.STATUS_ERROR),
4112 db_utils.create_share_server(
4113 share_network_subnet_id=share_network_subnet_id,
4114 host='controller-2@beststore07',
4115 status=constants.STATUS_DELETING),
4116
4117 ]
4118
4119 updates = db_api.share_resources_host_update(self.context,
4120 current_host,
4121 new_host)
4122
4123 expected_updates = {'instances': 0, 'servers': 0, 'groups': 0}
4124 self.assertDictMatch(expected_updates, updates)
4125 # validate that resources are unmodified:
4126 share_instances = db_api.share_instances_get_all(
4127 self.context, filters={'share_id': share_id})
4128 share_groups = db_api.share_group_get_all(
4129 self.context, filters={'share_network_id': share_network_id})
4130 share_servers = db_api._server_get_query(self.context).filter_by(
4131 share_network_subnet_id=share_network_subnet_id).all()
4132 self.assertEqual(3, len(share_instances))
4133 self.assertEqual(3, len(share_groups))
4134 self.assertEqual(3, len(share_servers))
4135 for share_instance in share_instances:
4136 self.assertFalse(share_instance['host'].startswith(new_host))
4137 for share_group in share_groups:
4138 self.assertFalse(share_group['host'].startswith(new_host))
4139 for share_server in share_servers:
4140 self.assertFalse(share_server['host'].startswith(new_host))
4141
4142 @ddt.data(
4143 {'current_host': 'controller-2',
4144 'expected_updates': {'instances': 1, 'servers': 2, 'groups': 1}},
4145 {'current_host': 'controller-0@fancystore01',
4146 'expected_updates': {'instances': 2, 'servers': 1, 'groups': 2}},
4147 {'current_host': 'controller-0@fancystore01#pool100',
4148 'expected_updates': {'instances': 1, 'servers': 1, 'groups': 0}})
4149 @ddt.unpack
4150 def test_share_resources_host_update_partial_matches(self, current_host,
4151 expected_updates):
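# current_host is matched at host, host@backend or host@backend#pool
# granularity (presumably by prefix), so the expected instance/group/server
# counts differ per ddt case; the totals are cross-checked below.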
4152 share_id = uuidutils.generate_uuid()
4153 share_network_id = uuidutils.generate_uuid()
4154 share_network_subnet_id = uuidutils.generate_uuid()
4155 if '@' in current_host:
4156 if '#' in current_host:
4157 new_host = 'new-controller-X@backendX#poolX'
4158 else:
4159 new_host = 'new-controller-X@backendX'
4160 else:
4161 new_host = 'new-controller-X'
4162 total_updates_expected = (expected_updates['instances']
4163 + expected_updates['groups']
4164 + expected_updates['servers'])
4165 resources = [ # noqa
4166 # share instances
4167 db_utils.create_share_instance(
4168 share_id=share_id,
4169 host='controller-0@fancystore01#pool100',
4170 status=constants.STATUS_AVAILABLE),
4171 db_utils.create_share_instance(
4172 share_id=share_id,
4173 host='controller-0@fancystore01#pool200',
4174 status=constants.STATUS_ERROR),
4175 db_utils.create_share_instance(
4176 share_id=share_id,
4177 host='controller-2@beststore07#pool200',
4178 status=constants.STATUS_DELETING),
4179 # share groups
4180 db_utils.create_share_group(
4181 share_network_id=share_network_id,
4182 host='controller-0@fancystore01#pool101',
4183 status=constants.STATUS_ACTIVE),
4184 db_utils.create_share_group(
4185 share_network_id=share_network_id,
4186 host='controller-0@fancystore01#pool101',
4187 status=constants.STATUS_ERROR),
4188 db_utils.create_share_group(
4189 share_network_id=share_network_id,
4190 host='controller-2@beststore07#pool200',
4191 status=constants.STATUS_DELETING),
4192 # share servers
4193 db_utils.create_share_server(
4194 share_network_subnet_id=share_network_subnet_id,
4195 host='controller-0@fancystore01#pool100',
4196 status=constants.STATUS_ACTIVE),
4197 db_utils.create_share_server(
4198 share_network_subnet_id=share_network_subnet_id,
4199 host='controller-2@fancystore01',
4200 status=constants.STATUS_ERROR),
4201 db_utils.create_share_server(
4202 share_network_subnet_id=share_network_subnet_id,
4203 host='controller-2@beststore07#pool200',
4204 status=constants.STATUS_DELETING),
4205 ]
4206
4207 actual_updates = db_api.share_resources_host_update(
4208 self.context, current_host, new_host)
4209
4210 share_instances = db_api.share_instances_get_all(
4211 self.context, filters={'share_id': share_id})
4212 share_groups = db_api.share_group_get_all(
4213 self.context, filters={'share_network_id': share_network_id})
4214 share_servers = db_api._server_get_query(self.context).filter_by(
4215 share_network_subnet_id=share_network_subnet_id).all()
4216
4217 updated_resources = [
4218 res for res in share_instances + share_groups + share_servers
4219 if res['host'].startswith(new_host)
4220 ]
4221 self.assertEqual(expected_updates, actual_updates)
4222 self.assertEqual(total_updates_expected, len(updated_resources))
4223
4224 def test_share_instances_status_update(self):
4225 instances = [db_utils.create_share_instance(
4226 status=constants.STATUS_SERVER_MIGRATING, share_id='fake')
4227 for unused in range(2)]
4228 share_instance_ids = [instance['id'] for instance in instances]
4229 values = {'status': constants.STATUS_AVAILABLE}
4230
4231 db_api.share_instances_status_update(
4232 self.context, share_instance_ids, values)
4233
4234 instances = [
4235 db_api.share_instance_get(self.context, instance_id)
4236 for instance_id in share_instance_ids]
4237
4238 for instance in instances:
4239 self.assertEqual(constants.STATUS_AVAILABLE, instance['status'])
4240
4241 def test_share_snapshot_instances_status_update(self):
4242 share_instance = db_utils.create_share_instance(
4243 status=constants.STATUS_AVAILABLE, share_id='fake')
4244 instances = [db_utils.create_snapshot_instance(
4245 'fake_snapshot_id_1', status=constants.STATUS_CREATING,
4246 share_instance_id=share_instance['id'])
4247 for unused in range(2)]
4248
4249 snapshot_instance_ids = [instance['id'] for instance in instances]
4250 values = {'status': constants.STATUS_AVAILABLE}
4251
4252 db_api.share_snapshot_instances_status_update(
4253 self.context, snapshot_instance_ids, values)
4254
4255 instances = [
4256 db_api.share_snapshot_instance_get(self.context, instance_id)
4257 for instance_id in snapshot_instance_ids]
4258
4259 for instance in instances:
4260 self.assertEqual(constants.STATUS_AVAILABLE, instance['status'])
4261
4262 def test_share_and_snapshot_instances_status_update(self):
4263 share_instance = db_utils.create_share_instance(
4264 status=constants.STATUS_AVAILABLE, share_id='fake')
4265 share_instance_ids = [share_instance['id']]
4266 fake_session = db_api.get_session()
4267 snap_instances = [db_utils.create_snapshot_instance(
4268 'fake_snapshot_id_1', status=constants.STATUS_CREATING,
4269 share_instance_id=share_instance['id'])
4270 for unused in range(2)]
4271
4272 snapshot_instance_ids = [instance['id'] for instance in snap_instances]
4273 values = {'status': constants.STATUS_AVAILABLE}
4274
4275 mock_update_share_instances = self.mock_object(
4276 db_api, 'share_instances_status_update',
4277 mock.Mock(return_value=[share_instance]))
4278 mock_update_snap_instances = self.mock_object(
4279 db_api, 'share_snapshot_instances_status_update',
4280 mock.Mock(return_value=snap_instances))
4281 mock_get_session = self.mock_object(
4282 db_api, 'get_session', mock.Mock(return_value=fake_session))
4283
4284 updated_share_instances, updated_snap_instances = (
4285 db_api.share_and_snapshot_instances_status_update(
4286 self.context, values, share_instance_ids=share_instance_ids,
4287 snapshot_instance_ids=snapshot_instance_ids))
4288
4289 mock_get_session.assert_called()
4290 mock_update_share_instances.assert_called_once_with(
4291 self.context, share_instance_ids, values, session=fake_session)
4292 mock_update_snap_instances.assert_called_once_with(
4293 self.context, snapshot_instance_ids, values, session=fake_session)
4294 self.assertEqual(updated_share_instances, [share_instance])
4295 self.assertEqual(updated_snap_instances, snap_instances)
4296
4297 @ddt.data(
4298 {
4299 'share_instance_status': constants.STATUS_ERROR,
4300 'snap_instance_status': constants.STATUS_AVAILABLE,
4301 'expected_exc': exception.InvalidShareInstance
4302 },
4303 {
4304 'share_instance_status': constants.STATUS_AVAILABLE,
4305 'snap_instance_status': constants.STATUS_ERROR,
4306 'expected_exc': exception.InvalidShareSnapshotInstance
4307 }
4308 )
4309 @ddt.unpack
4310 def test_share_and_snapshot_instances_status_update_invalid_status(
4311 self, share_instance_status, snap_instance_status, expected_exc):
4312 share_instance = db_utils.create_share_instance(
4313 status=share_instance_status, share_id='fake')
4314 share_snapshot_instance = db_utils.create_snapshot_instance(
4315 'fake_snapshot_id_1', status=snap_instance_status,
4316 share_instance_id=share_instance['id'])
4317 share_instance_ids = [share_instance['id']]
4318 snap_instance_ids = [share_snapshot_instance['id']]
4319 values = {'status': constants.STATUS_AVAILABLE}
4320 fake_session = db_api.get_session()
4321
4322 mock_get_session = self.mock_object(
4323 db_api, 'get_session', mock.Mock(return_value=fake_session))
4324 mock_instances_get_all = self.mock_object(
4325 db_api, 'share_instances_get_all',
4326 mock.Mock(return_value=[share_instance]))
4327 mock_snap_instances_get_all = self.mock_object(
4328 db_api, 'share_snapshot_instance_get_all_with_filters',
4329 mock.Mock(return_value=[share_snapshot_instance]))
4330
4331 self.assertRaises(expected_exc,
4332 db_api.share_and_snapshot_instances_status_update,
4333 self.context,
4334 values,
4335 share_instance_ids=share_instance_ids,
4336 snapshot_instance_ids=snap_instance_ids,
4337 current_expected_status=constants.STATUS_AVAILABLE)
4338
4339 mock_get_session.assert_called()
4340 mock_instances_get_all.assert_called_once_with(
4341 self.context, filters={'instance_ids': share_instance_ids},
4342 session=fake_session)
4343 if snap_instance_status == constants.STATUS_ERROR:
4344 mock_snap_instances_get_all.assert_called_once_with(
4345 self.context, {'instance_ids': snap_instance_ids},
4346 session=fake_session)