diff --git a/influxdb/client.py b/influxdb/client.py
index 4941ebeb..da14887d 100755
--- a/influxdb/client.py
+++ b/influxdb/client.py
@@ -8,6 +8,7 @@
 import requests
 import requests.exceptions
 
+from influxdb.resultset import ResultSet
 
 try:
     xrange
@@ -109,7 +110,7 @@ def __init__(self,
     # if one doesn't care in that, then it can simply change its client
     # instance 'keep_json_response_order' attribute value (to a falsy one).
     # This will then eventually help for performance considerations.
-    _keep_json_response_order = True
+    _keep_json_response_order = False
     # NB: For "group by" query type :
     # This setting is actually necessary in order to have a consistent and
     # reproducible rsp format if you "group by" on more than 1 tag.
@@ -122,33 +123,6 @@ def keep_json_response_order(self):
     def keep_json_response_order(self, new_value):
         self._keep_json_response_order = new_value
 
-    @staticmethod
-    def format_query_response(response):
-        """Returns a list of items from a query response"""
-        series = {}
-        if 'results' in response:
-            for result in response['results']:
-                if 'series' in result:
-                    for row in result['series']:
-                        items = []
-                        if 'name' in row:
-                            name = row['name']
-                            tags = row.get('tags', None)
-                            if tags:
-                                name = (row['name'], tuple(tags.items()))
-                            assert name not in series
-                            series[name] = items
-                        else:
-                            series = items  # Special case for system queries.
-                        if 'columns' in row and 'values' in row:
-                            columns = row['columns']
-                            for value in row['values']:
-                                item = {}
-                                for cur_col, field in enumerate(value):
-                                    item[columns[cur_col]] = field
-                                items.append(item)
-        return series
-
     def switch_database(self, database):
         """
         switch_database()
@@ -234,15 +208,16 @@ def query(self,
               query,
               params={},
               expected_response_code=200,
-              database=None,
-              raw=False):
+              database=None):
         """
         Query data
 
         :param params: Additional parameters to be passed to requests.
         :param database: Database to query, default to None.
         :param expected_response_code: Expected response code.
             Defaults to 200.
-        :param raw: Wether or not to return the raw influxdb response.
+
+        :rtype: ResultSet
+
         """
         params['q'] = query
@@ -261,8 +236,7 @@ def query(self,
             json_kw.update(object_pairs_hook=OrderedDict)
         data = response.json(**json_kw)
 
-        return (data if raw
-                else self.format_query_response(data))
+        return ResultSet(data)
 
     def write_points(self,
                      points,
@@ -327,8 +301,7 @@ def get_list_database(self):
         """
         Get the list of databases
         """
-        rsp = self.query("SHOW DATABASES")
-        return [db['name'] for db in rsp['databases']]
+        return list(self.query("SHOW DATABASES")['databases'])
 
     def create_database(self, dbname):
         """
@@ -368,21 +341,31 @@ def get_list_retention_policies(self, database=None):
         """
         Get the list of retention policies
         """
-        return self.query(
+        rsp = self.query(
             "SHOW RETENTION POLICIES %s" % (database or self._database)
         )
+        return list(rsp['results'])
 
     def get_list_series(self, database=None):
        """
        Get the list of series
        """
-        return self.query("SHOW SERIES", database=database)
+        rsp = self.query("SHOW SERIES", database=database)
+        series = []
+        for serie in rsp.items():
+            series.append(
+                {
+                    "name": serie[0][0],
+                    "tags": list(serie[1])
+                }
+            )
+        return series
 
     def get_list_users(self):
         """
         Get the list of users
         """
-        return self.query("SHOW USERS")
+        return list(self.query("SHOW USERS"))
 
     def delete_series(self, name, database=None):
         database = database or self._database
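With this change, `InfluxDBClient.query()` returns a `ResultSet` instead of a pre-formatted dict, and the `SHOW`-style helpers build plain lists from it. A minimal sketch of the reworked calling convention (assuming a reachable InfluxDB 0.9 server; host, credentials, database and measurement names are illustrative):

```python
from influxdb import InfluxDBClient

client = InfluxDBClient('localhost', 8086, 'root', 'root', 'example_db')

# query() now wraps the raw JSON response in a ResultSet.
rs = client.query('SELECT * FROM cpu_load_short')

# Indexing a ResultSet by serie name yields its points (as a generator).
points = list(rs['cpu_load_short'])
# e.g. [{'time': '2009-11-10T23:00:00Z', 'value': 0.64}]

# get_list_database() now returns a list of dicts instead of bare names,
# e.g. [{'name': 'example_db'}].
databases = client.get_list_database()
```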
+ """ + for key in self.keys(): + yield list(self.__getitem__(key)) + + def _tag_matches(self, tags, filter): + """Checks if all key/values in filter match in tags""" + for tag_name, tag_value in filter.items(): + # using _sentinel as I'm not sure that "None" + # could be used, because it could be a valid + # serie_tags value : when a serie has no such tag + # then I think it's set to /null/None/.. TBC.. + serie_tag_value = tags.get(tag_name, _sentinel) + if serie_tag_value != tag_value: + return False + return True + + def _get_series(self): + """Returns all series""" + series = [] + try: + for result in self.raw['results']: + series.extend(result['series']) + except KeyError: + pass + return series + + def __len__(self): + return len(self.keys()) + + def keys(self): + keys = [] + for serie in self._get_series(): + keys.append( + (serie.get('name', 'results'), serie.get('tags', None)) + ) + return keys + + def items(self): + items = [] + for serie in self._get_series(): + serie_key = (serie.get('name', 'results'), serie.get('tags', None)) + items.append( + (serie_key, self[serie_key]) + ) + return items + + @staticmethod + def point_from_cols_vals(cols, vals): + point = {} + for col_index, col_name in enumerate(cols): + point[col_name] = vals[col_index] + return point diff --git a/tests/influxdb/client_test.py b/tests/influxdb/client_test.py index 436e5aad..9bb8bec8 100644 --- a/tests/influxdb/client_test.py +++ b/tests/influxdb/client_test.py @@ -265,12 +265,11 @@ def test_query(self): "http://localhost:8086/query", text=example_response ) - self.assertDictEqual( - self.cli.query('select * from foo'), - {'cpu_load_short': - [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}], - 'sdfsdfsdf': - [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]} + rs = self.cli.query('select * from foo') + + self.assertListEqual( + list(rs['cpu_load_short']), + [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}] ) @unittest.skip('Not implemented for 0.9') @@ -349,14 +348,19 @@ def test_drop_database_fails(self): cli.drop_database('old_db') def test_get_list_database(self): - data = {'results': [{'series': [ - {'name': 'databases', 'columns': ['name'], - 'values': [['mydb'], ['myotherdb']]}]}]} + data = {'results': [ + {'series': [ + {'name': 'databases', + 'values': [ + ['new_db_1'], + ['new_db_2']], + 'columns': ['name']}]} + ]} with _mocked_session(self.cli, 'get', 200, json.dumps(data)): self.assertListEqual( self.cli.get_list_database(), - ['mydb', 'myotherdb'] + [{'name': 'new_db_1'}, {'name': 'new_db_2'}] ) @raises(Exception) @@ -367,8 +371,9 @@ def test_get_list_database_fails(self): def test_get_list_series(self): example_response = \ - '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\ - ' "columns": ["name", "duration", "replicaN"]}]}]}' + '{"results": [{"series": [{"name": "cpu_load_short", "columns": ' \ + '["_id", "host", "region"], "values": ' \ + '[[1, "server01", "us-west"]]}]}]}' with requests_mock.Mocker() as m: m.register_uri( @@ -376,10 +381,13 @@ def test_get_list_series(self): "http://localhost:8086/query", text=example_response ) + self.assertListEqual( self.cli.get_list_series(), - [{'duration': '24h0m0s', - 'name': 'fsfdsdf', 'replicaN': 2}] + [{'name': 'cpu_load_short', + 'tags': [ + {'host': 'server01', '_id': 1, 'region': 'us-west'} + ]}] ) def test_create_retention_policy_default(self): diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py index 968d234b..6bdbb4af 100644 --- a/tests/influxdb/client_test_with_server.py +++ 
diff --git a/tests/influxdb/client_test_with_server.py b/tests/influxdb/client_test_with_server.py
index 968d234b..6bdbb4af 100644
--- a/tests/influxdb/client_test_with_server.py
+++ b/tests/influxdb/client_test_with_server.py
@@ -11,12 +11,9 @@
 """
 
 from __future__ import print_function
-import random
-
 import datetime
 import distutils.spawn
 from functools import partial
-import itertools
 import os
 import re
 import shutil
@@ -289,7 +286,7 @@ def test_create_database(self):
         self.assertIsNone(self.cli.create_database('new_db_2'))
         self.assertEqual(
             self.cli.get_list_database(),
-            ['new_db_1', 'new_db_2']
+            [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
         )
 
     def test_create_database_fails(self):
@@ -303,7 +300,7 @@ def test_create_database_fails(self):
     def test_drop_database(self):
         self.test_create_database()
         self.assertIsNone(self.cli.drop_database('new_db_1'))
-        self.assertEqual(['new_db_2'], self.cli.get_list_database())
+        self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database())
 
     def test_drop_database_fails(self):
         with self.assertRaises(InfluxDBClientError) as ctx:
@@ -352,29 +349,46 @@ def test_write_check_read(self):
         )
 
     def test_write_points(self):
-        ''' same as test_write() but with write_points \o/ '''
+        """ same as test_write() but with write_points \o/ """
         self.assertIs(True, self.cli.write_points(dummy_point))
 
     def test_write_points_check_read(self):
-        ''' same as test_write_check_read() but with write_points \o/ '''
+        """ same as test_write_check_read() but with write_points \o/ """
         self.test_write_points()
         time.sleep(1)  # same as test_write_check_read()
+
+        rsp = self.cli.query('SELECT * FROM cpu_load_short')
+
         self.assertEqual(
-            {'cpu_load_short': [
-                {'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]},
-            self.cli.query('SELECT * FROM cpu_load_short'))
+            list(rsp),
+            [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]]
+        )
+
+        rsp2 = list(rsp['cpu_load_short'])
+        self.assertEqual(len(rsp2), 1)
+        pt = rsp2[0]
+
+        self.assertEqual(
+            pt,
+            {'time': '2009-11-10T23:00:00Z', 'value': 0.64}
+        )
 
     def test_write_multiple_points_different_series(self):
         self.assertIs(True, self.cli.write_points(dummy_points))
         time.sleep(1)
+
+        rsp = self.cli.query('SELECT * FROM cpu_load_short')
+        lrsp = list(rsp)
+
         self.assertEqual(
-            {'cpu_load_short': [
-                {'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]},
-            self.cli.query('SELECT * FROM cpu_load_short'))
+            [[{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]],
+            lrsp
+        )
+
+        rsp = list(self.cli.query('SELECT * FROM memory'))
+
         self.assertEqual(
-            {'memory': [
-                {'time': '2009-11-10T23:01:35Z', 'value': 33}]},
-            self.cli.query('SELECT * FROM memory'))
+            rsp,
+            [[{'value': 33, 'time': '2009-11-10T23:01:35Z'}]]
+        )
 
     @unittest.skip('Not implemented for 0.9')
     def test_write_points_batch(self):
@@ -419,7 +433,6 @@ def test_write_points_with_precision(self):
             ('u', base_s_regex + '\.\d{6}Z', 1),
             ('ms', base_s_regex + '\.\d{3}Z', 1),
             ('s', base_s_regex + 'Z', 1),
-            ('m', base_regex + '\d{2}:00Z', 60),
 
             # ('h', base_regex + '00:00Z', ),
             # that would require a sleep of possibly up to 3600 secs (/ 2 ?)..
@@ -457,17 +470,20 @@ def test_write_points_with_precision(self):
             else:
                 pass  # sys.stderr.write('ok !\n')
-                sleep_time = 0
+
+            # sys.stderr.write('sleeping %s..\n' % sleep_time)
             if sleep_time:
-                # sys.stderr.write('sleeping %s..\n' % sleep_time)
                 time.sleep(sleep_time)
 
             rsp = self.cli.query('SELECT * FROM cpu_load_short', database=db)
-            # sys.stderr.write('precision=%s rsp_timestamp = %r\n' % (
-            #     precision, rsp['cpu_load_short'][0]['time']))
-            m = re.match(expected_regex, rsp['cpu_load_short'][0]['time'])
+
+            m = re.match(
+                expected_regex,
+                list(rsp['cpu_load_short'])[0]['time']
+            )
             self.assertIsNotNone(m)
 
             self.cli.drop_database(db)
@@ -498,14 +514,18 @@ def test_query_chunked(self):
 
     def test_get_list_series_empty(self):
         rsp = self.cli.get_list_series()
-        self.assertEqual({}, rsp)
+        self.assertEqual([], rsp)
 
     def test_get_list_series_non_empty(self):
         self.cli.write_points(dummy_point)
         rsp = self.cli.get_list_series()
+
         self.assertEqual(
-            {'cpu_load_short': [
-                {'region': 'us-west', 'host': 'server01', '_id': 1}]},
+            [
+                {'name': 'cpu_load_short',
+                 'tags': [{'host': 'server01', '_id': 1,
+                           'region': 'us-west'}]}
+            ],
             rsp
         )
 
@@ -513,23 +533,26 @@ def test_default_retention_policy(self):
         rsp = self.cli.get_list_retention_policies()
         self.assertEqual(
             [
-                {'duration': '0', 'default': True,
-                 'replicaN': 1, 'name': 'default'}],
+                {'name': 'default',
+                 'duration': '0',
+                 'replicaN': 1,
+                 'default': True}
+            ],
             rsp
         )
 
     def test_create_retention_policy_default(self):
-        rsp = self.cli.create_retention_policy('somename', '1d', 4,
-                                               default=True)
-        self.assertIsNone(rsp)
+        self.cli.create_retention_policy('somename', '1d', 4, default=True)
+        self.cli.create_retention_policy('another', '2d', 3, default=False)
+
         rsp = self.cli.get_list_retention_policies()
+
         self.assertEqual(
-            [
-                {'duration': '0', 'default': False,
-                 'replicaN': 1, 'name': 'default'},
-                {'duration': '24h0m0s', 'default': True,
-                 'replicaN': 4, 'name': 'somename'}
-            ],
+            [{'duration': '48h0m0s', 'default': False,
+              'replicaN': 3, 'name': 'another'},
+             {'duration': '0', 'default': False,
+              'replicaN': 1, 'name': 'default'},
+             {'duration': '24h0m0s', 'default': True,
+              'replicaN': 4, 'name': 'somename'}],
             rsp
         )
 
@@ -537,17 +560,15 @@ def test_create_retention_policy(self):
         self.cli.create_retention_policy('somename', '1d', 4)
         rsp = self.cli.get_list_retention_policies()
         self.assertEqual(
-            [
-                {'duration': '0', 'default': True, 'replicaN': 1,
-                 'name': 'default'},
-                {'duration': '24h0m0s', 'default': False, 'replicaN': 4,
-                 'name': 'somename'}
-            ],
+            [{'duration': '0', 'default': True,
+              'replicaN': 1, 'name': 'default'},
+             {'duration': '24h0m0s', 'default': False,
+              'replicaN': 4, 'name': 'somename'}],
             rsp
         )
 
     def test_issue_143(self):
-        pt = partial(point, 'serie', timestamp='2015-03-30T16:16:37Z')
+        pt = partial(point, 'a_serie_name', timestamp='2015-03-30T16:16:37Z')
         pts = [
             pt(value=15),
             pt(tags={'tag_1': 'value1'}, value=5),
             pt(tags={'tag_1': 'value2'}, value=10),
         ]
         self.cli.write_points(pts)
         time.sleep(1)
-        rsp = self.cli.query('SELECT * FROM serie GROUP BY tag_1')
-        # print(rsp, file=sys.stderr)
-        self.assertEqual({
-            ('serie', (('tag_1', ''),)): [
-                {'time': '2015-03-30T16:16:37Z', 'value': 15}],
-            ('serie', (('tag_1', 'value1'),)): [
-                {'time': '2015-03-30T16:16:37Z', 'value': 5}],
-            ('serie', (('tag_1', 'value2'),)): [
-                {'time': '2015-03-30T16:16:37Z', 'value': 10}]},
+        rsp = list(self.cli.query('SELECT * FROM a_serie_name GROUP BY tag_1'))
+
+        self.assertEqual(
+            [
+                [{'value': 15, 'time': '2015-03-30T16:16:37Z'}],
+                [{'value': 5, 'time': '2015-03-30T16:16:37Z'}],
+                [{'value': 10, 'time': '2015-03-30T16:16:37Z'}]
+            ],
             rsp
         )
 
@@ -577,74 +597,38 @@ def test_issue_143(self):
         self.cli.write_points(pts)
         time.sleep(1)
         rsp = self.cli.query('SELECT * FROM serie2 GROUP BY tag1,tag2')
-        # print(rsp, file=sys.stderr)
+
         self.assertEqual(
-            {
-                ('serie2', (('tag1', 'value1'), ('tag2', 'v1'))): [
-                    {'time': '2015-03-30T16:16:37Z', 'value': 0}
-                ],
-                ('serie2', (('tag1', 'value1'), ('tag2', 'v2'))): [
-                    {'time': '2015-03-30T16:16:37Z', 'value': 5}
-                ],
-                ('serie2', (('tag1', 'value2'), ('tag2', 'v1'))): [
-                    {'time': '2015-03-30T16:16:37Z', 'value': 10}]
-            },
-            rsp
+            [
+                [{'value': 0, 'time': '2015-03-30T16:16:37Z'}],
+                [{'value': 5, 'time': '2015-03-30T16:16:37Z'}],
+                [{'value': 10, 'time': '2015-03-30T16:16:37Z'}]
+            ],
+            list(rsp)
         )
 
-    def test_tags_json_order(self):
-        n_pts = 100
-        n_tags = 5  # that will make 120 possible orders (fact(5) == 120)
-        all_tags = ['tag%s' % i for i in range(n_tags)]
-        n_tags_values = 1 + n_tags // 3
-        all_tags_values = ['value%s' % random.randint(0, i)
-                           for i in range(n_tags_values)]
-        pt = partial(point, 'serie', timestamp='2015-03-30T16:16:37Z')
+        all_tag2_equal_v1 = list(rsp[None, {'tag2': 'v1'}])
+
+        self.assertEqual(
+            [{'value': 0, 'time': '2015-03-30T16:16:37Z'},
+             {'value': 10, 'time': '2015-03-30T16:16:37Z'}],
+            all_tag2_equal_v1,
+        )
+
+    def test_query_multiple_series(self):
+        pt = partial(point, 'serie1', timestamp='2015-03-30T16:16:37Z')
         pts = [
-            pt(value=random.randint(0, 100))
-            for _ in range(n_pts)
+            pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0),
         ]
-        for pt in pts:
-            tags = pt['tags'] = {}
-            for tag in all_tags:
-                tags[tag] = random.choice(all_tags_values)
+        self.cli.write_points(pts)
 
+        pt = partial(point, 'serie2', timestamp='1970-03-30T16:16:37Z')
+        pts = [
+            pt(tags={'tag1': 'value1', 'tag2': 'v1'},
+               value=0, data1=33, data2="bla"),
+        ]
         self.cli.write_points(pts)
-        time.sleep(1)
-
-        # Influxd, when queried with a "group by tag1(, tag2, ..)" and as far
-        # as we currently see, always returns the tags (alphabetically-)
-        # ordered by their name in the json response..
-        # That might not always be the case so here we will also be
-        # asserting that behavior.
-        expected_ordered_tags = tuple(sorted(all_tags))
-
-        # try all the possible orders of tags for the group by in the query:
-        for tags in itertools.permutations(all_tags):
-            query = ('SELECT * FROM serie '
-                     'GROUP BY %s' % ','.join(tags))
-            rsp = self.cli.query(query)
-            # and verify that, for each "serie_key" in the response,
-            # the tags names are ordered as we expect it:
-            for serie_key in rsp:
-                # first also asserts that the serie key is a 2-tuple:
-                self.assertTrue(isinstance(serie_key, tuple))
-                self.assertEqual(2, len(serie_key))
-                # also assert that the first component is an str instance:
-                self.assertIsInstance(serie_key[0], type(b''.decode()))
-                self.assertIsInstance(serie_key[1], tuple)
-                # also assert that the number of items in the second component
-                # is the number of tags requested in the group by actually,
-                # and that each one has correct format/type/..
-                self.assertEqual(n_tags, len(serie_key[1]))
-                for tag_data in serie_key[1]:
-                    self.assertIsInstance(tag_data, tuple)
-                    self.assertEqual(2, len(tag_data))
-                    tag_name = tag_data[0]
-                    self.assertIsInstance(tag_name, type(b''.decode()))
-                # then check the tags order:
-                rsp_tags = tuple(t[0] for t in serie_key[1])
-                self.assertEqual(expected_ordered_tags, rsp_tags)
 
 
 ############################################################################
@@ -673,7 +657,6 @@ def test_write_points_udp(self):
 
         self.assertEqual(
             # this is dummy_points :
-            {'cpu_load_short': [
-                {'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]},
-            rsp
+            [{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}],
+            list(rsp['cpu_load_short'])
         )
diff --git a/tests/influxdb/resultset_test.py b/tests/influxdb/resultset_test.py
new file mode 100644
index 00000000..0f047c8b
--- /dev/null
+++ b/tests/influxdb/resultset_test.py
@@ -0,0 +1,140 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+
+from influxdb.resultset import ResultSet
+
+
+class TestResultSet(unittest.TestCase):
+
+    def setUp(self):
+        self.query_response = {
+            "results": [
+                {"series": [{"name": "cpu_load_short",
+                             "tags": {"host": "server01",
+                                      "region": "us-west"},
+                             "columns": ["time", "value"],
+                             "values": [
+                                 ["2015-01-29T21:51:28.968422294Z", 0.64]
+                             ]},
+                            {"name": "cpu_load_short",
+                             "tags": {"host": "server02",
+                                      "region": "us-west"},
+                             "columns": ["time", "value"],
+                             "values": [
+                                 ["2015-01-29T21:51:28.968422294Z", 0.64]
+                             ]},
+                            {"name": "other_serie",
+                             "tags": {"host": "server01",
+                                      "region": "us-west"},
+                             "columns": ["time", "value"],
+                             "values": [
+                                 ["2015-01-29T21:51:28.968422294Z", 0.64]
+                             ]}]}
+            ]
+        }
+        self.rs = ResultSet(self.query_response)
+
+    def test_filter_by_name(self):
+        self.assertEqual(
+            list(self.rs['cpu_load_short']),
+            [
+                {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'},
+                {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}
+            ]
+        )
+
+    def test_filter_by_tags(self):
+        self.assertEqual(
+            list(self.rs[('cpu_load_short', {"host": "server01"})]),
+            [{'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}]
+        )
+
+        self.assertEqual(
+            list(self.rs[('cpu_load_short', {"region": "us-west"})]),
+            [
+                {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'},
+                {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}
+            ]
+        )
+
+    def test_keys(self):
+        self.assertEqual(
+            self.rs.keys(),
+            [
+                ('cpu_load_short', {'host': 'server01', 'region': 'us-west'}),
+                ('cpu_load_short', {'host': 'server02', 'region': 'us-west'}),
+                ('other_serie', {'host': 'server01', 'region': 'us-west'})
+            ]
+        )
+
+    def test_len(self):
+        self.assertEqual(
+            len(self.rs),
+            3
+        )
+
+    def test_items(self):
+        items = list(self.rs.items())
+        items_lists = [(item[0], list(item[1])) for item in items]
+
+        self.assertEqual(
+            items_lists,
+            [
+                (
+                    ('cpu_load_short',
+                     {'host': 'server01', 'region': 'us-west'}),
+                    [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}]
+                ),
+                (
+                    ('cpu_load_short',
+                     {'host': 'server02', 'region': 'us-west'}),
+                    [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}]
+                ),
+                (
+                    ('other_serie',
+                     {'host': 'server01', 'region': 'us-west'}),
+                    [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}]
+                )
+            ]
+        )
+
+    def test_point_from_cols_vals(self):
+        cols = ['col1', 'col2']
+        vals = [1, '2']
+
+        point = ResultSet.point_from_cols_vals(cols, vals)
+        self.assertDictEqual(
+            point,
+            {'col1': 1, 'col2': '2'}
+        )
+
+    def test_system_query(self):
+        rs = ResultSet(
+            {'results': [
+                {'series': [
+                    {'values': [['another', '48h0m0s', 3, False],
False], + ['default', '0', 1, False], + ['somename', '24h0m0s', 4, True]], + 'columns': ['name', 'duration', + 'replicaN', 'default']}]} + ] + } + ) + + self.assertEqual( + rs.keys(), + [('results', None)] + ) + + self.assertEqual( + list(rs['results']), + [ + {'duration': '48h0m0s', 'default': False, 'replicaN': 3, + 'name': 'another'}, + {'duration': '0', 'default': False, 'replicaN': 1, + 'name': 'default'}, + {'duration': '24h0m0s', 'default': True, 'replicaN': 4, + 'name': 'somename'} + ] + )