diff --git a/splitio/api/client.py b/splitio/api/client.py index d0bda3e7..5d3ef6f4 100644 --- a/splitio/api/client.py +++ b/splitio/api/client.py @@ -133,7 +133,7 @@ def set_telemetry_data(self, metric_name, telemetry_runtime_producer): self._metric_name = metric_name def is_sdk_endpoint_overridden(self): - return self._urls['sdk'] == SDK_URL + return self._urls['sdk'] != SDK_URL def _get_headers(self, extra_headers, sdk_key): headers = _build_basic_headers(sdk_key) diff --git a/splitio/api/splits.py b/splitio/api/splits.py index 4de9204a..619306a1 100644 --- a/splitio/api/splits.py +++ b/splitio/api/splits.py @@ -9,6 +9,7 @@ from splitio.models.telemetry import HTTPExceptionsAndLatencies from splitio.util.time import utctime_ms from splitio.spec import SPEC_VERSION +from splitio.sync import util _LOGGER = logging.getLogger(__name__) _SPEC_1_1 = "1.1" @@ -36,15 +37,20 @@ def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): self._spec_version = SPEC_VERSION self._last_proxy_check_timestamp = 0 self.clear_storage = False + self._old_spec_since = None - def _convert_to_new_spec(self, body): - return {"ff": {"d": body["splits"], "s": body["since"], "t": body["till"]}, - "rbs": {"d": [], "s": -1, "t": -1}} - - def _check_last_proxy_check_timestamp(self): + def _check_last_proxy_check_timestamp(self, since): if self._spec_version == _SPEC_1_1 and ((utctime_ms() - self._last_proxy_check_timestamp) >= _PROXY_CHECK_INTERVAL_MILLISECONDS_SS): _LOGGER.info("Switching to new Feature flag spec (%s) and fetching.", SPEC_VERSION); self._spec_version = SPEC_VERSION + self._old_spec_since = since + + def _check_old_spec_since(self, change_number): + if self._spec_version == _SPEC_1_1 and self._old_spec_since is not None: + since = self._old_spec_since + self._old_spec_since = None + return since + return change_number class SplitsAPI(SplitsAPIBase): # pylint: disable=too-few-public-methods @@ -80,7 +86,9 @@ def fetch_splits(self, change_number, rbs_change_number, fetch_options): :rtype: dict """ try: - self._check_last_proxy_check_timestamp() + self._check_last_proxy_check_timestamp(change_number) + change_number = self._check_old_spec_since(change_number) + query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) response = self._client.get( 'sdk', @@ -91,7 +99,7 @@ def fetch_splits(self, change_number, rbs_change_number, fetch_options): ) if 200 <= response.status_code < 300: if self._spec_version == _SPEC_1_1: - return self._convert_to_new_spec(json.loads(response.body)) + return util.convert_to_new_spec(json.loads(response.body)) self.clear_storage = self._last_proxy_check_timestamp != 0 self._last_proxy_check_timestamp = 0 @@ -148,7 +156,9 @@ async def fetch_splits(self, change_number, rbs_change_number, fetch_options): :rtype: dict """ try: - self._check_last_proxy_check_timestamp() + self._check_last_proxy_check_timestamp(change_number) + change_number = self._check_old_spec_since(change_number) + query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) response = await self._client.get( 'sdk', @@ -159,7 +169,7 @@ async def fetch_splits(self, change_number, rbs_change_number, fetch_options): ) if 200 <= response.status_code < 300: if self._spec_version == _SPEC_1_1: - return self._convert_to_new_spec(json.loads(response.body)) + return util.convert_to_new_spec(json.loads(response.body)) self.clear_storage = self._last_proxy_check_timestamp != 0 self._last_proxy_check_timestamp = 0 diff 
--git a/splitio/client/factory.py b/splitio/client/factory.py index 7c56819f..f6070243 100644 --- a/splitio/client/factory.py +++ b/splitio/client/factory.py @@ -564,7 +564,7 @@ def _build_in_memory_factory(api_key, cfg, sdk_url=None, events_url=None, # pyl synchronizers = SplitSynchronizers( SplitSynchronizer(apis['splits'], storages['splits'], storages['rule_based_segments']), - SegmentSynchronizer(apis['segments'], storages['splits'], storages['segments']), + SegmentSynchronizer(apis['segments'], storages['splits'], storages['segments'], storages['rule_based_segments']), ImpressionSynchronizer(apis['impressions'], storages['impressions'], cfg['impressionsBulkSize']), EventSynchronizer(apis['events'], storages['events'], cfg['eventsBulkSize']), @@ -693,7 +693,7 @@ async def _build_in_memory_factory_async(api_key, cfg, sdk_url=None, events_url= synchronizers = SplitSynchronizers( SplitSynchronizerAsync(apis['splits'], storages['splits'], storages['rule_based_segments']), - SegmentSynchronizerAsync(apis['segments'], storages['splits'], storages['segments']), + SegmentSynchronizerAsync(apis['segments'], storages['splits'], storages['segments'], storages['rule_based_segments']), ImpressionSynchronizerAsync(apis['impressions'], storages['impressions'], cfg['impressionsBulkSize']), EventSynchronizerAsync(apis['events'], storages['events'], cfg['eventsBulkSize']), diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index 3bd11512..d3e05f78 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -6,11 +6,12 @@ from splitio.models.grammar.condition import ConditionType from splitio.models.grammar.matchers.misc import DependencyMatcher from splitio.models.grammar.matchers.keys import UserDefinedSegmentMatcher -from splitio.models.grammar.matchers.rule_based_segment import RuleBasedSegmentMatcher +from splitio.models.grammar.matchers import RuleBasedSegmentMatcher +from splitio.models.rule_based_segments import SegmentType from splitio.optional.loaders import asyncio CONTROL = 'control' -EvaluationContext = namedtuple('EvaluationContext', ['flags', 'segment_memberships', 'segment_rbs_memberships', 'segment_rbs_conditions']) +EvaluationContext = namedtuple('EvaluationContext', ['flags', 'segment_memberships', 'rbs_segments']) _LOGGER = logging.getLogger(__name__) @@ -114,47 +115,24 @@ def context_for(self, key, feature_names): :rtype: EvaluationContext """ pending = set(feature_names) + pending_rbs = set() splits = {} + rb_segments = {} pending_memberships = set() - pending_rbs_memberships = set() - while pending: + while pending or pending_rbs: fetched = self._flag_storage.fetch_many(list(pending)) - features = filter_missing(fetched) - splits.update(features) - pending = set() - for feature in features.values(): - cf, cs, crbs = get_dependencies(feature) - pending.update(filter(lambda f: f not in splits, cf)) - pending_memberships.update(cs) - pending_rbs_memberships.update(crbs) - - rbs_segment_memberships = {} - rbs_segment_conditions = {} - key_membership = False - segment_memberhsip = False - for rbs_segment in pending_rbs_memberships: - rbs_segment_obj = self._rbs_segment_storage.get(rbs_segment) - pending_memberships.update(rbs_segment_obj.get_condition_segment_names()) - - key_membership = key in rbs_segment_obj.excluded.get_excluded_keys() - segment_memberhsip = False - for segment_name in rbs_segment_obj.excluded.get_excluded_segments(): - if self._segment_storage.segment_contains(segment_name, key): - segment_memberhsip = True - break - - 
rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) - if not (segment_memberhsip or key_membership): - rbs_segment_conditions.update({rbs_segment: [condition for condition in rbs_segment_obj.conditions]}) - + fetched_rbs = self._rbs_segment_storage.fetch_many(list(pending_rbs)) + features, rbsegments, splits, rb_segments = update_objects(fetched, fetched_rbs, splits, rb_segments) + pending, pending_memberships, pending_rbs = get_pending_objects(features, splits, rbsegments, rb_segments, pending_memberships) + return EvaluationContext( splits, { segment: self._segment_storage.segment_contains(segment, key) for segment in pending_memberships }, - rbs_segment_memberships, - rbs_segment_conditions + rb_segments ) + class AsyncEvaluationDataFactory: @@ -173,60 +151,36 @@ async def context_for(self, key, feature_names): :rtype: EvaluationContext """ pending = set(feature_names) + pending_rbs = set() splits = {} + rb_segments = {} pending_memberships = set() - pending_rbs_memberships = set() - while pending: + while pending or pending_rbs: fetched = await self._flag_storage.fetch_many(list(pending)) - features = filter_missing(fetched) - splits.update(features) - pending = set() - for feature in features.values(): - cf, cs, crbs = get_dependencies(feature) - pending.update(filter(lambda f: f not in splits, cf)) - pending_memberships.update(cs) - pending_rbs_memberships.update(crbs) - - rbs_segment_memberships = {} - rbs_segment_conditions = {} - key_membership = False - segment_memberhsip = False - for rbs_segment in pending_rbs_memberships: - rbs_segment_obj = await self._rbs_segment_storage.get(rbs_segment) - pending_memberships.update(rbs_segment_obj.get_condition_segment_names()) - - key_membership = key in rbs_segment_obj.excluded.get_excluded_keys() - segment_memberhsip = False - for segment_name in rbs_segment_obj.excluded.get_excluded_segments(): - if await self._segment_storage.segment_contains(segment_name, key): - segment_memberhsip = True - break - - rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) - if not (segment_memberhsip or key_membership): - rbs_segment_conditions.update({rbs_segment: [condition for condition in rbs_segment_obj.conditions]}) + fetched_rbs = await self._rbs_segment_storage.fetch_many(list(pending_rbs)) + features, rbsegments, splits, rb_segments = update_objects(fetched, fetched_rbs, splits, rb_segments) + pending, pending_memberships, pending_rbs = get_pending_objects(features, splits, rbsegments, rb_segments, pending_memberships) segment_names = list(pending_memberships) segment_memberships = await asyncio.gather(*[ self._segment_storage.segment_contains(segment, key) for segment in segment_names ]) + return EvaluationContext( splits, dict(zip(segment_names, segment_memberships)), - rbs_segment_memberships, - rbs_segment_conditions + rb_segments ) - -def get_dependencies(feature): +def get_dependencies(object): """ :rtype: tuple(list, list) """ feature_names = [] segment_names = [] rbs_segment_names = [] - for condition in feature.conditions: + for condition in object.conditions: for matcher in condition.matchers: if isinstance(matcher,RuleBasedSegmentMatcher): rbs_segment_names.append(matcher._rbs_segment_name) @@ -239,3 +193,34 @@ def get_dependencies(feature): def filter_missing(features): return {k: v for (k, v) in features.items() if v is not None} + +def get_pending_objects(features, splits, rbsegments, rb_segments, pending_memberships): + pending = set() + pending_rbs = set() + for 
feature in features.values(): + cf, cs, crbs = get_dependencies(feature) + pending.update(filter(lambda f: f not in splits, cf)) + pending_memberships.update(cs) + pending_rbs.update(filter(lambda f: f not in rb_segments, crbs)) + + for rb_segment in rbsegments.values(): + cf, cs, crbs = get_dependencies(rb_segment) + pending.update(filter(lambda f: f not in splits, cf)) + pending_memberships.update(cs) + for excluded_segment in rb_segment.excluded.get_excluded_segments(): + if excluded_segment.type == SegmentType.STANDARD: + pending_memberships.add(excluded_segment.name) + else: + pending_rbs.update(filter(lambda f: f not in rb_segments, [excluded_segment.name])) + pending_rbs.update(filter(lambda f: f not in rb_segments, crbs)) + + return pending, pending_memberships, pending_rbs + +def update_objects(fetched, fetched_rbs, splits, rb_segments): + features = filter_missing(fetched) + rbsegments = filter_missing(fetched_rbs) + splits.update(features) + rb_segments.update(rbsegments) + + return features, rbsegments, splits, rb_segments + \ No newline at end of file diff --git a/splitio/models/grammar/matchers/rule_based_segment.py b/splitio/models/grammar/matchers/rule_based_segment.py index 0e0aa665..06baf4b2 100644 --- a/splitio/models/grammar/matchers/rule_based_segment.py +++ b/splitio/models/grammar/matchers/rule_based_segment.py @@ -1,5 +1,6 @@ """Rule based segment matcher classes.""" from splitio.models.grammar.matchers.base import Matcher +from splitio.models.rule_based_segments import SegmentType class RuleBasedSegmentMatcher(Matcher): @@ -29,15 +30,15 @@ def _match(self, key, attributes=None, context=None): if self._rbs_segment_name == None: return False - # Check if rbs segment has exclusions - if context['ec'].segment_rbs_memberships.get(self._rbs_segment_name): + rb_segment = context['ec'].rbs_segments.get(self._rbs_segment_name) + + if key in rb_segment.excluded.get_excluded_keys(): + return False + + if self._match_dep_rb_segments(rb_segment.excluded.get_excluded_segments(), key, attributes, context): return False - for parsed_condition in context['ec'].segment_rbs_conditions.get(self._rbs_segment_name): - if parsed_condition.matches(key, attributes, context): - return True - - return False + return self._match_conditions(rb_segment.conditions, key, attributes, context) def _add_matcher_specific_properties_to_json(self): """Return UserDefinedSegment specific properties.""" @@ -45,4 +46,26 @@ def _add_matcher_specific_properties_to_json(self): 'userDefinedSegmentMatcherData': { 'segmentName': self._rbs_segment_name } - } \ No newline at end of file + } + + def _match_conditions(self, rbs_segment_conditions, key, attributes, context): + for parsed_condition in rbs_segment_conditions: + if parsed_condition.matches(key, attributes, context): + return True + + return False + + def _match_dep_rb_segments(self, excluded_rb_segments, key, attributes, context): + for excluded_rb_segment in excluded_rb_segments: + if excluded_rb_segment.type == SegmentType.STANDARD: + if context['ec'].segment_memberships[excluded_rb_segment.name]: + return True + else: + excluded_segment = context['ec'].rbs_segments.get(excluded_rb_segment.name) + if key in excluded_segment.excluded.get_excluded_keys(): + return False + + if self._match_dep_rb_segments(excluded_segment.excluded.get_excluded_segments(), key, attributes, context): + return True + + return self._match_conditions(excluded_segment.conditions, key, attributes, context) diff --git a/splitio/models/rule_based_segments.py 
b/splitio/models/rule_based_segments.py index 5914983c..f7bf3f4d 100644 --- a/splitio/models/rule_based_segments.py +++ b/splitio/models/rule_based_segments.py @@ -1,5 +1,6 @@ """RuleBasedSegment module.""" +from enum import Enum import logging from splitio.models import MatcherNotFoundException @@ -9,6 +10,12 @@ _LOGGER = logging.getLogger(__name__) +class SegmentType(Enum): + """Segment type.""" + + STANDARD = "standard" + RULE_BASED = "rule-based" + class RuleBasedSegment(object): """RuleBasedSegment object class.""" @@ -104,6 +111,16 @@ def from_raw(raw_rule_based_segment): _LOGGER.error(str(e)) _LOGGER.debug("Using default conditions template for feature flag: %s", raw_rule_based_segment['name']) conditions = [condition.from_raw(_DEFAULT_CONDITIONS_TEMPLATE)] + + if raw_rule_based_segment.get('excluded') == None: + raw_rule_based_segment['excluded'] = {'keys': [], 'segments': []} + + if raw_rule_based_segment['excluded'].get('keys') == None: + raw_rule_based_segment['excluded']['keys'] = [] + + if raw_rule_based_segment['excluded'].get('segments') == None: + raw_rule_based_segment['excluded']['segments'] = [] + return RuleBasedSegment( raw_rule_based_segment['name'], raw_rule_based_segment['trafficTypeName'], @@ -125,7 +142,7 @@ def __init__(self, keys, segments): :type segments: List """ self._keys = keys - self._segments = segments + self._segments = [ExcludedSegment(segment['name'], segment['type']) for segment in segments] def get_excluded_keys(self): """Return excluded keys.""" @@ -135,9 +152,44 @@ def get_excluded_segments(self): """Return excluded segments""" return self._segments + def get_excluded_standard_segments(self): + """Return the names of excluded standard segments.""" + to_return = [] + for segment in self._segments: + if segment.type == SegmentType.STANDARD: + to_return.append(segment.name) + return to_return + def to_json(self): """Return a JSON representation of this object.""" return { 'keys': self._keys, 'segments': self._segments } + +class ExcludedSegment(object): + + def __init__(self, name, type): + """ + Class constructor.
+ + :param name: rule based segment name + :type name: str + :param type: segment type + :type type: str + """ + self._name = name + try: + self._type = SegmentType(type) + except ValueError: + self._type = SegmentType.STANDARD + + @property + def name(self): + """Return name.""" + return self._name + + @property + def type(self): + """Return type.""" + return self._type diff --git a/splitio/storage/inmemmory.py b/splitio/storage/inmemmory.py index 817e7d86..e1740b72 100644 --- a/splitio/storage/inmemmory.py +++ b/splitio/storage/inmemmory.py @@ -230,7 +230,10 @@ def contains(self, segment_names): """ with self._lock: return set(segment_names).issubset(self._rule_based_segments.keys()) - + + def fetch_many(self, segment_names): + return {rb_segment_name: self.get(rb_segment_name) for rb_segment_name in segment_names} + class InMemoryRuleBasedSegmentStorageAsync(RuleBasedSegmentsStorage): """InMemory implementation of a feature flag storage base.""" def __init__(self): @@ -243,7 +246,7 @@ async def clear(self): """ Clear storage """ - with self._lock: + async with self._lock: self._rule_based_segments = {} self._change_number = -1 @@ -354,6 +357,9 @@ async def contains(self, segment_names): async with self._lock: return set(segment_names).issubset(self._rule_based_segments.keys()) + async def fetch_many(self, segment_names): + return {rb_segment_name: await self.get(rb_segment_name) for rb_segment_name in segment_names} + class InMemorySplitStorageBase(SplitStorage): """InMemory implementation of a feature flag storage base.""" @@ -702,7 +708,7 @@ async def clear(self): """ Clear storage """ - with self._lock: + async with self._lock: self._feature_flags = {} self._change_number = -1 self._traffic_types = Counter() diff --git a/splitio/storage/pluggable.py b/splitio/storage/pluggable.py index c27a92fd..36b27d7d 100644 --- a/splitio/storage/pluggable.py +++ b/splitio/storage/pluggable.py @@ -177,6 +177,25 @@ def get_segment_names(self): _LOGGER.error('Error getting rule based segments names from storage') _LOGGER.debug('Error: ', exc_info=True) return None + + def fetch_many(self, rb_segment_names): + """ + Retrieve rule based segments. + + :param rb_segment_names: Names of the rule based segments to fetch. + :type rb_segment_names: list(str) + + :return: A dict with rule based segment objects parsed from storage. + :rtype: dict(rb_segment_names, splitio.models.rule_based_segment.RuleBasedSegment) + """ + try: + prefix_added = [self._prefix.format(segment_name=rb_segment_name) for rb_segment_name in rb_segment_names] + return {rb_segment['name']: rule_based_segments.from_raw(rb_segment) for rb_segment in self._pluggable_adapter.get_many(prefix_added)} + + except Exception: + _LOGGER.error('Error getting rule based segments from storage') + _LOGGER.debug('Error: ', exc_info=True) + return None class PluggableRuleBasedSegmentsStorageAsync(PluggableRuleBasedSegmentsStorageBase): """Pluggable storage for rule based segments.""" @@ -256,6 +275,25 @@ async def get_segment_names(self): _LOGGER.debug('Error: ', exc_info=True) return None + async def fetch_many(self, rb_segment_names): + """ + Retrieve rule based segments. + + :param rb_segment_names: Names of the rule based segments to fetch. + :type rb_segment_names: list(str) + + :return: A dict with rule based segment objects parsed from storage.
+ :rtype: dict(rb_segment_names, splitio.models.rule_based_segment.RuleBasedSegment) + """ + try: + prefix_added = [self._prefix.format(segment_name=rb_segment_name) for rb_segment_name in rb_segment_names] + return {rb_segment['name']: rule_based_segments.from_raw(rb_segment) for rb_segment in await self._pluggable_adapter.get_many(prefix_added)} + + except Exception: + _LOGGER.error('Error getting rule based segments from storage') + _LOGGER.debug('Error: ', exc_info=True) + return None + class PluggableSplitStorageBase(SplitStorage): """InMemory implementation of a feature flag storage.""" diff --git a/splitio/storage/redis.py b/splitio/storage/redis.py index e5398cf7..09ddee29 100644 --- a/splitio/storage/redis.py +++ b/splitio/storage/redis.py @@ -131,6 +131,35 @@ def get_large_segment_names(self): """ pass + def fetch_many(self, segment_names): + """ + Retrieve rule based segments. + + :param segment_names: Names of the rule based segments to fetch. + :type segment_names: list(str) + + :return: A dict with rule based segment objects parsed from redis. + :rtype: dict(segment_name, splitio.models.rule_based_segment.RuleBasedSegment) + """ + to_return = dict() + try: + keys = [self._get_key(segment_name) for segment_name in segment_names] + raw_rbs_segments = self._redis.mget(keys) + _LOGGER.debug("Fetching rule based segments [%s] from redis" % segment_names) + _LOGGER.debug(raw_rbs_segments) + for i in range(len(raw_rbs_segments)): + rbs_segment = None + try: + rbs_segment = rule_based_segments.from_raw(json.loads(raw_rbs_segments[i])) + except (ValueError, TypeError): + _LOGGER.error('Could not parse rule based segment.') + _LOGGER.debug("Raw rule based segment that failed parsing attempt: %s", raw_rbs_segments[i]) + to_return[segment_names[i]] = rbs_segment + except RedisAdapterException: + _LOGGER.error('Error fetching rule based segments from storage') + _LOGGER.debug('Error: ', exc_info=True) + return to_return + class RedisRuleBasedSegmentsStorageAsync(RuleBasedSegmentsStorage): """Redis-based storage for rule based segments.""" @@ -246,6 +275,35 @@ async def get_large_segment_names(self): """ pass + async def fetch_many(self, segment_names): + """ + Retrieve rule based segments. + + :param segment_names: Names of the rule based segments to fetch. + :type segment_names: list(str) + + :return: A dict with rule based segment objects parsed from redis.
+ :rtype: dict(segment_name, splitio.models.rule_based_segment.RuleBasedSegment) + """ + to_return = dict() + try: + keys = [self._get_key(segment_name) for segment_name in segment_names] + raw_rbs_segments = await self._redis.mget(keys) + _LOGGER.debug("Fetching rule based segments [%s] from redis" % segment_names) + _LOGGER.debug(raw_rbs_segments) + for i in range(len(raw_rbs_segments)): + rbs_segment = None + try: + rbs_segment = rule_based_segments.from_raw(json.loads(raw_rbs_segments[i])) + except (ValueError, TypeError): + _LOGGER.error('Could not parse rule based segment.') + _LOGGER.debug("Raw rule based segment that failed parsing attempt: %s", raw_rbs_segments[i]) + to_return[segment_names[i]] = rbs_segment + except RedisAdapterException: + _LOGGER.error('Error fetching rule based segments from storage') + _LOGGER.debug('Error: ', exc_info=True) + return to_return + class RedisSplitStorageBase(SplitStorage): """Redis-based storage base for feature flags.""" diff --git a/splitio/sync/segment.py b/splitio/sync/segment.py index 59d9fad8..a87759e1 100644 --- a/splitio/sync/segment.py +++ b/splitio/sync/segment.py @@ -10,6 +10,7 @@ from splitio.util.backoff import Backoff from splitio.optional.loaders import asyncio, aiofiles from splitio.sync import util +from splitio.util.storage_helper import get_standard_segment_names_in_rbs_storage, get_standard_segment_names_in_rbs_storage_async from splitio.optional.loaders import asyncio _LOGGER = logging.getLogger(__name__) @@ -22,7 +23,7 @@ class SegmentSynchronizer(object): - def __init__(self, segment_api, feature_flag_storage, segment_storage): + def __init__(self, segment_api, feature_flag_storage, segment_storage, rule_based_segment_storage): """ Class constructor. @@ -39,6 +40,7 @@ def __init__(self, segment_api, feature_flag_storage, segment_storage): self._api = segment_api self._feature_flag_storage = feature_flag_storage self._segment_storage = segment_storage + self._rule_based_segment_storage = rule_based_segment_storage self._worker_pool = workerpool.WorkerPool(_MAX_WORKERS, self.synchronize_segment) self._worker_pool.start() self._backoff = Backoff( @@ -181,9 +183,12 @@ def synchronize_segments(self, segment_names = None, dont_wait = False): :rtype: bool """ if segment_names is None: - segment_names = self._feature_flag_storage.get_segment_names() + segment_names = set(self._feature_flag_storage.get_segment_names()) + segment_names.update(get_standard_segment_names_in_rbs_storage(self._rule_based_segment_storage)) for segment_name in segment_names: + _LOGGER.debug("Adding segment name to sync worker") + _LOGGER.debug(segment_name) self._worker_pool.submit_work(segment_name) if (dont_wait): return True @@ -204,7 +209,7 @@ def segment_exist_in_storage(self, segment_name): class SegmentSynchronizerAsync(object): - def __init__(self, segment_api, feature_flag_storage, segment_storage): + def __init__(self, segment_api, feature_flag_storage, segment_storage, rule_based_segment_storage): """ Class constructor.
@@ -221,6 +226,7 @@ def __init__(self, segment_api, feature_flag_storage, segment_storage): self._api = segment_api self._feature_flag_storage = feature_flag_storage self._segment_storage = segment_storage + self._rule_based_segment_storage = rule_based_segment_storage self._worker_pool = workerpool.WorkerPoolAsync(_MAX_WORKERS, self.synchronize_segment) self._worker_pool.start() self._backoff = Backoff( @@ -364,7 +370,8 @@ async def synchronize_segments(self, segment_names = None, dont_wait = False): :rtype: bool """ if segment_names is None: - segment_names = await self._feature_flag_storage.get_segment_names() + segment_names = set(await self._feature_flag_storage.get_segment_names()) + segment_names.update(await get_standard_segment_names_in_rbs_storage_async(self._rule_based_segment_storage)) self._jobs = await self._worker_pool.submit_work(segment_names) if (dont_wait): diff --git a/splitio/sync/split.py b/splitio/sync/split.py index 3a16068b..1d1722f6 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -15,6 +15,7 @@ from splitio.util.time import get_current_epoch_time_ms from splitio.util.storage_helper import update_feature_flag_storage, update_feature_flag_storage_async, \ update_rule_based_segment_storage, update_rule_based_segment_storage_async + from splitio.sync import util from splitio.optional.loaders import asyncio, aiofiles @@ -138,7 +139,7 @@ def _fetch_until(self, fetch_options, till=None, rbs_till=None): rbs_segment_list = update_rule_based_segment_storage(self._rule_based_segment_storage, fetched_rule_based_segments, feature_flag_changes.get('rbs')['t'], self._api.clear_storage) fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('ff').get('d', [])] - segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t'], self._api.clear_storage) + segment_list.update(update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t'], self._api.clear_storage)) segment_list.update(rbs_segment_list) if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: @@ -392,6 +393,25 @@ class LocalSplitSynchronizerBase(object): """Localhost mode feature_flag base synchronizer.""" _DEFAULT_FEATURE_FLAG_TILL = -1 + _DEFAULT_RB_SEGMENT_TILL = -1 + + def __init__(self, filename, feature_flag_storage, rule_based_segment_storage, localhost_mode=LocalhostMode.LEGACY): + """ + Class constructor. + + :param filename: File to parse feature flags from. + :type filename: str + :param feature_flag_storage: Feature flag Storage. + :type feature_flag_storage: splitio.storage.InMemorySplitStorage + :param localhost_mode: mode for localhost either JSON, YAML or LEGACY. 
+ :type localhost_mode: splitio.sync.split.LocalhostMode + """ + self._filename = filename + self._feature_flag_storage = feature_flag_storage + self._rule_based_segment_storage = rule_based_segment_storage + self._localhost_mode = localhost_mode + self._current_ff_sha = "-1" + self._current_rbs_sha = "-1" @staticmethod def _make_feature_flag(feature_flag_name, conditions, configs=None): @@ -541,7 +561,7 @@ def _sanitize_rb_segment_elements(self, parsed_rb_segments): _LOGGER.warning("A rule based segment in json file does not have (Name) or property is empty, skipping.") continue for element in [('trafficTypeName', 'user', None, None, None, None), - ('status', splits.Status.ACTIVE, None, None, [splits.Status.ACTIVE, splits.Status.ARCHIVED], None), + ('status', splits.Status.ACTIVE.value, None, None, [e.value for e in splits.Status], None), ('changeNumber', 0, 0, None, None, None)]: rb_segment = util._sanitize_object_element(rb_segment, 'rule based segment', element[0], element[1], lower_value=element[2], upper_value=element[3], in_list=element[4], not_in_list=element[5]) rb_segment = self._sanitize_condition(rb_segment) @@ -632,6 +652,9 @@ def _convert_yaml_to_feature_flag(cls, parsed): to_return[feature_flag_name] = cls._make_feature_flag(feature_flag_name, whitelist + all_keys, configs) return to_return + def _check_exit_conditions(self, storage_cn, parsed_till, default_till): + if storage_cn > parsed_till and parsed_till != default_till: + return True class LocalSplitSynchronizer(LocalSplitSynchronizerBase): """Localhost mode feature_flag synchronizer.""" @@ -647,12 +670,8 @@ def __init__(self, filename, feature_flag_storage, rule_based_segment_storage, l :param localhost_mode: mode for localhost either JSON, YAML or LEGACY. :type localhost_mode: splitio.sync.split.LocalhostMode """ - self._filename = filename - self._feature_flag_storage = feature_flag_storage - self._rule_based_segment_storage = rule_based_segment_storage - self._localhost_mode = localhost_mode - self._current_json_sha = "-1" - + LocalSplitSynchronizerBase.__init__(self, filename, feature_flag_storage, rule_based_segment_storage, localhost_mode) + @classmethod def _read_feature_flags_from_legacy_file(cls, filename): """ @@ -744,18 +763,24 @@ def _synchronize_json(self): try: parsed = self._read_feature_flags_from_json_file(self._filename) segment_list = set() - fecthed_sha = util._get_sha(json.dumps(parsed)) - if fecthed_sha == self._current_json_sha: + fecthed_ff_sha = util._get_sha(json.dumps(parsed['ff'])) + fecthed_rbs_sha = util._get_sha(json.dumps(parsed['rbs'])) + + if fecthed_ff_sha == self._current_ff_sha and fecthed_rbs_sha == self._current_rbs_sha: return [] - self._current_json_sha = fecthed_sha - if self._feature_flag_storage.get_change_number() > parsed['ff']['t'] and parsed['ff']['t'] != self._DEFAULT_FEATURE_FLAG_TILL: + self._current_ff_sha = fecthed_ff_sha + self._current_rbs_sha = fecthed_rbs_sha + + if self._check_exit_conditions(self._feature_flag_storage.get_change_number(), parsed['ff']['t'], self._DEFAULT_FEATURE_FLAG_TILL) \ + and self._check_exit_conditions(self._rule_based_segment_storage.get_change_number(), parsed['rbs']['t'], self._DEFAULT_RB_SEGMENT_TILL): return [] - fetched_feature_flags = [splits.from_raw(feature_flag) for feature_flag in parsed['ff']['d']] - segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, parsed['ff']['t']) + if not self._check_exit_conditions(self._feature_flag_storage.get_change_number(), parsed['ff']['t'], 
self._DEFAULT_FEATURE_FLAG_TILL): + fetched_feature_flags = [splits.from_raw(feature_flag) for feature_flag in parsed['ff']['d']] + segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, parsed['ff']['t']) - if self._rule_based_segment_storage.get_change_number() <= parsed['rbs']['t'] or parsed['rbs']['t'] == self._DEFAULT_FEATURE_FLAG_TILL: + if not self._check_exit_conditions(self._rule_based_segment_storage.get_change_number(), parsed['rbs']['t'], self._DEFAULT_RB_SEGMENT_TILL): fetched_rb_segments = [rule_based_segments.from_raw(rb_segment) for rb_segment in parsed['rbs']['d']] segment_list.update(update_rule_based_segment_storage(self._rule_based_segment_storage, fetched_rb_segments, parsed['rbs']['t'])) @@ -763,7 +788,7 @@ def _synchronize_json(self): except Exception as exc: _LOGGER.debug('Exception: ', exc_info=True) - raise ValueError("Error reading feature flags from json.") from exc + raise ValueError("Error reading feature flags from json.") from exc def _read_feature_flags_from_json_file(self, filename): """ @@ -778,6 +803,11 @@ def _read_feature_flags_from_json_file(self, filename): try: with open(filename, 'r') as flo: parsed = json.load(flo) + + # check if spec version is old + if parsed.get('splits'): + parsed = util.convert_to_new_spec(parsed) + santitized = self._sanitize_json_elements(parsed) santitized['ff']['d'] = self._sanitize_feature_flag_elements(santitized['ff']['d']) santitized['rbs']['d'] = self._sanitize_rb_segment_elements(santitized['rbs']['d']) @@ -787,7 +817,6 @@ def _read_feature_flags_from_json_file(self, filename): _LOGGER.debug('Exception: ', exc_info=True) raise ValueError("Error parsing file %s. Make sure it's readable." % filename) from exc - class LocalSplitSynchronizerAsync(LocalSplitSynchronizerBase): """Localhost mode async feature_flag synchronizer.""" @@ -802,11 +831,7 @@ def __init__(self, filename, feature_flag_storage, rule_based_segment_storage, l :param localhost_mode: mode for localhost either JSON, YAML or LEGACY. 
:type localhost_mode: splitio.sync.split.LocalhostMode """ - self._filename = filename - self._feature_flag_storage = feature_flag_storage - self._rule_based_segment_storage = rule_based_segment_storage - self._localhost_mode = localhost_mode - self._current_json_sha = "-1" + LocalSplitSynchronizerBase.__init__(self, filename, feature_flag_storage, rule_based_segment_storage, localhost_mode) @classmethod async def _read_feature_flags_from_legacy_file(cls, filename): @@ -900,18 +925,24 @@ async def _synchronize_json(self): try: parsed = await self._read_feature_flags_from_json_file(self._filename) segment_list = set() - fecthed_sha = util._get_sha(json.dumps(parsed)) - if fecthed_sha == self._current_json_sha: + fecthed_ff_sha = util._get_sha(json.dumps(parsed['ff'])) + fecthed_rbs_sha = util._get_sha(json.dumps(parsed['rbs'])) + + if fecthed_ff_sha == self._current_ff_sha and fecthed_rbs_sha == self._current_rbs_sha: return [] - self._current_json_sha = fecthed_sha - if await self._feature_flag_storage.get_change_number() > parsed['ff']['t'] and parsed['ff']['t'] != self._DEFAULT_FEATURE_FLAG_TILL: + self._current_ff_sha = fecthed_ff_sha + self._current_rbs_sha = fecthed_rbs_sha + + if self._check_exit_conditions(await self._feature_flag_storage.get_change_number(), parsed['ff']['t'], self._DEFAULT_FEATURE_FLAG_TILL) \ + and self._check_exit_conditions(await self._rule_based_segment_storage.get_change_number(), parsed['rbs']['t'], self._DEFAULT_RB_SEGMENT_TILL): return [] - fetched_feature_flags = [splits.from_raw(feature_flag) for feature_flag in parsed['ff']['d']] - segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, parsed['ff']['t']) + if not self._check_exit_conditions(await self._feature_flag_storage.get_change_number(), parsed['ff']['t'], self._DEFAULT_FEATURE_FLAG_TILL): + fetched_feature_flags = [splits.from_raw(feature_flag) for feature_flag in parsed['ff']['d']] + segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, parsed['ff']['t']) - if await self._rule_based_segment_storage.get_change_number() <= parsed['rbs']['t'] or parsed['rbs']['t'] == self._DEFAULT_FEATURE_FLAG_TILL: + if not self._check_exit_conditions(await self._rule_based_segment_storage.get_change_number(), parsed['rbs']['t'], self._DEFAULT_RB_SEGMENT_TILL): fetched_rb_segments = [rule_based_segments.from_raw(rb_segment) for rb_segment in parsed['rbs']['d']] segment_list.update(await update_rule_based_segment_storage_async(self._rule_based_segment_storage, fetched_rb_segments, parsed['rbs']['t'])) @@ -934,6 +965,11 @@ async def _read_feature_flags_from_json_file(self, filename): try: async with aiofiles.open(filename, 'r') as flo: parsed = json.loads(await flo.read()) + + # check if spec version is old + if parsed.get('splits'): + parsed = util.convert_to_new_spec(parsed) + santitized = self._sanitize_json_elements(parsed) santitized['ff']['d'] = self._sanitize_feature_flag_elements(santitized['ff']['d']) santitized['rbs']['d'] = self._sanitize_rb_segment_elements(santitized['rbs']['d']) diff --git a/splitio/sync/util.py b/splitio/sync/util.py index 07ec5f24..cd32d2c2 100644 --- a/splitio/sync/util.py +++ b/splitio/sync/util.py @@ -62,3 +62,7 @@ def _sanitize_object_element(object, object_name, element_name, default_value, l _LOGGER.debug("Sanitized element [%s] to '%s' in %s: %s.", element_name, default_value, object_name, object['name']) return object + +def convert_to_new_spec(body): + return {"ff": {"d": 
body["splits"], "s": body["since"], "t": body["till"]}, + "rbs": {"d": [], "s": -1, "t": -1}} diff --git a/splitio/util/storage_helper.py b/splitio/util/storage_helper.py index d1c37f92..81fdef65 100644 --- a/splitio/util/storage_helper.py +++ b/splitio/util/storage_helper.py @@ -1,6 +1,7 @@ """Storage Helper.""" import logging from splitio.models import splits +from splitio.models import rule_based_segments _LOGGER = logging.getLogger(__name__) @@ -58,7 +59,7 @@ def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_seg for rule_based_segment in rule_based_segments: if rule_based_segment.status == splits.Status.ACTIVE: to_add.append(rule_based_segment) - segment_list.update(set(rule_based_segment.excluded.get_excluded_segments())) + segment_list.update(set(rule_based_segment.excluded.get_excluded_standard_segments())) segment_list.update(rule_based_segment.get_condition_segment_names()) else: if rule_based_segment_storage.get(rule_based_segment.name) is not None: @@ -66,7 +67,22 @@ def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_seg rule_based_segment_storage.update(to_add, to_delete, change_number) return segment_list + +def get_standard_segment_names_in_rbs_storage(rule_based_segment_storage): + """ + Retrieve a list of all standard segments names. + :return: Set of segment names. + :rtype: Set(str) + """ + segment_list = set() + for rb_segment in rule_based_segment_storage.get_segment_names(): + rb_segment_obj = rule_based_segment_storage.get(rb_segment) + segment_list.update(set(rb_segment_obj.excluded.get_excluded_standard_segments())) + segment_list.update(rb_segment_obj.get_condition_segment_names()) + + return segment_list + async def update_feature_flag_storage_async(feature_flag_storage, feature_flags, change_number, clear_storage=False): """ Update feature flag storage from given list of feature flags while checking the flag set logic @@ -121,7 +137,7 @@ async def update_rule_based_segment_storage_async(rule_based_segment_storage, ru for rule_based_segment in rule_based_segments: if rule_based_segment.status == splits.Status.ACTIVE: to_add.append(rule_based_segment) - segment_list.update(set(rule_based_segment.excluded.get_excluded_segments())) + segment_list.update(set(rule_based_segment.excluded.get_excluded_standard_segments())) segment_list.update(rule_based_segment.get_condition_segment_names()) else: if await rule_based_segment_storage.get(rule_based_segment.name) is not None: @@ -130,6 +146,22 @@ async def update_rule_based_segment_storage_async(rule_based_segment_storage, ru await rule_based_segment_storage.update(to_add, to_delete, change_number) return segment_list +async def get_standard_segment_names_in_rbs_storage_async(rule_based_segment_storage): + """ + Retrieve a list of all standard segments names. + + :return: Set of segment names. 
+ :rtype: Set(str) + """ + segment_list = set() + segment_names = await rule_based_segment_storage.get_segment_names() + for rb_segment in segment_names: + rb_segment_obj = await rule_based_segment_storage.get(rb_segment) + segment_list.update(set(rb_segment_obj.excluded.get_excluded_standard_segments())) + segment_list.update(rb_segment_obj.get_condition_segment_names()) + + return segment_list + def get_valid_flag_sets(flag_sets, flag_set_filter): """ Check each flag set in given array, return it if exist in a given config flag set array, if config array is empty return all diff --git a/splits.json b/splits.json deleted file mode 100644 index 67bd4fbe..00000000 --- a/splits.json +++ /dev/null @@ -1 +0,0 @@ -{"ff": {"t": -1, "s": -1, "d": [{"changeNumber": 123, "trafficTypeName": "user", "name": "third_split", "trafficAllocation": 100, "trafficAllocationSeed": 123456, "seed": 321654, "status": "ACTIVE", "killed": true, "defaultTreatment": "off", "algo": 2, "conditions": [{"partitions": [{"treatment": "on", "size": 50}, {"treatment": "off", "size": 50}], "contitionType": "WHITELIST", "label": "some_label", "matcherGroup": {"matchers": [{"matcherType": "WHITELIST", "whitelistMatcherData": {"whitelist": ["k1", "k2", "k3"]}, "negate": false}], "combiner": "AND"}}, {"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "IN_RULE_BASED_SEGMENT", "negate": false, "userDefinedSegmentMatcherData": {"segmentName": "sample_rule_based_segment"}}]}, "partitions": [{"treatment": "on", "size": 100}, {"treatment": "off", "size": 0}], "label": "in rule based segment sample_rule_based_segment"}], "sets": ["set6"]}]}, "rbs": {"t": 1675095324253, "s": -1, "d": [{"changeNumber": 5, "name": "sample_rule_based_segment", "status": "ACTIVE", "trafficTypeName": "user", "excluded": {"keys": ["mauro@split.io", "gaston@split.io"], "segments": []}, "conditions": [{"matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": "email"}, "matcherType": "ENDS_WITH", "negate": false, "whitelistMatcherData": {"whitelist": ["@split.io"]}}]}}]}]}} \ No newline at end of file diff --git a/tests/api/test_splits_api.py b/tests/api/test_splits_api.py index bfb45c16..c9aeee8b 100644 --- a/tests/api/test_splits_api.py +++ b/tests/api/test_splits_api.py @@ -122,6 +122,41 @@ def get(sdk, splitChanges, sdk_key, extra_headers, query): assert self.query[2] == {'s': '1.3', 'since': 123, 'rbSince': -1} assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": 123, "t": -1}} assert split_api.clear_storage + + def test_using_old_spec_since(self, mocker): + """Test using old_spec_since variable.""" + httpclient = mocker.Mock(spec=client.HttpClient) + self.counter = 0 + self.query = [] + def get(sdk, splitChanges, sdk_key, extra_headers, query): + self.counter += 1 + self.query.append(query) + if self.counter == 1: + return client.HttpResponse(400, 'error', {}) + if self.counter == 2: + return client.HttpResponse(200, '{"splits": [], "since": 123, "till": 456}', {}) + if self.counter == 3: + return client.HttpResponse(400, 'error', {}) + if self.counter == 4: + return client.HttpResponse(200, '{"splits": [], "since": 456, "till": 456}', {}) + + httpclient.is_sdk_endpoint_overridden.return_value = True + httpclient.get = get + split_api = splits.SplitsAPI(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) + response = split_api.fetch_splits(123, -1, FetchOptions(False, None, 
None, None)) + assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": -1, "t": -1}} + assert self.query == [{'s': '1.3', 'since': 123, 'rbSince': -1}, {'s': '1.1', 'since': 123}] + assert not split_api.clear_storage + + time.sleep(1) + splits._PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 10 + + response = split_api.fetch_splits(456, -1, FetchOptions(False, None, None, None)) + time.sleep(1) + splits._PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 1000000 + assert self.query[2] == {'s': '1.3', 'since': 456, 'rbSince': -1} + assert self.query[3] == {'s': '1.1', 'since': 456} + assert response == {"ff": {"d": [], "s": 456, "t": 456}, "rbs": {"d": [], "s": -1, "t": -1}} class SplitAPIAsyncTests(object): """Split async API test cases.""" @@ -253,9 +288,45 @@ async def get(sdk, splitChanges, sdk_key, extra_headers, query): assert self.query == [{'s': '1.3', 'since': 123, 'rbSince': -1}, {'s': '1.1', 'since': 123}] assert not split_api.clear_storage - time.sleep(1) + time.sleep(1) splits._PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 10 response = await split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) assert self.query[2] == {'s': '1.3', 'since': 123, 'rbSince': -1} assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": 123, "t": -1}} assert split_api.clear_storage + + @pytest.mark.asyncio + async def test_using_old_spec_since(self, mocker): + """Test using old_spec_since variable.""" + httpclient = mocker.Mock(spec=client.HttpClient) + self.counter = 0 + self.query = [] + async def get(sdk, splitChanges, sdk_key, extra_headers, query): + self.counter += 1 + self.query.append(query) + if self.counter == 1: + return client.HttpResponse(400, 'error', {}) + if self.counter == 2: + return client.HttpResponse(200, '{"splits": [], "since": 123, "till": 456}', {}) + if self.counter == 3: + return client.HttpResponse(400, 'error', {}) + if self.counter == 4: + return client.HttpResponse(200, '{"splits": [], "since": 456, "till": 456}', {}) + + httpclient.is_sdk_endpoint_overridden.return_value = True + httpclient.get = get + split_api = splits.SplitsAPIAsync(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) + response = await split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) + assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": -1, "t": -1}} + assert self.query == [{'s': '1.3', 'since': 123, 'rbSince': -1}, {'s': '1.1', 'since': 123}] + assert not split_api.clear_storage + + time.sleep(1) + splits._PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 10 + + response = await split_api.fetch_splits(456, -1, FetchOptions(False, None, None, None)) + time.sleep(1) + splits._PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 1000000 + assert self.query[2] == {'s': '1.3', 'since': 456, 'rbSince': -1} + assert self.query[3] == {'s': '1.1', 'since': 456} + assert response == {"ff": {"d": [], "s": 456, "t": 456}, "rbs": {"d": [], "s": -1, "t": -1}} diff --git a/tests/client/test_client.py b/tests/client/test_client.py index 526b7347..49b6ba7a 100644 --- a/tests/client/test_client.py +++ b/tests/client/test_client.py @@ -1054,7 +1054,7 @@ def test_telemetry_record_treatment_exception(self, mocker): split_storage = InMemorySplitStorage() split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) segment_storage = mocker.Mock(spec=SegmentStorage) - rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rb_segment_storage = InMemoryRuleBasedSegmentStorage() impression_storage = 
mocker.Mock(spec=ImpressionStorage) event_storage = mocker.Mock(spec=EventStorage) destroyed_property = mocker.PropertyMock() diff --git a/tests/client/test_input_validator.py b/tests/client/test_input_validator.py index 81b1c06b..2f15d038 100644 --- a/tests/client/test_input_validator.py +++ b/tests/client/test_input_validator.py @@ -8,7 +8,7 @@ from splitio.client.key import Key from splitio.storage import SplitStorage, EventStorage, ImpressionStorage, SegmentStorage, RuleBasedSegmentsStorage from splitio.storage.inmemmory import InMemoryTelemetryStorage, InMemoryTelemetryStorageAsync, \ - InMemorySplitStorage, InMemorySplitStorageAsync + InMemorySplitStorage, InMemorySplitStorageAsync, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync from splitio.models.splits import Split from splitio.client import input_validator from splitio.recorder.recorder import StandardRecorder, StandardRecorderAsync @@ -30,6 +30,8 @@ def test_get_treatment(self, mocker): type(split_mock).conditions = conditions_mock storage_mock = mocker.Mock(spec=SplitStorage) storage_mock.fetch_many.return_value = {'some_feature': split_mock} + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = InMemoryTelemetryStorage() @@ -40,7 +42,7 @@ def test_get_treatment(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -268,6 +270,8 @@ def _configs(treatment): split_mock.get_configurations_for.side_effect = _configs storage_mock = mocker.Mock(spec=SplitStorage) storage_mock.fetch_many.return_value = {'some_feature': split_mock} + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = InMemoryTelemetryStorage() @@ -278,7 +282,7 @@ def _configs(treatment): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -819,6 +823,9 @@ def test_get_treatments(self, mocker): storage_mock.fetch_many.return_value = { 'some_feature': split_mock } + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} + impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = InMemoryTelemetryStorage() telemetry_producer = TelemetryStorageProducer(telemetry_storage) @@ -828,7 +835,7 @@ def test_get_treatments(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -963,6 +970,8 @@ def test_get_treatments_with_config(self, mocker): storage_mock.fetch_many.return_value = { 'some_feature': split_mock } + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = InMemoryTelemetryStorage() @@ -973,7 +982,7 @@ def 
test_get_treatments_with_config(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1108,6 +1117,8 @@ def test_get_treatments_by_flag_set(self, mocker): storage_mock.fetch_many.return_value = { 'some_feature': split_mock } + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} storage_mock.get_feature_flags_by_sets.return_value = ['some_feature'] impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = InMemoryTelemetryStorage() @@ -1118,7 +1129,7 @@ def test_get_treatments_by_flag_set(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1224,6 +1235,8 @@ def test_get_treatments_by_flag_sets(self, mocker): storage_mock.fetch_many.return_value = { 'some_feature': split_mock } + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} storage_mock.get_feature_flags_by_sets.return_value = ['some_feature'] impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = InMemoryTelemetryStorage() @@ -1234,7 +1247,7 @@ def test_get_treatments_by_flag_sets(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1349,6 +1362,9 @@ def _configs(treatment): storage_mock.fetch_many.return_value = { 'some_feature': split_mock } + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} + storage_mock.get_feature_flags_by_sets.return_value = ['some_feature'] impmanager = mocker.Mock(spec=ImpressionManager) @@ -1360,7 +1376,7 @@ def _configs(treatment): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1469,6 +1485,9 @@ def _configs(treatment): storage_mock.fetch_many.return_value = { 'some_feature': split_mock } + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} + storage_mock.get_feature_flags_by_sets.return_value = ['some_feature'] impmanager = mocker.Mock(spec=ImpressionManager) @@ -1480,7 +1499,7 @@ def _configs(treatment): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1619,6 +1638,10 @@ async def fetch_many(*_): 'some_feature': split_mock } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs async def get_change_number(*_): return 1 @@ -1633,7 +1656,7 @@ async def 
get_change_number(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1876,6 +1899,10 @@ async def fetch_many(*_): 'some_feature': split_mock } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs async def get_change_number(*_): return 1 @@ -1890,7 +1917,7 @@ async def get_change_number(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2409,6 +2436,10 @@ async def fetch_many(*_): 'some': split_mock, } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = await InMemoryTelemetryStorageAsync.create() @@ -2419,7 +2450,7 @@ async def fetch_many(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2568,6 +2599,10 @@ async def fetch_many(*_): 'some_feature': split_mock } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = await InMemoryTelemetryStorageAsync.create() @@ -2578,7 +2613,7 @@ async def fetch_many(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2730,6 +2765,10 @@ async def fetch_many(*_): async def get_feature_flags_by_sets(*_): return ['some_feature'] storage_mock.get_feature_flags_by_sets = get_feature_flags_by_sets + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = await InMemoryTelemetryStorageAsync.create() @@ -2740,7 +2779,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2867,6 +2906,11 @@ async def fetch_many(*_): 'some': split_mock, } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs + async def get_feature_flags_by_sets(*_): return ['some_feature'] storage_mock.get_feature_flags_by_sets = get_feature_flags_by_sets @@ 
-2880,7 +2924,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -3017,6 +3061,10 @@ async def fetch_many(*_): 'some': split_mock, } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs async def get_feature_flags_by_sets(*_): return ['some_feature'] storage_mock.get_feature_flags_by_sets = get_feature_flags_by_sets @@ -3030,7 +3078,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -3160,6 +3208,11 @@ async def fetch_many(*_): 'some': split_mock, } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs + async def get_feature_flags_by_sets(*_): return ['some_feature'] storage_mock.get_feature_flags_by_sets = get_feature_flags_by_sets @@ -3173,7 +3226,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, diff --git a/tests/engine/files/rule_base_segments.json b/tests/engine/files/rule_base_segments.json new file mode 100644 index 00000000..70b64b32 --- /dev/null +++ b/tests/engine/files/rule_base_segments.json @@ -0,0 +1,62 @@ +{"ff": {"d": [], "t": -1, "s": -1}, +"rbs": {"t": -1, "s": -1, "d": + [{ + "changeNumber": 5, + "name": "dependent_rbs", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{"keys":["mauro@split.io","gaston@split.io"],"segments":[]}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ]}, + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded": { + "keys": [], + "segments": [] + }, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "dependent_rbs" + } + } + ] + } + } + ] + }] +}} diff --git a/tests/engine/files/rule_base_segments2.json b/tests/engine/files/rule_base_segments2.json new file mode 100644 index 00000000..ee356fd8 --- /dev/null +++ b/tests/engine/files/rule_base_segments2.json @@ -0,0 +1,67 @@ +{"ff": {"d": [], "t": -1, "s": -1}, +"rbs": {"t": -1, "s": -1, "d": [ + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + 
"keys":["mauro@split.io","gaston@split.io"], + "segments":[{"type":"rule-based", "name":"no_excludes"}] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "START_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "bilal" + ] + } + } + ] + } + } + ] + }, + { + "changeNumber": 5, + "name": "no_excludes", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["bilal2@split.io"], + "segments":[] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + } +]}} diff --git a/tests/engine/files/rule_base_segments3.json b/tests/engine/files/rule_base_segments3.json new file mode 100644 index 00000000..f738f3f7 --- /dev/null +++ b/tests/engine/files/rule_base_segments3.json @@ -0,0 +1,35 @@ +{"ff": {"d": [], "t": -1, "s": -1}, +"rbs": {"t": -1, "s": -1, "d": [ + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[{"type":"standard", "name":"segment1"}] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + } +]}} diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index de8f9325..a2937126 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -1,9 +1,12 @@ """Evaluator tests module.""" +import json import logging +import os import pytest import copy from splitio.models.splits import Split, Status +from splitio.models import segments from splitio.models.grammar.condition import Condition, ConditionType from splitio.models.impressions import Label from splitio.models.grammar import condition @@ -115,7 +118,7 @@ def _build_evaluator_with_mocks(self, mocker): e = evaluator.Evaluator(splitter_mock) evaluator._LOGGER = logger_mock return e - + def test_evaluate_treatment_killed_split(self, mocker): """Test that a killed split returns the default treatment.""" e = self._build_evaluator_with_mocks(mocker) @@ -124,7 +127,8 @@ def test_evaluate_treatment_killed_split(self, mocker): mocked_split.killed = True mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'off' assert result['configurations'] == '{"some_property": 123}' @@ -142,7 +146,7 @@ def test_evaluate_treatment_ok(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = 
EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' assert result['configurations'] == '{"some_property": 123}' @@ -161,7 +165,7 @@ def test_evaluate_treatment_ok_no_config(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = None - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' assert result['configurations'] == None @@ -188,7 +192,7 @@ def test_evaluate_treatments(self, mocker): mocked_split2.change_number = 123 mocked_split2.get_configurations_for.return_value = None - ctx = EvaluationContext(flags={'feature2': mocked_split, 'feature4': mocked_split2}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'feature2': mocked_split, 'feature4': mocked_split2}, segment_memberships=set(), rbs_segments={}) results = e.eval_many_with_context('some_key', 'some_bucketing_key', ['feature2', 'feature4'], {}, ctx) result = results['feature4'] assert result['configurations'] == None @@ -211,7 +215,7 @@ def test_get_gtreatment_for_split_no_condition_matches(self, mocker): mocked_split.change_number = '123' mocked_split.conditions = [] mocked_split.get_configurations_for = None - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={}) assert e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, ctx) == ( 'off', Label.NO_CONDITION_MATCHED @@ -228,7 +232,7 @@ def test_get_gtreatment_for_split_non_rollout(self, mocker): mocked_split = mocker.Mock(spec=Split) mocked_split.killed = False mocked_split.conditions = [mocked_condition_1] - treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None, None, None)) + treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None, None)) assert treatment == 'on' assert label == 'some_label' @@ -237,13 +241,152 @@ def test_evaluate_treatment_with_rule_based_segment(self, mocker): e = evaluator.Evaluator(splitters.Splitter()) mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': False}, segment_rbs_conditions={'sample_rule_based_segment': rule_based_segments.from_raw(rbs_raw).conditions}) + + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={'sample_rule_based_segment': rule_based_segments.from_raw(rbs_raw)}) result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) assert result['treatment'] == 'on' + + def test_evaluate_treatment_with_rbs_in_condition(self): + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorage() + rbs_storage = 
InMemoryRuleBasedSegmentStorage() + segment_storage = InMemorySegmentStorage() + evaluation_factory = EvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + rbs_segments = os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + rbs2 = rule_based_segments.from_raw(data["rbs"]["d"][1]) + rbs_storage.update([rbs, rbs2], [], 12) + splits_storage.update([mocked_split], [], 12) + + ctx = evaluation_factory.context_for('bilal@split.io', ['some']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': True}, segment_rbs_conditions={'sample_rule_based_segment': []}) - result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) - assert result['treatment'] == 'off' + ctx = evaluation_factory.context_for('mauro@split.io', ['some']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "off" + + def test_using_segment_in_excluded(self): + rbs_segments = os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments3.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorage() + rbs_storage = InMemoryRuleBasedSegmentStorage() + segment_storage = InMemorySegmentStorage() + evaluation_factory = EvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + rbs_storage.update([rbs], [], 12) + splits_storage.update([mocked_split], [], 12) + segment = segments.from_raw({'name': 'segment1', 'added': ['pato@split.io'], 'removed': [], 'till': 123}) + segment_storage.put(segment) + + ctx = evaluation_factory.context_for('bilal@split.io', ['some']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" + ctx = evaluation_factory.context_for('mauro@split.io', ['some']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "off" + ctx = evaluation_factory.context_for('pato@split.io', ['some']) + assert e.eval_with_context('pato@split.io', 'pato@split.io', 'some', {'email': 'pato@split.io'}, ctx)['treatment'] == "off" + + def test_using_rbs_in_excluded(self): + rbs_segments = os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments2.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorage() + rbs_storage = InMemoryRuleBasedSegmentStorage() + segment_storage = InMemorySegmentStorage() + evaluation_factory = EvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + rbs = 
rule_based_segments.from_raw(data["rbs"]["d"][0]) + rbs2 = rule_based_segments.from_raw(data["rbs"]["d"][1]) + rbs_storage.update([rbs, rbs2], [], 12) + splits_storage.update([mocked_split], [], 12) + + ctx = evaluation_factory.context_for('bilal@split.io', ['some']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "off" + ctx = evaluation_factory.context_for('bilal', ['some']) + assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] == "on" + ctx = evaluation_factory.context_for('bilal2@split.io', ['some']) + assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "on" + + @pytest.mark.asyncio + async def test_evaluate_treatment_with_rbs_in_condition_async(self): + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + evaluation_factory = AsyncEvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + rbs_segments = os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + rbs2 = rule_based_segments.from_raw(data["rbs"]["d"][1]) + await rbs_storage.update([rbs, rbs2], [], 12) + await splits_storage.update([mocked_split], [], 12) + + ctx = await evaluation_factory.context_for('bilal@split.io', ['some']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" + ctx = await evaluation_factory.context_for('mauro@split.io', ['some']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "off" + + @pytest.mark.asyncio + async def test_using_segment_in_excluded_async(self): + rbs_segments = os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments3.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + evaluation_factory = AsyncEvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + await rbs_storage.update([rbs], [], 12) + await splits_storage.update([mocked_split], [], 12) + segment = segments.from_raw({'name': 'segment1', 'added': ['pato@split.io'], 'removed': [], 'till': 123}) + await segment_storage.put(segment) + + ctx = await evaluation_factory.context_for('bilal@split.io', ['some']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" + ctx = await evaluation_factory.context_for('mauro@split.io', ['some']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "off" + ctx = await evaluation_factory.context_for('pato@split.io', ['some']) + assert 
e.eval_with_context('pato@split.io', 'pato@split.io', 'some', {'email': 'pato@split.io'}, ctx)['treatment'] == "off" + + @pytest.mark.asyncio + async def test_using_rbs_in_excluded_async(self): + rbs_segments = os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments2.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + evaluation_factory = AsyncEvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + rbs2 = rule_based_segments.from_raw(data["rbs"]["d"][1]) + await rbs_storage.update([rbs, rbs2], [], 12) + await splits_storage.update([mocked_split], [], 12) + + ctx = await evaluation_factory.context_for('bilal@split.io', ['some']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "off" + ctx = await evaluation_factory.context_for('bilal', ['some']) + assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] == "on" + ctx = await evaluation_factory.context_for('bilal2@split.io', ['some']) + assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "on" class EvaluationDataFactoryTests(object): """Test evaluation factory class.""" @@ -276,14 +419,12 @@ def test_get_context(self): eval_factory = EvaluationDataFactory(flag_storage, segment_storage, rbs_segment_storage) ec = eval_factory.context_for('bilal@split.io', ['some']) - assert ec.segment_rbs_conditions == {'sample_rule_based_segment': rbs.conditions} - assert ec.segment_rbs_memberships == {'sample_rule_based_segment': False} + assert ec.rbs_segments == {'sample_rule_based_segment': rbs} assert ec.segment_memberships == {"employees": False} segment_storage.update("employees", {"mauro@split.io"}, {}, 1234) ec = eval_factory.context_for('mauro@split.io', ['some']) - assert ec.segment_rbs_conditions == {} - assert ec.segment_rbs_memberships == {'sample_rule_based_segment': True} + assert ec.rbs_segments == {'sample_rule_based_segment': rbs} assert ec.segment_memberships == {"employees": True} class EvaluationDataFactoryAsyncTests(object): @@ -318,12 +459,10 @@ async def test_get_context(self): eval_factory = AsyncEvaluationDataFactory(flag_storage, segment_storage, rbs_segment_storage) ec = await eval_factory.context_for('bilal@split.io', ['some']) - assert ec.segment_rbs_conditions == {'sample_rule_based_segment': rbs.conditions} - assert ec.segment_rbs_memberships == {'sample_rule_based_segment': False} + assert ec.rbs_segments == {'sample_rule_based_segment': rbs} assert ec.segment_memberships == {"employees": False} await segment_storage.update("employees", {"mauro@split.io"}, {}, 1234) ec = await eval_factory.context_for('mauro@split.io', ['some']) - assert ec.segment_rbs_conditions == {} - assert ec.segment_rbs_memberships == {'sample_rule_based_segment': True} + assert ec.rbs_segments == {'sample_rule_based_segment': rbs} assert ec.segment_memberships == {"employees": True} diff --git a/tests/helpers/mockserver.py b/tests/helpers/mockserver.py index 71cd186b..8d41cfd2 100644 --- a/tests/helpers/mockserver.py +++ 
b/tests/helpers/mockserver.py @@ -3,12 +3,13 @@ from collections import namedtuple import queue import threading +import pytest from http.server import HTTPServer, BaseHTTPRequestHandler Request = namedtuple('Request', ['method', 'path', 'headers', 'body']) - +OLD_SPEC = False class SSEMockServer(object): """SSE server for testing purposes.""" @@ -102,19 +103,22 @@ class SplitMockServer(object): protocol_version = 'HTTP/1.1' def __init__(self, split_changes=None, segment_changes=None, req_queue=None, - auth_response=None): + auth_response=None, old_spec=False): """ Consruct a mock server. :param changes: mapping of changeNumbers to splitChanges responses :type changes: dict """ + global OLD_SPEC + OLD_SPEC = old_spec split_changes = split_changes if split_changes is not None else {} segment_changes = segment_changes if segment_changes is not None else {} self._server = HTTPServer(('localhost', 0), lambda *xs: SDKHandler(split_changes, segment_changes, *xs, req_queue=req_queue, - auth_response=auth_response)) + auth_response=auth_response, + )) self._server_thread = threading.Thread(target=self._blocking_run, name="SplitMockServer", daemon=True) self._done_event = threading.Event() @@ -148,7 +152,7 @@ def __init__(self, split_changes, segment_changes, *args, **kwargs): self._req_queue = kwargs.get('req_queue') self._auth_response = kwargs.get('auth_response') self._split_changes = split_changes - self._segment_changes = segment_changes + self._segment_changes = segment_changes BaseHTTPRequestHandler.__init__(self, *args) def _parse_qs(self): @@ -180,6 +184,15 @@ def _handle_segment_changes(self): self.wfile.write(json.dumps(to_send).encode('utf-8')) def _handle_split_changes(self): + global OLD_SPEC + if OLD_SPEC: + self.send_response(400) + self.send_header("Content-type", "application/json") + self.end_headers() + self.wfile.write('{}'.encode('utf-8')) + OLD_SPEC = False + return + qstring = self._parse_qs() since = int(qstring.get('since', -1)) to_send = self._split_changes.get(since) diff --git a/tests/integration/files/split_changes_temp.json b/tests/integration/files/split_changes_temp.json index 24d876a4..64575226 100644 --- a/tests/integration/files/split_changes_temp.json +++ b/tests/integration/files/split_changes_temp.json @@ -1 +1 @@ -{"ff": {"t": -1, "s": -1, "d": [{"name": "SPLIT_1", "status": "ACTIVE", "killed": false, "defaultTreatment": "off", "configurations": {}, "conditions": []}]}, "rbs": {"t": -1, "s": -1, "d": [{"changeNumber": 12, "name": "some_segment", "status": "ACTIVE", "trafficTypeName": "user", "excluded": {"keys": [], "segments": []}, "conditions": []}]}} \ No newline at end of file +{"ff": {"t": -1, "s": -1, "d": [{"changeNumber": 10, "trafficTypeName": "user", "name": "rbs_feature_flag", "trafficAllocation": 100, "trafficAllocationSeed": 1828377380, "seed": -286617921, "status": "ACTIVE", "killed": false, "defaultTreatment": "off", "algo": 2, "conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "IN_RULE_BASED_SEGMENT", "negate": false, "userDefinedSegmentMatcherData": {"segmentName": "sample_rule_based_segment"}}]}, "partitions": [{"treatment": "on", "size": 100}, {"treatment": "off", "size": 0}], "label": "in rule based segment sample_rule_based_segment"}, {"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "ALL_KEYS", "negate": false}]}, "partitions": [{"treatment": "on", "size": 
0}, {"treatment": "off", "size": 100}], "label": "default rule"}], "configurations": {}, "sets": [], "impressionsDisabled": false}]}, "rbs": {"t": 1675259356568, "s": -1, "d": [{"changeNumber": 5, "name": "sample_rule_based_segment", "status": "ACTIVE", "trafficTypeName": "user", "excluded": {"keys": ["mauro@split.io", "gaston@split.io"], "segments": []}, "conditions": [{"matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": "email"}, "matcherType": "ENDS_WITH", "negate": false, "whitelistMatcherData": {"whitelist": ["@split.io"]}}]}}]}]}} \ No newline at end of file diff --git a/tests/integration/files/split_old_spec.json b/tests/integration/files/split_old_spec.json new file mode 100644 index 00000000..0d7edf86 --- /dev/null +++ b/tests/integration/files/split_old_spec.json @@ -0,0 +1,328 @@ +{ + "splits": [ + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "whitelist_feature", + "seed": -1222652054, + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "defaultTreatment": "off", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "whitelisted_user" + ] + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + } + ] + }, + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ] + } + ], + "sets": ["set1", "set2"] + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "all_feature", + "seed": 1699838640, + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "defaultTreatment": "off", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ] + } + ], + "sets": ["set4"] + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "killed_feature", + "seed": -480091424, + "status": "ACTIVE", + "killed": true, + "changeNumber": 123, + "defaultTreatment": "defTreatment", + "configurations": { + "off": "{\"size\":15,\"test\":20}", + "defTreatment": "{\"size\":15,\"defTreatment\":true}" + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "defTreatment", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ] + } + ], + "sets": ["set3"] + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "sample_feature", + "seed": 1548363147, + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "defaultTreatment": "off", + "configurations": { + "on": "{\"size\":15,\"test\":20}" + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "IN_SEGMENT", + "negate": false, + 
"userDefinedSegmentMatcherData": { + "segmentName": "employees" + }, + "whitelistMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + } + ] + }, + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "IN_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "human_beigns" + }, + "whitelistMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 30 + }, + { + "treatment": "off", + "size": 70 + } + ] + } + ], + "sets": ["set1"] + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "dependency_test", + "seed": 1222652054, + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "defaultTreatment": "off", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "IN_SPLIT_TREATMENT", + "negate": false, + "userDefinedSegmentMatcherData": null, + "dependencyMatcherData": { + "split": "all_feature", + "treatments": ["on"] + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ] + } + ], + "sets": [] + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "regex_test", + "seed": 1222652051, + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "defaultTreatment": "off", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "MATCHES_STRING", + "negate": false, + "userDefinedSegmentMatcherData": null, + "stringMatcherData": "abc[0-9]" + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ] + } + ], + "sets": [] + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "boolean_test", + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "seed": 12321809, + "defaultTreatment": "off", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "EQUAL_TO_BOOLEAN", + "negate": false, + "userDefinedSegmentMatcherData": null, + "booleanMatcherData": true + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ] + } + ], + "sets": [] + } + ], + "since": -1, + "till": 1457726098069 +} \ No newline at end of file diff --git a/tests/integration/test_client_e2e.py b/tests/integration/test_client_e2e.py index b1f5d836..f16352e3 100644 --- a/tests/integration/test_client_e2e.py +++ b/tests/integration/test_client_e2e.py @@ -1,5 +1,6 @@ """Client integration tests.""" # pylint: disable=protected-access,line-too-long,no-self-use +from asyncio import Queue import json import os import threading @@ -41,6 +42,7 @@ from splitio.sync.synchronizer import PluggableSynchronizer, PluggableSynchronizerAsync from splitio.sync.telemetry import RedisTelemetrySubmitter, RedisTelemetrySubmitterAsync +from tests.helpers.mockserver import SplitMockServer from tests.integration import splits_json from tests.storage.test_pluggable import StorageMockAdapter, StorageMockAdapterAsync @@ -99,7 +101,7 @@ def _validate_last_events(client, *to_validate): as_tup_set = set((i.key, i.traffic_type_name, i.event_type_id, i.value, str(i.properties)) for i in events) assert as_tup_set == set(to_validate) -def _get_treatment(factory): +def _get_treatment(factory, skip_rbs=False): """Test client.get_treatment().""" 
try: client = factory.client() @@ -156,6 +158,9 @@ def _get_treatment(factory): if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): _validate_last_impressions(client, ('regex_test', 'abc4', 'on')) + if skip_rbs: + return + # test rule based segment matcher assert client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): @@ -419,7 +424,7 @@ def _track(factory): ('user1', 'user', 'conversion', 1, "{'prop1': 'value1'}") ) -def _manager_methods(factory): +def _manager_methods(factory, skip_rbs=False): """Test manager.split/splits.""" try: manager = factory.manager() @@ -450,6 +455,11 @@ def _manager_methods(factory): assert result.change_number == 123 assert result.configs['on'] == '{"size":15,"test":20}' + if skip_rbs: + assert len(manager.split_names()) == 7 + assert len(manager.splits()) == 7 + return + assert len(manager.split_names()) == 8 assert len(manager.splits()) == 8 @@ -745,6 +755,159 @@ def test_track(self): """Test client.track().""" _track(self.factory) +class InMemoryOldSpecIntegrationTests(object): + """Inmemory storage-based integration tests.""" + + def setup_method(self): + """Prepare storages with test data.""" + + split_fn = os.path.join(os.path.dirname(__file__), 'files', 'split_old_spec.json') + with open(split_fn, 'r') as flo: + data = json.loads(flo.read()) + + split_changes = { + -1: data, + 1457726098069: {"splits": [], "till": 1457726098069, "since": 1457726098069} + } + + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') + with open(segment_fn, 'r') as flo: + segment_employee = json.loads(flo.read()) + + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentHumanBeignsChanges.json') + with open(segment_fn, 'r') as flo: + segment_human = json.loads(flo.read()) + + segment_changes = { + ("employees", -1): segment_employee, + ("employees", 1457474612832): {"name": "employees","added": [],"removed": [],"since": 1457474612832,"till": 1457474612832}, + ("human_beigns", -1): segment_human, + ("human_beigns", 1457102183278): {"name": "employees","added": [],"removed": [],"since": 1457102183278,"till": 1457102183278}, + } + + split_backend_requests = Queue() + self.split_backend = SplitMockServer(split_changes, segment_changes, split_backend_requests, + {'auth_response': {'pushEnabled': False}}, True) + self.split_backend.start() + + kwargs = { + 'sdk_api_base_url': 'http://localhost:%d/api' % self.split_backend.port(), + 'events_api_base_url': 'http://localhost:%d/api' % self.split_backend.port(), + 'auth_api_base_url': 'http://localhost:%d/api' % self.split_backend.port(), + 'config': {'connectTimeout': 10000, 'streamingEnabled': False, 'impressionsMode': 'debug'} + } + + self.factory = get_factory('some_apikey', **kwargs) + self.factory.block_until_ready(1) + assert self.factory.ready + + def teardown_method(self): + """Shut down the factory.""" + event = threading.Event() + self.factory.destroy(event) + event.wait() + self.split_backend.stop() + time.sleep(1) + + def test_get_treatment(self): + """Test client.get_treatment().""" + _get_treatment(self.factory, True) + + def test_get_treatment_with_config(self): + """Test client.get_treatment_with_config().""" + _get_treatment_with_config(self.factory) + + def test_get_treatments(self): + _get_treatments(self.factory) + # testing multiple splitNames + client = self.factory.client() + result = 
client.get_treatments('invalidKey', [ + 'all_feature', + 'killed_feature', + 'invalid_feature', + 'sample_feature' + ]) + assert len(result) == 4 + assert result['all_feature'] == 'on' + assert result['killed_feature'] == 'defTreatment' + assert result['invalid_feature'] == 'control' + assert result['sample_feature'] == 'off' + _validate_last_impressions( + client, + ('all_feature', 'invalidKey', 'on'), + ('killed_feature', 'invalidKey', 'defTreatment'), + ('sample_feature', 'invalidKey', 'off') + ) + + def test_get_treatments_with_config(self): + """Test client.get_treatments_with_config().""" + _get_treatments_with_config(self.factory) + # testing multiple splitNames + client = self.factory.client() + result = client.get_treatments_with_config('invalidKey', [ + 'all_feature', + 'killed_feature', + 'invalid_feature', + 'sample_feature' + ]) + assert len(result) == 4 + assert result['all_feature'] == ('on', None) + assert result['killed_feature'] == ('defTreatment', '{"size":15,"defTreatment":true}') + assert result['invalid_feature'] == ('control', None) + assert result['sample_feature'] == ('off', None) + _validate_last_impressions( + client, + ('all_feature', 'invalidKey', 'on'), + ('killed_feature', 'invalidKey', 'defTreatment'), + ('sample_feature', 'invalidKey', 'off'), + ) + + def test_get_treatments_by_flag_set(self): + """Test client.get_treatments_by_flag_set().""" + _get_treatments_by_flag_set(self.factory) + + def test_get_treatments_by_flag_sets(self): + """Test client.get_treatments_by_flag_sets().""" + _get_treatments_by_flag_sets(self.factory) + client = self.factory.client() + result = client.get_treatments_by_flag_sets('user1', ['set1', 'set2', 'set4']) + assert len(result) == 3 + assert result == {'sample_feature': 'on', + 'whitelist_feature': 'off', + 'all_feature': 'on' + } + _validate_last_impressions(client, ('sample_feature', 'user1', 'on'), + ('whitelist_feature', 'user1', 'off'), + ('all_feature', 'user1', 'on') + ) + + def test_get_treatments_with_config_by_flag_set(self): + """Test client.get_treatments_with_config_by_flag_set().""" + _get_treatments_with_config_by_flag_set(self.factory) + + def test_get_treatments_with_config_by_flag_sets(self): + """Test client.get_treatments_with_config_by_flag_sets().""" + _get_treatments_with_config_by_flag_sets(self.factory) + client = self.factory.client() + result = client.get_treatments_with_config_by_flag_sets('user1', ['set1', 'set2', 'set4']) + assert len(result) == 3 + assert result == {'sample_feature': ('on', '{"size":15,"test":20}'), + 'whitelist_feature': ('off', None), + 'all_feature': ('on', None) + } + _validate_last_impressions(client, ('sample_feature', 'user1', 'on'), + ('whitelist_feature', 'user1', 'off'), + ('all_feature', 'user1', 'on') + ) + + def test_track(self): + """Test client.track().""" + _track(self.factory) + + def test_manager_methods(self): + """Test manager.split/splits.""" + _manager_methods(self.factory, True) + class RedisIntegrationTests(object): """Redis storage-based integration tests.""" @@ -2423,6 +2586,194 @@ async def test_track(self): await _track_async(self.factory) await self.factory.destroy() +class InMemoryOldSpecIntegrationAsyncTests(object): + """Inmemory storage-based integration tests.""" + + def setup_method(self): + self.setup_task = asyncio.get_event_loop().create_task(self._setup_method()) + + async def _setup_method(self): + """Prepare storages with test data.""" + + split_fn = os.path.join(os.path.dirname(__file__), 'files', 'split_old_spec.json') + with 
open(split_fn, 'r') as flo: + data = json.loads(flo.read()) + + split_changes = { + -1: data, + 1457726098069: {"splits": [], "till": 1457726098069, "since": 1457726098069} + } + + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') + with open(segment_fn, 'r') as flo: + segment_employee = json.loads(flo.read()) + + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentHumanBeignsChanges.json') + with open(segment_fn, 'r') as flo: + segment_human = json.loads(flo.read()) + + segment_changes = { + ("employees", -1): segment_employee, + ("employees", 1457474612832): {"name": "employees","added": [],"removed": [],"since": 1457474612832,"till": 1457474612832}, + ("human_beigns", -1): segment_human, + ("human_beigns", 1457102183278): {"name": "employees","added": [],"removed": [],"since": 1457102183278,"till": 1457102183278}, + } + + split_backend_requests = Queue() + self.split_backend = SplitMockServer(split_changes, segment_changes, split_backend_requests, + {'auth_response': {'pushEnabled': False}}, True) + self.split_backend.start() + + kwargs = { + 'sdk_api_base_url': 'http://localhost:%d/api' % self.split_backend.port(), + 'events_api_base_url': 'http://localhost:%d/api' % self.split_backend.port(), + 'auth_api_base_url': 'http://localhost:%d/api' % self.split_backend.port(), + 'config': {'connectTimeout': 10000, 'streamingEnabled': False, 'impressionsMode': 'debug'} + } + + self.factory = await get_factory_async('some_apikey', **kwargs) + await self.factory.block_until_ready(1) + assert self.factory.ready + + @pytest.mark.asyncio + async def test_get_treatment(self): + """Test client.get_treatment().""" + await self.setup_task + await _get_treatment_async(self.factory, True) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatment_with_config(self): + """Test client.get_treatment_with_config().""" + await self.setup_task + await _get_treatment_with_config_async(self.factory) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatments(self): + await self.setup_task + await _get_treatments_async(self.factory) + # testing multiple splitNames + client = self.factory.client() + result = await client.get_treatments('invalidKey', [ + 'all_feature', + 'killed_feature', + 'invalid_feature', + 'sample_feature' + ]) + assert len(result) == 4 + assert result['all_feature'] == 'on' + assert result['killed_feature'] == 'defTreatment' + assert result['invalid_feature'] == 'control' + assert result['sample_feature'] == 'off' + await _validate_last_impressions_async( + client, + ('all_feature', 'invalidKey', 'on'), + ('killed_feature', 'invalidKey', 'defTreatment'), + ('sample_feature', 'invalidKey', 'off') + ) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatments_with_config(self): + """Test client.get_treatments_with_config().""" + await self.setup_task + await _get_treatments_with_config_async(self.factory) + # testing multiple splitNames + client = self.factory.client() + result = await client.get_treatments_with_config('invalidKey', [ + 'all_feature', + 'killed_feature', + 'invalid_feature', + 'sample_feature' + ]) + assert len(result) == 4 + assert result['all_feature'] == ('on', None) + assert result['killed_feature'] == ('defTreatment', '{"size":15,"defTreatment":true}') + assert result['invalid_feature'] == ('control', None) + assert result['sample_feature'] == 
('off', None) + await _validate_last_impressions_async( + client, + ('all_feature', 'invalidKey', 'on'), + ('killed_feature', 'invalidKey', 'defTreatment'), + ('sample_feature', 'invalidKey', 'off'), + ) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatments_by_flag_set(self): + """Test client.get_treatments_by_flag_set().""" + await self.setup_task + await _get_treatments_by_flag_set_async(self.factory) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatments_by_flag_sets(self): + """Test client.get_treatments_by_flag_sets().""" + await self.setup_task + await _get_treatments_by_flag_sets_async(self.factory) + client = self.factory.client() + result = await client.get_treatments_by_flag_sets('user1', ['set1', 'set2', 'set4']) + assert len(result) == 3 + assert result == {'sample_feature': 'on', + 'whitelist_feature': 'off', + 'all_feature': 'on' + } + await _validate_last_impressions_async(client, ('sample_feature', 'user1', 'on'), + ('whitelist_feature', 'user1', 'off'), + ('all_feature', 'user1', 'on') + ) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatments_with_config_by_flag_set(self): + """Test client.get_treatments_with_config_by_flag_set().""" + await self.setup_task + await _get_treatments_with_config_by_flag_set_async(self.factory) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatments_with_config_by_flag_sets(self): + """Test client.get_treatments_with_config_by_flag_sets().""" + await self.setup_task + await _get_treatments_with_config_by_flag_sets_async(self.factory) + client = self.factory.client() + result = await client.get_treatments_with_config_by_flag_sets('user1', ['set1', 'set2', 'set4']) + assert len(result) == 3 + assert result == {'sample_feature': ('on', '{"size":15,"test":20}'), + 'whitelist_feature': ('off', None), + 'all_feature': ('on', None) + } + await _validate_last_impressions_async(client, ('sample_feature', 'user1', 'on'), + ('whitelist_feature', 'user1', 'off'), + ('all_feature', 'user1', 'on') + ) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_track(self): + """Test client.track().""" + await self.setup_task + await _track_async(self.factory) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_manager_methods(self): + """Test manager.split/splits.""" + await self.setup_task + await _manager_methods_async(self.factory, True) + await self.factory.destroy() + self.split_backend.stop() + class RedisIntegrationAsyncTests(object): """Redis storage-based integration tests.""" @@ -3992,7 +4343,7 @@ async def clear_cache(self): redis_client = await build_async(DEFAULT_CONFIG.copy()) for key in keys_to_delete: await redis_client.delete(key) - + async def _validate_last_impressions_async(client, *to_validate): """Validate the last N impressions are present disregarding the order.""" imp_storage = client._factory._get_storage('impressions') @@ -4048,7 +4399,7 @@ async def _validate_last_events_async(client, *to_validate): as_tup_set = set((i.key, i.traffic_type_name, i.event_type_id, i.value, str(i.properties)) for i in events) assert as_tup_set == set(to_validate) -async def _get_treatment_async(factory): +async def _get_treatment_async(factory, skip_rbs=False): """Test client.get_treatment().""" try: client = 
factory.client() @@ -4105,6 +4456,9 @@ async def _get_treatment_async(factory): if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): await _validate_last_impressions_async(client, ('regex_test', 'abc4', 'on')) + if skip_rbs: + return + # test rule based segment matcher assert await client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): @@ -4368,7 +4722,7 @@ async def _track_async(factory): ('user1', 'user', 'conversion', 1, "{'prop1': 'value1'}") ) -async def _manager_methods_async(factory): +async def _manager_methods_async(factory, skip_rbs=False): """Test manager.split/splits.""" try: manager = factory.manager() @@ -4399,5 +4753,10 @@ async def _manager_methods_async(factory): assert result.change_number == 123 assert result.configs['on'] == '{"size":15,"test":20}' + if skip_rbs: + assert len(await manager.split_names()) == 7 + assert len(await manager.splits()) == 7 + return + assert len(await manager.split_names()) == 8 assert len(await manager.splits()) == 8 diff --git a/tests/models/grammar/test_matchers.py b/tests/models/grammar/test_matchers.py index 12de99e8..680a8cc7 100644 --- a/tests/models/grammar/test_matchers.py +++ b/tests/models/grammar/test_matchers.py @@ -12,6 +12,7 @@ from splitio.models.grammar import matchers from splitio.models import splits +from splitio.models import rule_based_segments from splitio.models.grammar import condition from splitio.models.grammar.matchers.utils.utils import Semver from splitio.storage import SegmentStorage @@ -404,9 +405,9 @@ def test_matcher_behaviour(self, mocker): matcher = matchers.UserDefinedSegmentMatcher(self.raw) # Test that if the key if the storage wrapper finds the key in the segment, it matches. - assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([],{'some_segment': True}, {}, {})}) is True + assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([],{'some_segment': True}, {})}) is True # Test that if the key if the storage wrapper doesn't find the key in the segment, it fails. 
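For orientation: the reworked EvaluationContext carries exactly three fields (flags, segment_memberships, rbs_segments); the last maps each rule based segment name to its parsed object, replacing the two separate membership and condition dicts. A minimal sketch of building a test context in that shape, mirroring the updated assertions that follow (illustrative only, not part of the patch):

    from splitio.engine.evaluator import EvaluationContext

    # flags: feature flags by name; segment_memberships: segment name -> bool for the
    # key under test; rbs_segments: rule based segment name -> parsed segment object.
    ec = EvaluationContext(flags={}, segment_memberships={'some_segment': True}, rbs_segments={})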
- assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([], {'some_segment': False}, {}, {})}) is False + assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([], {'some_segment': False}, {})}) is False def test_to_json(self): """Test that the object serializes to JSON properly.""" @@ -1095,3 +1096,44 @@ def test_to_str(self): """Test that the object serializes to str properly.""" as_str = matchers.InListSemverMatcher(self.raw) assert str(as_str) == "in list semver ['2.1.8', '2.1.11']" + +class RuleBasedMatcherTests(MatcherTestsBase): + """Rule based segment matcher test cases.""" + + raw = { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": False, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + + def test_from_raw(self, mocker): + """Test parsing from raw json/dict.""" + parsed = matchers.from_raw(self.raw) + assert isinstance(parsed, matchers.RuleBasedSegmentMatcher) + + def test_to_json(self): + """Test that the object serializes to JSON properly.""" + as_json = matchers.RuleBasedSegmentMatcher(self.raw).to_json() + assert as_json['matcherType'] == 'IN_RULE_BASED_SEGMENT' + + def test_matcher_behaviour(self, mocker): + """Test if the matcher works properly.""" + rbs_segments = os.path.join(os.path.dirname(__file__), '../../engine/files', 'rule_base_segments3.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + matcher = matchers.RuleBasedSegmentMatcher(self.raw) + ec = {'ec': EvaluationContext( + {}, + {"segment1": False}, + {"sample_rule_based_segment": rbs} + )} + assert matcher._match(None, context=ec) is False + assert matcher._match('bilal@split.io', context=ec) is False + assert matcher._match('bilal@split.io', {'email': 'bilal@split.io'}, context=ec) is True \ No newline at end of file diff --git a/tests/models/test_rule_based_segments.py b/tests/models/test_rule_based_segments.py index 3ad36773..98e35fe8 100644 --- a/tests/models/test_rule_based_segments.py +++ b/tests/models/test_rule_based_segments.py @@ -1,9 +1,9 @@ """Split model tests module.""" import copy -import pytest from splitio.models import rule_based_segments from splitio.models import splits from splitio.models.grammar.condition import Condition +from splitio.models.grammar.matchers.rule_based_segment import RuleBasedSegmentMatcher class RuleBasedSegmentModelTests(object): """Rule based segment model tests.""" @@ -100,5 +100,4 @@ def test_get_condition_segment_names(self): }) rbs = rule_based_segments.from_raw(rbs) - assert rbs.get_condition_segment_names() == {"employees"} - \ No newline at end of file + assert rbs.get_condition_segment_names() == {"employees"} \ No newline at end of file diff --git a/tests/storage/test_redis.py b/tests/storage/test_redis.py index 04ddfc60..4537998c 100644 --- a/tests/storage/test_redis.py +++ b/tests/storage/test_redis.py @@ -1289,6 +1289,25 @@ def test_contains(self, mocker): assert not storage.contains(['segment1', 'segment4']) assert storage.contains(['segment1']) assert not storage.contains(['segment4', 'segment5']) + + def test_fetch_many(self, mocker): + """Test retrieving a list of passed splits.""" + adapter = mocker.Mock(spec=RedisAdapter) + storage = RedisRuleBasedSegmentsStorage(adapter) + from_raw = mocker.Mock() + mocker.patch('splitio.storage.redis.rule_based_segments.from_raw', new=from_raw) + + adapter.mget.return_value = ['{"name": 
"rbs1"}', '{"name": "rbs2"}', None] + + result = storage.fetch_many(['rbs1', 'rbs2', 'rbs3']) + assert len(result) == 3 + + assert mocker.call({'name': 'rbs1'}) in from_raw.mock_calls + assert mocker.call({'name': 'rbs2'}) in from_raw.mock_calls + + assert result['rbs1'] is not None + assert result['rbs2'] is not None + assert 'rbs3' in result class RedisRuleBasedSegmentStorageAsyncTests(object): """Redis rule based segment storage test cases.""" @@ -1391,3 +1410,25 @@ async def keys(sel, key): assert not await storage.contains(['segment1', 'segment4']) assert await storage.contains(['segment1']) assert not await storage.contains(['segment4', 'segment5']) + + @pytest.mark.asyncio + async def test_fetch_many(self, mocker): + """Test retrieving a list of passed splits.""" + adapter = mocker.Mock(spec=RedisAdapter) + storage = RedisRuleBasedSegmentsStorageAsync(adapter) + from_raw = mocker.Mock() + mocker.patch('splitio.storage.redis.rule_based_segments.from_raw', new=from_raw) + async def mget(*_): + return ['{"name": "rbs1"}', '{"name": "rbs2"}', None] + adapter.mget = mget + + result = await storage.fetch_many(['rbs1', 'rbs2', 'rbs3']) + assert len(result) == 3 + + assert mocker.call({'name': 'rbs1'}) in from_raw.mock_calls + assert mocker.call({'name': 'rbs2'}) in from_raw.mock_calls + + assert result['rbs1'] is not None + assert result['rbs2'] is not None + assert 'rbs3' in result + diff --git a/tests/sync/test_segments_synchronizer.py b/tests/sync/test_segments_synchronizer.py index 5a6ef849..e88db2fa 100644 --- a/tests/sync/test_segments_synchronizer.py +++ b/tests/sync/test_segments_synchronizer.py @@ -5,10 +5,11 @@ from splitio.util.backoff import Backoff from splitio.api import APIException from splitio.api.commons import FetchOptions -from splitio.storage import SplitStorage, SegmentStorage +from splitio.storage import SplitStorage, SegmentStorage, RuleBasedSegmentsStorage from splitio.storage.inmemmory import InMemorySegmentStorage, InMemorySegmentStorageAsync, InMemorySplitStorage, InMemorySplitStorageAsync from splitio.sync.segment import SegmentSynchronizer, SegmentSynchronizerAsync, LocalSegmentSynchronizer, LocalSegmentSynchronizerAsync from splitio.models.segments import Segment +from splitio.models import rule_based_segments from splitio.optional.loaders import aiofiles, asyncio import pytest @@ -23,6 +24,8 @@ def test_synchronize_segments_error(self, mocker): storage = mocker.Mock(spec=SegmentStorage) storage.get_change_number.return_value = -1 + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] api = mocker.Mock() @@ -30,7 +33,7 @@ def run(x): raise APIException("something broke") api.fetch_segment.side_effect = run - segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizer(api, split_storage, storage, rbs_storage) assert not segments_synchronizer.synchronize_segments() def test_synchronize_segments(self, mocker): @@ -38,6 +41,10 @@ def test_synchronize_segments(self, mocker): split_storage = mocker.Mock(spec=SplitStorage) split_storage.get_segment_names.return_value = ['segmentA', 'segmentB', 'segmentC'] + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = ['rbs'] + rbs_storage.get.return_value = rule_based_segments.from_raw({'name': 'rbs', 'conditions': [], 'trafficTypeName': 'user', 'changeNumber': 123, 'status': 'ACTIVE', 'excluded': {'keys': [], 'segments': [{'type': 'standard', 'name': 
'segmentD'}]}}) + # Setup a mocked segment storage whose changenumber returns -1 on first fetch and # 123 afterwards. storage = mocker.Mock(spec=SegmentStorage) @@ -52,10 +59,14 @@ def change_number_mock(segment_name): if segment_name == 'segmentC' and change_number_mock._count_c == 0: change_number_mock._count_c = 1 return -1 + if segment_name == 'segmentD' and change_number_mock._count_d == 0: + change_number_mock._count_d = 1 + return -1 return 123 change_number_mock._count_a = 0 change_number_mock._count_b = 0 change_number_mock._count_c = 0 + change_number_mock._count_d = 0 storage.get_change_number.side_effect = change_number_mock # Setup a mocked segment api to return segments mentioned before. @@ -72,27 +83,35 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_segment_mock._count_c = 1 return {'name': 'segmentC', 'added': ['key7', 'key8', 'key9'], 'removed': [], 'since': -1, 'till': 123} + if segment_name == 'segmentD' and fetch_segment_mock._count_d == 0: + fetch_segment_mock._count_d = 1 + return {'name': 'segmentD', 'added': ['key10'], 'removed': [], + 'since': -1, 'till': 123} return {'added': [], 'removed': [], 'since': 123, 'till': 123} fetch_segment_mock._count_a = 0 fetch_segment_mock._count_b = 0 fetch_segment_mock._count_c = 0 + fetch_segment_mock._count_d = 0 api = mocker.Mock() api.fetch_segment.side_effect = fetch_segment_mock - segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizer(api, split_storage, storage, rbs_storage) assert segments_synchronizer.synchronize_segments() api_calls = [call for call in api.fetch_segment.mock_calls] + assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None, None)) in api_calls assert mocker.call('segmentB', -1, FetchOptions(True, None, None, None, None)) in api_calls assert mocker.call('segmentC', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentD', -1, FetchOptions(True, None, None, None, None)) in api_calls assert mocker.call('segmentA', 123, FetchOptions(True, None, None, None, None)) in api_calls assert mocker.call('segmentB', 123, FetchOptions(True, None, None, None, None)) in api_calls assert mocker.call('segmentC', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentD', 123, FetchOptions(True, None, None, None, None)) in api_calls segment_put_calls = storage.put.mock_calls - segments_to_validate = set(['segmentA', 'segmentB', 'segmentC']) + segments_to_validate = set(['segmentA', 'segmentB', 'segmentC', 'segmentD']) for call in segment_put_calls: _, positional_args, _ = call segment = positional_args[0] @@ -104,6 +123,8 @@ def test_synchronize_segment(self, mocker): """Test particular segment update.""" split_storage = mocker.Mock(spec=SplitStorage) storage = mocker.Mock(spec=SegmentStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] def change_number_mock(segment_name): if change_number_mock._count_a == 0: @@ -124,7 +145,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): api = mocker.Mock() api.fetch_segment.side_effect = fetch_segment_mock - segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizer(api, split_storage, storage, rbs_storage) segments_synchronizer.synchronize_segment('segmentA') api_calls = [call for call in api.fetch_segment.mock_calls] @@ -137,6 +158,8 @@ def 
test_synchronize_segment_cdn(self, mocker): split_storage = mocker.Mock(spec=SplitStorage) storage = mocker.Mock(spec=SegmentStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] def change_number_mock(segment_name): change_number_mock._count_a += 1 @@ -170,7 +193,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): api = mocker.Mock() api.fetch_segment.side_effect = fetch_segment_mock - segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizer(api, split_storage, storage, rbs_storage) segments_synchronizer.synchronize_segment('segmentA') assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None, None)) in api.fetch_segment.mock_calls @@ -183,7 +206,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): def test_recreate(self, mocker): """Test recreate logic.""" - segments_synchronizer = SegmentSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock()) + segments_synchronizer = SegmentSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) current_pool = segments_synchronizer._worker_pool segments_synchronizer.recreate() assert segments_synchronizer._worker_pool != current_pool @@ -196,6 +219,11 @@ class SegmentsSynchronizerAsyncTests(object): async def test_synchronize_segments_error(self, mocker): """On error.""" split_storage = mocker.Mock(spec=SplitStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + + async def get_segment_names_rbs(): + return [] + rbs_storage.get_segment_names = get_segment_names_rbs async def get_segment_names(): return ['segmentA', 'segmentB', 'segmentC'] @@ -215,7 +243,7 @@ async def run(*args): raise APIException("something broke") api.fetch_segment = run - segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) assert not await segments_synchronizer.synchronize_segments() await segments_synchronizer.shutdown() @@ -227,6 +255,15 @@ async def get_segment_names(): return ['segmentA', 'segmentB', 'segmentC'] split_storage.get_segment_names = get_segment_names + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + async def get_segment_names_rbs(): + return ['rbs'] + rbs_storage.get_segment_names = get_segment_names_rbs + + async def get_rbs(segment_name): + return rule_based_segments.from_raw({'name': 'rbs', 'conditions': [], 'trafficTypeName': 'user', 'changeNumber': 123, 'status': 'ACTIVE', 'excluded': {'keys': [], 'segments': [{'type': 'standard', 'name': 'segmentD'}]}}) + rbs_storage.get = get_rbs + # Setup a mocked segment storage whose changenumber returns -1 on first fetch and # 123 afterwards. 
storage = mocker.Mock(spec=SegmentStorage) @@ -241,10 +278,14 @@ async def change_number_mock(segment_name): if segment_name == 'segmentC' and change_number_mock._count_c == 0: change_number_mock._count_c = 1 return -1 + if segment_name == 'segmentD' and change_number_mock._count_d == 0: + change_number_mock._count_d = 1 + return -1 return 123 change_number_mock._count_a = 0 change_number_mock._count_b = 0 change_number_mock._count_c = 0 + change_number_mock._count_d = 0 storage.get_change_number = change_number_mock self.segment_put = [] @@ -276,25 +317,36 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_segment_mock._count_c = 1 return {'name': 'segmentC', 'added': ['key7', 'key8', 'key9'], 'removed': [], 'since': -1, 'till': 123} + if segment_name == 'segmentD' and fetch_segment_mock._count_d == 0: + fetch_segment_mock._count_d = 1 + return {'name': 'segmentD', 'added': ['key10'], 'removed': [], + 'since': -1, 'till': 123} return {'added': [], 'removed': [], 'since': 123, 'till': 123} fetch_segment_mock._count_a = 0 fetch_segment_mock._count_b = 0 fetch_segment_mock._count_c = 0 + fetch_segment_mock._count_d = 0 api = mocker.Mock() api.fetch_segment = fetch_segment_mock - segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) assert await segments_synchronizer.synchronize_segments() - assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None, None)) - assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None, None)) - assert (self.segment[2], self.change[2], self.options[2]) == ('segmentB', -1, FetchOptions(True, None, None, None, None)) - assert (self.segment[3], self.change[3], self.options[3]) == ('segmentB', 123, FetchOptions(True, None, None, None, None)) - assert (self.segment[4], self.change[4], self.options[4]) == ('segmentC', -1, FetchOptions(True, None, None, None, None)) - assert (self.segment[5], self.change[5], self.options[5]) == ('segmentC', 123, FetchOptions(True, None, None, None, None)) - - segments_to_validate = set(['segmentA', 'segmentB', 'segmentC']) + api_calls = [] + for i in range(8): + api_calls.append((self.segment[i], self.change[i], self.options[i])) + + assert ('segmentD', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentD', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentA', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentA', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentB', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentB', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentC', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentC', 123, FetchOptions(True, None, None, None, None)) in api_calls + + segments_to_validate = set(['segmentA', 'segmentB', 'segmentC', 'segmentD']) for segment in self.segment_put: assert isinstance(segment, Segment) assert segment.name in segments_to_validate @@ -307,6 +359,11 @@ async def test_synchronize_segment(self, mocker): """Test particular segment update.""" split_storage = mocker.Mock(spec=SplitStorage) storage = mocker.Mock(spec=SegmentStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + + async def get_segment_names_rbs(): + return [] 
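+ # The async storage's get_segment_names is awaited by the synchronizer, so the stub must be a coroutine.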
+ rbs_storage.get_segment_names = get_segment_names_rbs async def change_number_mock(segment_name): if change_number_mock._count_a == 0: @@ -340,7 +397,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): api = mocker.Mock() api.fetch_segment = fetch_segment_mock - segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) await segments_synchronizer.synchronize_segment('segmentA') assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None, None)) @@ -355,6 +412,11 @@ async def test_synchronize_segment_cdn(self, mocker): split_storage = mocker.Mock(spec=SplitStorage) storage = mocker.Mock(spec=SegmentStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + + async def get_segment_names_rbs(): + return [] + rbs_storage.get_segment_names = get_segment_names_rbs async def change_number_mock(segment_name): change_number_mock._count_a += 1 @@ -400,7 +462,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): api = mocker.Mock() api.fetch_segment = fetch_segment_mock - segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) await segments_synchronizer.synchronize_segment('segmentA') assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None, None)) @@ -415,7 +477,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): @pytest.mark.asyncio async def test_recreate(self, mocker): """Test recreate logic.""" - segments_synchronizer = SegmentSynchronizerAsync(mocker.Mock(), mocker.Mock(), mocker.Mock()) + segments_synchronizer = SegmentSynchronizerAsync(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) current_pool = segments_synchronizer._worker_pool await segments_synchronizer.shutdown() segments_synchronizer.recreate() diff --git a/tests/sync/test_splits_synchronizer.py b/tests/sync/test_splits_synchronizer.py index 3afb1f0d..c0ea38fb 100644 --- a/tests/sync/test_splits_synchronizer.py +++ b/tests/sync/test_splits_synchronizer.py @@ -499,7 +499,7 @@ class SplitsSynchronizerAsyncTests(object): async def test_synchronize_splits_error(self, mocker): """Test that if fetching splits fails at some point, the task will continue running.""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) api = mocker.Mock() async def run(x, y, c): @@ -531,7 +531,7 @@ def intersect(sets): async def test_synchronize_splits(self, mocker): """Test split sync.""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) async def change_number_mock(): change_number_mock._calls += 1 @@ -571,6 +571,16 @@ async def update(parsed_rbs, deleted, change_number): self.parsed_rbs = parsed_rbs rbs_storage.update = update + self.clear = False + async def clear(): + self.clear = True + storage.clear = clear + + self.clear2 = False + async def clear_rbs(): + self.clear2 = True + rbs_storage.clear = clear_rbs + api = mocker.Mock() self.change_number_1 = None self.fetch_options_1 = None @@ -599,6 +609,7 @@ async def get_changes(change_number,
rbs_change_number, fetch_options): } get_changes.called = 0 api.fetch_splits = get_changes + api.clear_storage = False split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) await split_synchronizer.synchronize_splits() @@ -618,7 +629,7 @@ async def get_changes(change_number, rbs_change_number, fetch_options): async def test_not_called_on_till(self, mocker): """Test that sync is not called when till is less than previous changenumber""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) class flag_set_filter(): def should_filter(): @@ -651,7 +662,7 @@ async def test_synchronize_splits_cdn(self, mocker): """Test split sync with bypassing cdn.""" mocker.patch('splitio.sync.split._ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES', new=3) storage = mocker.Mock(spec=InMemorySplitStorageAsync) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) async def change_number_mock(): change_number_mock._calls += 1 if change_number_mock._calls == 1: @@ -741,6 +752,16 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] + self.clear = False + async def clear(): + self.clear = True + storage.clear = clear + + self.clear2 = False + async def clear_rbs(): + self.clear2 = True + rbs_storage.clear = clear_rbs + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) await split_synchronizer.synchronize_splits() diff --git a/tests/sync/test_synchronizer.py b/tests/sync/test_synchronizer.py index 42985e4c..60ab7993 100644 --- a/tests/sync/test_synchronizer.py +++ b/tests/sync/test_synchronizer.py @@ -106,6 +106,8 @@ def test_sync_all_failed_segments(self, mocker): storage = mocker.Mock() split_storage = mocker.Mock(spec=SplitStorage) split_storage.get_segment_names.return_value = ['segmentA'] + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] split_sync = mocker.Mock(spec=SplitSynchronizer) split_sync.synchronize_splits.return_value = None @@ -113,7 +115,7 @@ def run(x, y, c): raise APIException("something broke") api.fetch_segment.side_effect = run - segment_sync = SegmentSynchronizer(api, split_storage, storage) + segment_sync = SegmentSynchronizer(api, split_storage, storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, segment_sync, mocker.Mock(), mocker.Mock(), mocker.Mock()) sychronizer = Synchronizer(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -132,7 +134,7 @@ def test_synchronize_splits(self, mocker): segment_api = mocker.Mock() segment_api.fetch_segment.return_value = {'name': 'segmentA', 'added': ['key1', 'key2', 'key3'], 'removed': [], 'since': 123, 'till': 123} - segment_sync = SegmentSynchronizer(segment_api, split_storage, segment_storage) + segment_sync = SegmentSynchronizer(segment_api, split_storage, segment_storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, segment_sync, mocker.Mock(), mocker.Mock(), mocker.Mock()) synchronizer = Synchronizer(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -176,6 +178,7 @@ def sync_segments(*_): def test_sync_all(self, mocker): split_storage = mocker.Mock(spec=SplitStorage) rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = []
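+ # SegmentSynchronizer now receives the rule-based segments storage as a fourth argument and also syncs the standard segments referenced by stored rule-based segments, so get_segment_names() is stubbed to return an empty list here.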
split_storage.get_change_number.return_value = 123 split_storage.get_segment_names.return_value = ['segmentA'] class flag_set_filter(): def should_filter(): @@ -197,12 +200,18 @@ def intersect(sets): segment_api = mocker.Mock() segment_api.fetch_segment.return_value = {'name': 'segmentA', 'added': ['key1', 'key2', 'key3'], 'removed': [], 'since': 123, 'till': 123} - segment_sync = SegmentSynchronizer(segment_api, split_storage, segment_storage) + segment_sync = SegmentSynchronizer(segment_api, split_storage, segment_storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, segment_sync, mocker.Mock(), mocker.Mock(), mocker.Mock()) synchronizer = Synchronizer(split_synchronizers, mocker.Mock(spec=SplitTasks)) + self.clear = False + def clear(): + self.clear = True + split_storage.clear = clear + rbs_storage.clear = clear + synchronizer.sync_all() inserted_split = split_storage.update.mock_calls[0][1][0][0] @@ -462,7 +472,7 @@ async def test_sync_all_failed_segments(self, mocker): api = mocker.Mock() storage = mocker.Mock() split_storage = mocker.Mock(spec=SplitStorage) - split_storage.get_segment_names.return_value = ['segmentA'] + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) split_sync = mocker.Mock(spec=SplitSynchronizer) split_sync.synchronize_splits.return_value = None @@ -474,7 +484,11 @@ async def get_segment_names(): return ['seg'] split_storage.get_segment_names = get_segment_names - segment_sync = SegmentSynchronizerAsync(api, split_storage, storage) + async def get_segment_names_rbs(): + return [] + rbs_storage.get_segment_names = get_segment_names_rbs + + segment_sync = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, segment_sync, mocker.Mock(), mocker.Mock(), mocker.Mock()) sychronizer = SynchronizerAsync(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -508,7 +522,7 @@ async def fetch_segment(segment_name, change, options): 'key3'], 'removed': [], 'since': 123, 'till': 123} segment_api.fetch_segment = fetch_segment - segment_sync = SegmentSynchronizerAsync(segment_api, split_storage, segment_storage) + segment_sync = SegmentSynchronizerAsync(segment_api, split_storage, segment_storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, segment_sync, mocker.Mock(), mocker.Mock(), mocker.Mock()) synchronizer = SynchronizerAsync(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -613,7 +627,7 @@ async def fetch_segment(segment_name, change, options): 'removed': [], 'since': 123, 'till': 123} segment_api.fetch_segment = fetch_segment - segment_sync = SegmentSynchronizerAsync(segment_api, split_storage, segment_storage) + segment_sync = SegmentSynchronizerAsync(segment_api, split_storage, segment_storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, segment_sync, mocker.Mock(), mocker.Mock(), mocker.Mock()) synchronizer = SynchronizerAsync(split_synchronizers, mocker.Mock(spec=SplitTasks)) diff --git a/tests/tasks/test_segment_sync.py b/tests/tasks/test_segment_sync.py index d5640709..cc701e52 100644 --- a/tests/tasks/test_segment_sync.py +++ b/tests/tasks/test_segment_sync.py @@ -6,7 +6,7 @@ from splitio.api.commons import FetchOptions from splitio.tasks import segment_sync -from splitio.storage import SegmentStorage, SplitStorage +from splitio.storage import SegmentStorage, SplitStorage, RuleBasedSegmentsStorage from splitio.models.splits import Split from splitio.models.segments import Segment from splitio.models.grammar.condition
import Condition @@ -21,6 +21,8 @@ def test_normal_operation(self, mocker): """Test the normal operation flow.""" split_storage = mocker.Mock(spec=SplitStorage) split_storage.get_segment_names.return_value = ['segmentA', 'segmentB', 'segmentC'] + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] # Setup a mocked segment storage whose changenumber returns -1 on first fetch and # 123 afterwards. @@ -65,7 +67,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment.side_effect = fetch_segment_mock - segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizer(api, split_storage, storage, rbs_storage) task = segment_sync.SegmentSynchronizationTask(segments_synchronizer.synchronize_segments, 0.5) task.start() @@ -99,6 +101,8 @@ def test_that_errors_dont_stop_task(self, mocker): """Test that if fetching segments fails at some_point, the task will continue running.""" split_storage = mocker.Mock(spec=SplitStorage) split_storage.get_segment_names.return_value = ['segmentA', 'segmentB', 'segmentC'] + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] # Setup a mocked segment storage whose changenumber returns -1 on first fetch and # 123 afterwards. @@ -142,7 +146,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment.side_effect = fetch_segment_mock - segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizer(api, split_storage, storage, rbs_storage) task = segment_sync.SegmentSynchronizationTask(segments_synchronizer.synchronize_segments, 0.5) task.start() @@ -183,6 +187,11 @@ async def get_segment_names(): return ['segmentA', 'segmentB', 'segmentC'] split_storage.get_segment_names = get_segment_names + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + async def get_segment_names_rbs(): + return [] + rbs_storage.get_segment_names = get_segment_names_rbs + # Setup a mocked segment storage whose changenumber returns -1 on first fetch and # 123 afterwards. 
storage = mocker.Mock(spec=SegmentStorage) @@ -241,7 +250,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment = fetch_segment_mock - segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) task = segment_sync.SegmentSynchronizationTaskAsync(segments_synchronizer.synchronize_segments, 0.5) task.start() @@ -251,12 +260,16 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): await task.stop() assert not task.is_running() - assert (self.segment_name[0], self.change_number[0], self.fetch_options[0]) == ('segmentA', -1, fetch_options) - assert (self.segment_name[1], self.change_number[1], self.fetch_options[1]) == ('segmentA', 123, fetch_options) - assert (self.segment_name[2], self.change_number[2], self.fetch_options[2]) == ('segmentB', -1, fetch_options) - assert (self.segment_name[3], self.change_number[3], self.fetch_options[3]) == ('segmentB', 123, fetch_options) - assert (self.segment_name[4], self.change_number[4], self.fetch_options[4]) == ('segmentC', -1, fetch_options) - assert (self.segment_name[5], self.change_number[5], self.fetch_options[5]) == ('segmentC', 123, fetch_options) + api_calls = [] + for i in range(6): + api_calls.append((self.segment_name[i], self.change_number[i], self.fetch_options[i])) + + assert ('segmentA', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentA', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentB', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentB', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentC', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentC', 123, FetchOptions(True, None, None, None, None)) in api_calls segments_to_validate = set(['segmentA', 'segmentB', 'segmentC']) for segment in self.segments: @@ -272,6 +285,11 @@ async def get_segment_names(): return ['segmentA', 'segmentB', 'segmentC'] split_storage.get_segment_names = get_segment_names + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + async def get_segment_names_rbs(): + return [] + rbs_storage.get_segment_names = get_segment_names_rbs + # Setup a mocked segment storage whose changenumber returns -1 on first fetch and # 123 afterwards. 
storage = mocker.Mock(spec=SegmentStorage) @@ -329,7 +347,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment = fetch_segment_mock - segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) task = segment_sync.SegmentSynchronizationTaskAsync(segments_synchronizer.synchronize_segments, 0.5) task.start() @@ -338,12 +356,16 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): await task.stop() assert not task.is_running() - - assert (self.segment_name[0], self.change_number[0], self.fetch_options[0]) == ('segmentA', -1, fetch_options) - assert (self.segment_name[1], self.change_number[1], self.fetch_options[1]) == ('segmentA', 123, fetch_options) - assert (self.segment_name[2], self.change_number[2], self.fetch_options[2]) == ('segmentB', -1, fetch_options) - assert (self.segment_name[3], self.change_number[3], self.fetch_options[3]) == ('segmentC', -1, fetch_options) - assert (self.segment_name[4], self.change_number[4], self.fetch_options[4]) == ('segmentC', 123, fetch_options) + + api_calls = [] + for i in range(5): + api_calls.append((self.segment_name[i], self.change_number[i], self.fetch_options[i])) + + assert ('segmentA', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentA', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentB', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentC', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentC', 123, FetchOptions(True, None, None, None, None)) in api_calls segments_to_validate = set(['segmentA', 'segmentB', 'segmentC']) for segment in self.segments: diff --git a/tests/tasks/test_split_sync.py b/tests/tasks/test_split_sync.py index c1ec3620..c9a0c692 100644 --- a/tests/tasks/test_split_sync.py +++ b/tests/tasks/test_split_sync.py @@ -73,6 +73,12 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] + self.clear = False + def clear(): + self.clear = True + storage.clear = clear + rbs_storage.clear = clear + api = mocker.Mock() def get_changes(*args, **kwargs): @@ -172,6 +178,12 @@ async def set_change_number(*_): pass change_number_mock._calls = 0 storage.set_change_number = set_change_number + + self.clear = False + async def clear(): + self.clear = True + storage.clear = clear + rbs_storage.clear = clear api = mocker.Mock() self.change_number = [] diff --git a/tests/util/test_storage_helper.py b/tests/util/test_storage_helper.py index ee5fe318..5804a6fa 100644 --- a/tests/util/test_storage_helper.py +++ b/tests/util/test_storage_helper.py @@ -2,7 +2,8 @@ import pytest from splitio.util.storage_helper import update_feature_flag_storage, get_valid_flag_sets, combine_valid_flag_sets, \ - update_rule_based_segment_storage, update_rule_based_segment_storage_async, update_feature_flag_storage_async + update_rule_based_segment_storage, update_rule_based_segment_storage_async, update_feature_flag_storage_async, \ + get_standard_segment_names_in_rbs_storage_async, get_standard_segment_names_in_rbs_storage from splitio.storage.inmemmory import InMemorySplitStorage, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync, \ InMemorySplitStorageAsync from splitio.models import splits, rule_based_segments @@ -18,7 +19,7 @@ class 
StorageHelperTests(object): "trafficTypeName": "user", "excluded":{ "keys":["mauro@split.io","gaston@split.io"], - "segments":['excluded_segment'] + "segments":[{"name":"excluded_segment", "type": "standard"}] }, "conditions": [ {"matcherGroup": { @@ -190,6 +191,17 @@ def clear(): segments = update_rule_based_segment_storage(storage, [self.rbs], 123, True) assert self.clear == 1 + + def test_get_standard_segment_in_rbs_storage(self, mocker): + storage = InMemoryRuleBasedSegmentStorage() + segments = update_rule_based_segment_storage(storage, [self.rbs], 123) + assert get_standard_segment_names_in_rbs_storage(storage) == {'excluded_segment', 'employees'} + + @pytest.mark.asyncio + async def test_get_standard_segment_in_rbs_storage_async(self, mocker): + storage = InMemoryRuleBasedSegmentStorageAsync() + segments = await update_rule_based_segment_storage_async(storage, [self.rbs], 123) + assert await get_standard_segment_names_in_rbs_storage_async(storage) == {'excluded_segment', 'employees'} @pytest.mark.asyncio async def test_update_rule_base_segment_storage_async(self, mocker):