
Commit f8e363d

chore: Preparing for release (#229)
1 parent: 955712b


58 files changed: +16935 -17918 lines changed

.flake8

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+[flake8]
+# E722 - do not use bare 'except'
+ignore = E722
+exclude = optimizely/lib/pymmh3.py,*virtualenv*
+max-line-length = 120
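For reference, E722 is the flake8 check for bare `except:` clauses, which the new config ignores repo-wide. A minimal, hypothetical illustration (not taken from the SDK) of the pattern the rule would otherwise flag:

```python
# Hypothetical snippet showing what flake8's E722 flags.
def parse_revision(raw):
    try:
        return int(raw)
    except:  # bare 'except' -- E722 would normally flag this line
        return None

# The usual fix is to name the exception instead:
def parse_revision_strict(raw):
    try:
        return int(raw)
    except (TypeError, ValueError):
        return None
```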

CHANGELOG.md

Lines changed: 10 additions & 2 deletions
@@ -1,5 +1,13 @@
 # Optimizely Python SDK Changelog

+## 3.3.1
+December 16th, 2019
+
+### Bug Fixes:
+* Fixed [installation issue](https://github.com/optimizely/python-sdk/issues/220) on Windows. ([#224](https://github.com/optimizely/python-sdk/pull/224))
+* Fixed batch event processor deadline reset issue. ([#227](https://github.com/optimizely/python-sdk/pull/227))
+* Added more batch event processor debug messages. ([#227](https://github.com/optimizely/python-sdk/pull/227))
+
 ## 3.3.0
 October 28th, 2019

@@ -77,12 +85,12 @@ targeting functionality.
 * Note that for results segmentation in Optimizely results, the user attribute values from one event are automatically applied to all other events in the same session, as long as the events in question were actually received by our backend. This behavior was already in place and is not affected by the 3.0 release.
 * Support for all types of attribute values, not just strings.
 * All values are passed through to notification listeners.
-* Strings, booleans, and valid numbers are passed to the event dispatcher and can be used for Optimizely results segmentation. A valid number is a finite float or numbers.Integral in the inclusive range \[-2⁵³, 2⁵³\].
+* Strings, booleans, and valid numbers are passed to the event dispatcher and can be used for Optimizely results segmentation. A valid number is a finite float or numbers.Integral in the inclusive range \[-2 ^ 53, 2 ^ 53\].
 * Strings, booleans, and valid numbers are relevant for audience conditions.
 * Support for additional matchers in audience conditions:
 * An `exists` matcher that passes if the user has a non-null value for the targeted user attribute and fails otherwise.
 * A `substring` matcher that resolves if the user has a string value for the targeted attribute.
-* `gt` (greater than) and `lt` (less than) matchers that resolve if the user has a valid number value for the targeted attribute. A valid number is a finite float or numbers.Integral in the inclusive range \[-2⁵³, 2⁵³\].
+* `gt` (greater than) and `lt` (less than) matchers that resolve if the user has a valid number value for the targeted attribute. A valid number is a finite float or numbers.Integral in the inclusive range \[-2 ^ 53, 2 ^ 53\].
 * The original (`exact`) matcher can now be used to target booleans and valid numbers, not just strings.
 * Support for A/B tests, feature tests, and feature rollouts whose audiences are combined using `"and"` and `"not"` operators, not just the `"or"` operator.
 * Datafile-version compatibility check: The SDK will remain uninitialized (i.e., will gracefully fail to activate experiments and features) if given a datafile version greater than 4.
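As a rough, standalone sketch of the "valid number" rule quoted above (a finite float or numbers.Integral in the inclusive range \[-2 ^ 53, 2 ^ 53\]); this is illustrative only, not the SDK's actual validator:

```python
import math
import numbers

def is_valid_number(value):
    # Sketch of the rule above: finite float or numbers.Integral within +/- 2**53.
    if isinstance(value, bool):
        # Booleans are Integral in Python, but the changelog lists them as a separate type.
        return False
    if isinstance(value, (numbers.Integral, float)):
        return math.isfinite(value) and abs(value) <= 2 ** 53
    return False

assert is_valid_number(42) is True
assert is_valid_number(2 ** 53) is True        # inclusive bound
assert is_valid_number(2 ** 53 + 1) is False   # outside the range
assert is_valid_number(float('inf')) is False  # not finite
```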

optimizely/bucketer.py

Lines changed: 66 additions & 70 deletions
@@ -12,10 +12,11 @@
 # limitations under the License.

 import math
+
 try:
-  import mmh3
+    import mmh3
 except ImportError:
-  from .lib import pymmh3 as mmh3
+    from .lib import pymmh3 as mmh3


 MAX_TRAFFIC_VALUE = 10000
@@ -27,15 +28,15 @@


 class Bucketer(object):
-  """ Optimizely bucketing algorithm that evenly distributes visitors. """
+    """ Optimizely bucketing algorithm that evenly distributes visitors. """

-  def __init__(self):
-    """ Bucketer init method to set bucketing seed and logger instance. """
+    def __init__(self):
+        """ Bucketer init method to set bucketing seed and logger instance. """

-    self.bucket_seed = HASH_SEED
+        self.bucket_seed = HASH_SEED

-  def _generate_unsigned_hash_code_32_bit(self, bucketing_id):
-    """ Helper method to retrieve hash code.
+    def _generate_unsigned_hash_code_32_bit(self, bucketing_id):
+        """ Helper method to retrieve hash code.

     Args:
       bucketing_id: ID for bucketing.
@@ -44,11 +45,11 @@ def _generate_unsigned_hash_code_32_bit(self, bucketing_id):
       Hash code which is a 32 bit unsigned integer.
     """

-    # Adjusting MurmurHash code to be unsigned
-    return (mmh3.hash(bucketing_id, self.bucket_seed) & UNSIGNED_MAX_32_BIT_VALUE)
+        # Adjusting MurmurHash code to be unsigned
+        return mmh3.hash(bucketing_id, self.bucket_seed) & UNSIGNED_MAX_32_BIT_VALUE

-  def _generate_bucket_value(self, bucketing_id):
-    """ Helper function to generate bucket value in half-closed interval [0, MAX_TRAFFIC_VALUE).
+    def _generate_bucket_value(self, bucketing_id):
+        """ Helper function to generate bucket value in half-closed interval [0, MAX_TRAFFIC_VALUE).

     Args:
       bucketing_id: ID for bucketing.
@@ -57,11 +58,11 @@ def _generate_bucket_value(self, bucketing_id):
       Bucket value corresponding to the provided bucketing ID.
     """

-    ratio = float(self._generate_unsigned_hash_code_32_bit(bucketing_id)) / MAX_HASH_VALUE
-    return math.floor(ratio * MAX_TRAFFIC_VALUE)
+        ratio = float(self._generate_unsigned_hash_code_32_bit(bucketing_id)) / MAX_HASH_VALUE
+        return math.floor(ratio * MAX_TRAFFIC_VALUE)

-  def find_bucket(self, project_config, bucketing_id, parent_id, traffic_allocations):
-    """ Determine entity based on bucket value and traffic allocations.
+    def find_bucket(self, project_config, bucketing_id, parent_id, traffic_allocations):
+        """ Determine entity based on bucket value and traffic allocations.

     Args:
       project_config: Instance of ProjectConfig.
@@ -73,22 +74,21 @@ def find_bucket(self, project_config, bucketing_id, parent_id, traffic_allocations):
       Entity ID which may represent experiment or variation.
     """

-    bucketing_key = BUCKETING_ID_TEMPLATE.format(bucketing_id=bucketing_id, parent_id=parent_id)
-    bucketing_number = self._generate_bucket_value(bucketing_key)
-    project_config.logger.debug('Assigned bucket %s to user with bucketing ID "%s".' % (
-      bucketing_number,
-      bucketing_id
-    ))
+        bucketing_key = BUCKETING_ID_TEMPLATE.format(bucketing_id=bucketing_id, parent_id=parent_id)
+        bucketing_number = self._generate_bucket_value(bucketing_key)
+        project_config.logger.debug(
+            'Assigned bucket %s to user with bucketing ID "%s".' % (bucketing_number, bucketing_id)
+        )

-    for traffic_allocation in traffic_allocations:
-      current_end_of_range = traffic_allocation.get('endOfRange')
-      if bucketing_number < current_end_of_range:
-        return traffic_allocation.get('entityId')
+        for traffic_allocation in traffic_allocations:
+            current_end_of_range = traffic_allocation.get('endOfRange')
+            if bucketing_number < current_end_of_range:
+                return traffic_allocation.get('entityId')

-    return None
+        return None

-  def bucket(self, project_config, experiment, user_id, bucketing_id):
-    """ For a given experiment and bucketing ID determines variation to be shown to user.
+    def bucket(self, project_config, experiment, user_id, bucketing_id):
+        """ For a given experiment and bucketing ID determines variation to be shown to user.

     Args:
       project_config: Instance of ProjectConfig.
@@ -100,45 +100,41 @@ def bucket(self, project_config, experiment, user_id, bucketing_id):
       Variation in which user with ID user_id will be put in. None if no variation.
     """

-    if not experiment:
-      return None
-
-    # Determine if experiment is in a mutually exclusive group
-    if experiment.groupPolicy in GROUP_POLICIES:
-      group = project_config.get_group(experiment.groupId)
-
-      if not group:
+        if not experiment:
+            return None
+
+        # Determine if experiment is in a mutually exclusive group
+        if experiment.groupPolicy in GROUP_POLICIES:
+            group = project_config.get_group(experiment.groupId)
+
+            if not group:
+                return None
+
+            user_experiment_id = self.find_bucket(
+                project_config, bucketing_id, experiment.groupId, group.trafficAllocation,
+            )
+            if not user_experiment_id:
+                project_config.logger.info('User "%s" is in no experiment.' % user_id)
+                return None
+
+            if user_experiment_id != experiment.id:
+                project_config.logger.info(
+                    'User "%s" is not in experiment "%s" of group %s.' % (user_id, experiment.key, experiment.groupId)
+                )
+                return None
+
+            project_config.logger.info(
+                'User "%s" is in experiment %s of group %s.' % (user_id, experiment.key, experiment.groupId)
+            )
+
+        # Bucket user if not in white-list and in group (if any)
+        variation_id = self.find_bucket(project_config, bucketing_id, experiment.id, experiment.trafficAllocation)
+        if variation_id:
+            variation = project_config.get_variation_from_id(experiment.key, variation_id)
+            project_config.logger.info(
+                'User "%s" is in variation "%s" of experiment %s.' % (user_id, variation.key, experiment.key)
+            )
+            return variation
+
+        project_config.logger.info('User "%s" is in no variation.' % user_id)
         return None
-
-      user_experiment_id = self.find_bucket(project_config, bucketing_id, experiment.groupId, group.trafficAllocation)
-      if not user_experiment_id:
-        project_config.logger.info('User "%s" is in no experiment.' % user_id)
-        return None
-
-      if user_experiment_id != experiment.id:
-        project_config.logger.info('User "%s" is not in experiment "%s" of group %s.' % (
-          user_id,
-          experiment.key,
-          experiment.groupId
-        ))
-        return None
-
-      project_config.logger.info('User "%s" is in experiment %s of group %s.' % (
-        user_id,
-        experiment.key,
-        experiment.groupId
-      ))
-
-    # Bucket user if not in white-list and in group (if any)
-    variation_id = self.find_bucket(project_config, bucketing_id, experiment.id, experiment.trafficAllocation)
-    if variation_id:
-      variation = project_config.get_variation_from_id(experiment.key, variation_id)
-      project_config.logger.info('User "%s" is in variation "%s" of experiment %s.' % (
-        user_id,
-        variation.key,
-        experiment.key
-      ))
-      return variation
-
-    project_config.logger.info('User "%s" is in no variation.' % user_id)
-    return None
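The reformat above does not change the bucketing math. As a quick standalone sketch of the hash-to-bucket mapping used by `_generate_unsigned_hash_code_32_bit` and `_generate_bucket_value` (the seed and the example key below are assumptions for illustration; the SDK's real constants live in optimizely/bucketer.py):

```python
import math

import mmh3  # the SDK falls back to its bundled optimizely/lib/pymmh3.py when mmh3 is absent

HASH_SEED = 1                          # assumed value of the module-level bucket seed
UNSIGNED_MAX_32_BIT_VALUE = 0xFFFFFFFF
MAX_HASH_VALUE = math.pow(2, 32)
MAX_TRAFFIC_VALUE = 10000              # from the diff above

def bucket_value(bucketing_key):
    # Step 1: signed 32-bit MurmurHash, masked to an unsigned value.
    hash_code = mmh3.hash(bucketing_key, HASH_SEED) & UNSIGNED_MAX_32_BIT_VALUE
    # Step 2: scale into [0, 1), then into the half-closed interval [0, MAX_TRAFFIC_VALUE).
    ratio = float(hash_code) / MAX_HASH_VALUE
    return math.floor(ratio * MAX_TRAFFIC_VALUE)

# Example: a key built from a bucketing ID plus a parent (experiment/group) ID,
# roughly what BUCKETING_ID_TEMPLATE produces.
print(bucket_value('user-1' + '111127'))   # prints an integer in [0, 10000)
```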

optimizely/config_manager.py

Lines changed: 51 additions & 48 deletions
@@ -33,10 +33,7 @@
 class BaseConfigManager(ABC):
     """ Base class for Optimizely's config manager. """

-    def __init__(self,
-                 logger=None,
-                 error_handler=None,
-                 notification_center=None):
+    def __init__(self, logger=None, error_handler=None, notification_center=None):
         """ Initialize config manager.

         Args:
@@ -74,12 +71,9 @@ def get_config(self):
 class StaticConfigManager(BaseConfigManager):
     """ Config manager that returns ProjectConfig based on provided datafile. """

-    def __init__(self,
-                 datafile=None,
-                 logger=None,
-                 error_handler=None,
-                 notification_center=None,
-                 skip_json_validation=False):
+    def __init__(
+        self, datafile=None, logger=None, error_handler=None, notification_center=None, skip_json_validation=False,
+    ):
         """ Initialize config manager. Datafile has to be provided to use.

         Args:
@@ -91,9 +85,9 @@ def __init__(self,
                                   validation upon object invocation. By default
                                   JSON schema validation will be performed.
         """
-        super(StaticConfigManager, self).__init__(logger=logger,
-                                                   error_handler=error_handler,
-                                                   notification_center=notification_center)
+        super(StaticConfigManager, self).__init__(
+            logger=logger, error_handler=error_handler, notification_center=notification_center,
+        )
         self._config = None
         self.validate_schema = not skip_json_validation
         self._set_config(datafile)
@@ -153,17 +147,19 @@ def get_config(self):
 class PollingConfigManager(StaticConfigManager):
     """ Config manager that polls for the datafile and updated ProjectConfig based on an update interval. """

-    def __init__(self,
-                 sdk_key=None,
-                 datafile=None,
-                 update_interval=None,
-                 blocking_timeout=None,
-                 url=None,
-                 url_template=None,
-                 logger=None,
-                 error_handler=None,
-                 notification_center=None,
-                 skip_json_validation=False):
+    def __init__(
+        self,
+        sdk_key=None,
+        datafile=None,
+        update_interval=None,
+        blocking_timeout=None,
+        url=None,
+        url_template=None,
+        logger=None,
+        error_handler=None,
+        notification_center=None,
+        skip_json_validation=False,
+    ):
         """ Initialize config manager. One of sdk_key or url has to be set to be able to use.

         Args:
@@ -185,13 +181,16 @@ def __init__(self,

         """
         self._config_ready_event = threading.Event()
-        super(PollingConfigManager, self).__init__(datafile=datafile,
-                                                   logger=logger,
-                                                   error_handler=error_handler,
-                                                   notification_center=notification_center,
-                                                   skip_json_validation=skip_json_validation)
-        self.datafile_url = self.get_datafile_url(sdk_key, url,
-                                                  url_template or enums.ConfigManager.DATAFILE_URL_TEMPLATE)
+        super(PollingConfigManager, self).__init__(
+            datafile=datafile,
+            logger=logger,
+            error_handler=error_handler,
+            notification_center=notification_center,
+            skip_json_validation=skip_json_validation,
+        )
+        self.datafile_url = self.get_datafile_url(
+            sdk_key, url, url_template or enums.ConfigManager.DATAFILE_URL_TEMPLATE
+        )
         self.set_update_interval(update_interval)
         self.set_blocking_timeout(blocking_timeout)
         self.last_modified = None
@@ -227,7 +226,8 @@ def get_datafile_url(sdk_key, url, url_template)
             return url_template.format(sdk_key=sdk_key)
         except (AttributeError, KeyError):
             raise optimizely_exceptions.InvalidInputException(
-                'Invalid url_template {} provided.'.format(url_template))
+                'Invalid url_template {} provided.'.format(url_template)
+            )

         return url

@@ -238,8 +238,8 @@ def _set_config(self, datafile):
             datafile: JSON string representing the Optimizely project.
         """
         if datafile or self._config_ready_event.is_set():
-          super(PollingConfigManager, self)._set_config(datafile=datafile)
-          self._config_ready_event.set()
+            super(PollingConfigManager, self)._set_config(datafile=datafile)
+            self._config_ready_event.set()

     def get_config(self):
         """ Returns instance of ProjectConfig. Returns immediately if project config is ready otherwise
@@ -269,9 +269,10 @@ def set_update_interval(self, update_interval):

         # If polling interval is less than or equal to 0 then set it to default update interval.
         if update_interval <= 0:
-            self.logger.debug('update_interval value {} too small. Defaulting to {}'.format(
-                update_interval,
-                enums.ConfigManager.DEFAULT_UPDATE_INTERVAL)
+            self.logger.debug(
+                'update_interval value {} too small. Defaulting to {}'.format(
+                    update_interval, enums.ConfigManager.DEFAULT_UPDATE_INTERVAL
+                )
             )
             update_interval = enums.ConfigManager.DEFAULT_UPDATE_INTERVAL

@@ -294,9 +295,10 @@ def set_blocking_timeout(self, blocking_timeout):

         # If blocking timeout is less than 0 then set it to default blocking timeout.
         if blocking_timeout < 0:
-            self.logger.debug('blocking timeout value {} too small. Defaulting to {}'.format(
-                blocking_timeout,
-                enums.ConfigManager.DEFAULT_BLOCKING_TIMEOUT)
+            self.logger.debug(
+                'blocking timeout value {} too small. Defaulting to {}'.format(
+                    blocking_timeout, enums.ConfigManager.DEFAULT_BLOCKING_TIMEOUT
+                )
             )
             blocking_timeout = enums.ConfigManager.DEFAULT_BLOCKING_TIMEOUT

@@ -337,9 +339,9 @@ def fetch_datafile(self):
         if self.last_modified:
             request_headers[enums.HTTPHeaders.IF_MODIFIED_SINCE] = self.last_modified

-        response = requests.get(self.datafile_url,
-                                headers=request_headers,
-                                timeout=enums.ConfigManager.REQUEST_TIMEOUT)
+        response = requests.get(
+            self.datafile_url, headers=request_headers, timeout=enums.ConfigManager.REQUEST_TIMEOUT,
+        )
         self._handle_response(response)

     @property
@@ -350,12 +352,13 @@ def is_running(self):
     def _run(self):
         """ Triggered as part of the thread which fetches the datafile and sleeps until next update interval. """
         try:
-          while self.is_running:
-            self.fetch_datafile()
-            time.sleep(self.update_interval)
+            while self.is_running:
+                self.fetch_datafile()
+                time.sleep(self.update_interval)
         except (OSError, OverflowError) as err:
-            self.logger.error('Error in time.sleep. '
-                              'Provided update_interval value may be too big. Error: {}'.format(str(err)))
+            self.logger.error(
+                'Error in time.sleep. ' 'Provided update_interval value may be too big. Error: {}'.format(str(err))
+            )
             raise

     def start(self):