green_mode: Choose job count based on environment
Python 3.4 and 3.5 fail under our pytest configuration
when the number of jobs is the CPU count minus one.
On Python 3.4, an extra CPU needs to be reserved for the tests
to operate smoothly on CI, and on Python 3.5 and Windows,
multiprocessing needs to be disabled entirely.

Expose a jobs argument throughout green_mode, so that it
can also be exposed to the user or the test suite for more
controlled use and testing.

Related to coala#295
jayvdb committed Aug 15, 2018
1 parent 5027672 commit b3cf751
Showing 3 changed files with 126 additions and 20 deletions.
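
For orientation, here is a minimal sketch (not part of the commit) of how the
jobs argument and the new _create_mp_pool helper shown in the diff below are
meant to behave; the calls and values are illustrative only:

    from coala_quickstart.green_mode import green_mode

    # jobs=1 disables multiprocessing entirely: no pool is created.
    assert green_mode._create_mp_pool(1) is None

    # jobs=0 auto-detects: a pool is created only when more CPUs are
    # available than green_mode._RESERVE_CPUS, which this commit raises
    # on CI for Python 3.4 and sets effectively infinite for Python 3.5.
    pool = green_mode._create_mp_pool(0)
    if pool is not None:
        pool.close()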
84 changes: 66 additions & 18 deletions coala_quickstart/green_mode/green_mode.py
@@ -2,6 +2,7 @@
import itertools
import operator
import os
import sys
from copy import deepcopy
from pathlib import Path

@@ -41,6 +42,21 @@


settings_key = 'green_mode_infinite_value_settings'
_CI_PYTEST_ACTIVE = os.environ.get('CI') and os.environ.get('PYTEST')
_PYTHON_VERSION_MINOR = sys.version_info[0:2]
_RESERVE_CPUS = 1

if _CI_PYTEST_ACTIVE:  # pragma no branch; pragma Python 3.6,3.7: no cover
    if _PYTHON_VERSION_MINOR == (3, 5):  # pragma Python 3.4: no cover
        # Python 3.5 runs jobs even with _RESERVE_CPUS set to 2
        _RESERVE_CPUS = sys.maxsize
    elif _PYTHON_VERSION_MINOR == (3, 4):  # pragma Python 3.5: no cover
        _RESERVE_CPUS = 2
    elif os.name == 'nt':  # pragma posix: no cover
        # FIXME: Multiprocessing not working on windows.
        _RESERVE_CPUS = sys.maxsize
    else:  # pragma Python 3.4,3.5: no cover; pragma nt: no cover
        pass


def initialize_project_data(dir, ignore_globs):
@@ -272,13 +288,38 @@ def check_bear_results(ret_val, ignore_ranges):
    return True


def _create_mp_pool(jobs: int = 0):
    """
    Create a multiprocessing pool.
    :param jobs: Number of jobs to run concurrently.
        0 means auto-detect. 1 means no pool.
    """
    if not isinstance(jobs, int):
        raise TypeError('jobs must be an int')
    if jobs == 1:
        return
    if jobs < 0:
        raise ValueError('jobs must be 0 or a positive integer')

    import multiprocessing as mp
    cpu_count = mp.cpu_count()
    if cpu_count <= _RESERVE_CPUS:
        return
    if jobs == 0 or jobs > cpu_count - _RESERVE_CPUS:
        jobs = cpu_count - _RESERVE_CPUS
    pool = mp.Pool(processes=jobs)
    return pool


def local_bear_test(bear, file_dict, file_names, lang, kwargs,
                    ignore_ranges):
                    ignore_ranges,
                    jobs: int = 0,
                    ):
    lang_files = split_by_language(file_names)
    lang_files = {k.lower(): v for k, v in lang_files.items()}

    import multiprocessing as mp
    pool = mp.Pool(processes=mp.cpu_count()-1)
    pool = _create_mp_pool(jobs)

    file_results = []

@@ -317,12 +358,11 @@ def local_bear_test(bear, file_dict, file_names, lang, kwargs,
            bear_obj = bear(section, None)
            ret_val = bear_obj.run(**dict(zip(kwargs, vals)))
            ret_val = [] if not ret_val else list(ret_val)
            # FIXME: Multiprocessing not working on windows.
            if os.name == 'nt':  # pragma posix: no cover
                results.append(check_bear_results(ret_val, ignore_ranges))
            else:  # pragma nt: no cover
            if pool:  # pragma Python 3.5: no cover; pragma nt: no cover
                results.append(pool.apply(check_bear_results,
                                          args=(ret_val, ignore_ranges)))
            else:  # pragma Python 3.4,3.6,3.7: no cover
                results.append(check_bear_results(ret_val, ignore_ranges))

        for index, result in enumerate(results):
            if result is True:
@@ -335,14 +375,15 @@ def local_bear_test(bear, file_dict, file_names, lang, kwargs,
    return {bear: file_results}


def global_bear_test(bear, file_dict, kwargs, ignore_ranges):
    import multiprocessing as mp
    pool = mp.Pool(processes=mp.cpu_count()-1)

def global_bear_test(bear, file_dict, kwargs, ignore_ranges,
                     jobs: int = 0,
                     ):
    results = []
    values = []
    file_results = []

    pool = _create_mp_pool(jobs)

    for vals in itertools.product(*kwargs.values()):
        values.append(vals)
        section = Section('test-section-global-bear')
@@ -351,11 +392,11 @@ def global_bear_test(bear, file_dict, kwargs, ignore_ranges):
        bear_obj.file_dict = file_dict
        ret_val = bear_obj.run(**dict(zip(kwargs, vals)))
        ret_val = list(ret_val)
        if os.name == 'nt':  # pragma posix: no cover
            results.append(check_bear_results(ret_val, ignore_ranges))
        else:  # pragma nt: no cover
        if pool:  # pragma Python 3.5: no cover; pragma nt: no cover
            results.append(pool.apply(check_bear_results,
                                      args=(ret_val, ignore_ranges)))
        else:  # pragma Python 3.4,3.6,3.7: no cover
            results.append(check_bear_results(ret_val, ignore_ranges))

    for index, result in enumerate(results):
        if result is True:
@@ -367,7 +408,9 @@ def global_bear_test(bear, file_dict, kwargs, ignore_ranges):


def run_test_on_each_bear(bear, file_dict, file_names, lang, kwargs,
                          ignore_ranges, type_of_setting, printer=None):
                          ignore_ranges, type_of_setting, printer=None,
                          jobs: int = 0,
                          ):
    if type_of_setting == 'non-op':
        printer.print('Finding suitable values to necessary '
                      'settings for ' + bear.__name__ +
@@ -389,7 +432,8 @@ def run_test_on_each_bear(bear, file_dict, file_names, lang, kwargs,

def bear_test_fun(bears, bear_settings_obj, file_dict, ignore_ranges,
                  contents, file_names, op_args_limit, value_to_op_args_limit,
                  printer=None):
                  printer=None,
                  jobs: int = 0):
    """
    Tests the bears with the generated file dict and list of files
    along with the values received for each and every type of setting
@@ -440,7 +484,9 @@ def bear_test_fun(bears, bear_settings_obj, file_dict, ignore_ranges,
            op_kwargs = get_kwargs(op_set, bear, contents)
            non_op_file_results = run_test_on_each_bear(
                bear, file_dict, file_names, lang, non_op_kwargs,
                ignore_ranges, 'non-op', printer)
                ignore_ranges, 'non-op', printer,
                jobs=jobs,
                )
            if len(op_kwargs) < op_args_limit and not(
                    True in [len(value) > value_to_op_args_limit
                             for key, value in op_kwargs.items()]):
@@ -449,7 +495,9 @@ def bear_test_fun(bears, bear_settings_obj, file_dict, ignore_ranges,
                unified_file_results = run_test_on_each_bear(
                    bear, file_dict, file_names, lang,
                    unified_kwargs, ignore_ranges, 'unified',
                    printer)
                    printer,
                    jobs=jobs,
                    )
            else:
                unified_file_results = None
            final_non_op_results.append(non_op_file_results)
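Context for the next two files: the setup.cfg change sets PYTEST=1 for test
runs (via pytest's env option, presumably supplied by the pytest-env plugin),
and CI providers export a CI variable, so under CI the module-level guard
added above becomes truthy. A rough, illustrative sketch:

    import os

    # The CI provider exports CI, pytest injects PYTEST=1 from setup.cfg,
    # so green_mode._CI_PYTEST_ACTIVE evaluates truthy and _RESERVE_CPUS
    # is then adjusted for the Python version / OS in use.
    ci_pytest_active = os.environ.get('CI') and os.environ.get('PYTEST')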
3 changes: 3 additions & 0 deletions setup.cfg
@@ -28,6 +28,9 @@ doctest_optionflags =
    ELLIPSIS
    IGNORE_EXCEPTION_DETAIL

env =
    PYTEST=1

reqsfilenamepatterns =
    requirements.txt
    test-requirements.txt
59 changes: 57 additions & 2 deletions tests/green_mode/green_modeTest.py
@@ -1,5 +1,6 @@
import operator
import os
import sys
import unittest
import yaml
from copy import deepcopy
@@ -13,6 +14,7 @@
from coala_quickstart.green_mode.Setting import (
    find_max_min_of_setting,
)
from coala_quickstart.green_mode import green_mode
from coala_quickstart.green_mode.green_mode import (
    bear_test_fun,
    check_bear_results,
@@ -571,5 +573,58 @@ def test_write_coafile(self):
            self.assertIn(line, [i.strip('\\').replace('\\\\C', 'C')
                                 for i in contents.split('\n')])

    def test_green_mode(self):
        pass

class MultiProcessingTest(unittest.TestCase):

    def setUp(self):
        self.orig_cpus = green_mode._RESERVE_CPUS

    def tearDown(self):
        green_mode._RESERVE_CPUS = self.orig_cpus

    def test_no_pool(self):
        with self.assertRaises(TypeError):
            green_mode._create_mp_pool(None)
        with self.assertRaises(ValueError):
            self.assertIsNone(green_mode._create_mp_pool(-1))
        self.assertIsNone(green_mode._create_mp_pool(1))

    def test_reserve_cpus(self):
        green_mode._RESERVE_CPUS = 1
        with patch('multiprocessing.cpu_count', return_value=1):
            self.assertIsNone(green_mode._create_mp_pool(0))
        green_mode._RESERVE_CPUS = 2
        with patch('multiprocessing.cpu_count', return_value=2):
            self.assertIsNone(green_mode._create_mp_pool(0))

        green_mode._RESERVE_CPUS = 1
        with patch('multiprocessing.cpu_count', return_value=2):
            self.assertIsNotNone(green_mode._create_mp_pool(0))

        green_mode._RESERVE_CPUS = 1
        with patch('multiprocessing.cpu_count', return_value=2):
            self.assertIsNotNone(green_mode._create_mp_pool(2))

        green_mode._RESERVE_CPUS = 1
        with patch('multiprocessing.cpu_count', return_value=10):
            self.assertIsNotNone(green_mode._create_mp_pool(2))

    def test_ci_pool_min(self):
        if not os.environ.get('CI'):
            return

        if sys.version_info[0:2] == (3, 4):
            with patch('multiprocessing.cpu_count', return_value=2):
                self.assertIsNone(green_mode._create_mp_pool(0))
            with patch('multiprocessing.cpu_count', return_value=3):
                self.assertIsNotNone(green_mode._create_mp_pool(0))
        elif sys.version_info[0:2] == (3, 5):
            with patch('multiprocessing.cpu_count', return_value=3):
                self.assertIsNone(green_mode._create_mp_pool(0))
            # Python 3.5 is forced to not be mp under CI & pytest
            with patch('multiprocessing.cpu_count', return_value=100):
                self.assertIsNone(green_mode._create_mp_pool(0))
        else:
            with patch('multiprocessing.cpu_count', return_value=2):
                self.assertIsNotNone(green_mode._create_mp_pool(2))
                self.assertIsNotNone(green_mode._create_mp_pool(0))
