diff --git a/.evergreen-functions.yml b/.evergreen-functions.yml
index 98a898321..e21903a53 100644
--- a/.evergreen-functions.yml
+++ b/.evergreen-functions.yml
@@ -513,7 +513,7 @@ functions:
         shell: bash
         <<: *e2e_include_expansions_in_env
         working_dir: src/github.com/mongodb/mongodb-kubernetes
-        binary: scripts/evergreen/run_python.sh pipeline.py --include ${image_name} --parallel --sign
+        binary: scripts/evergreen/run_python.sh pipeline.py --image ${image_name} --parallel --sign

   teardown_cloud_qa_all:
     - *switch_context
diff --git a/Makefile b/Makefile
index db87323df..ee558b9fa 100644
--- a/Makefile
+++ b/Makefile
@@ -69,13 +69,13 @@ operator: configure-operator build-and-push-operator-image

 # build-push, (todo) restart database
 database: aws_login
-	@ scripts/evergreen/run_python.sh pipeline.py --include database
+	@ scripts/evergreen/run_python.sh pipeline.py --image database

 readiness_probe: aws_login
-	@ scripts/evergreen/run_python.sh pipeline.py --include readiness-probe
+	@ scripts/evergreen/run_python.sh pipeline.py --image readiness-probe

 upgrade_hook: aws_login
-	@ scripts/evergreen/run_python.sh pipeline.py --include upgrade-hook
+	@ scripts/evergreen/run_python.sh pipeline.py --image upgrade-hook

 # ensures cluster is up, cleans Kubernetes + OM, build-push-deploy operator,
 # push-deploy database, create secrets, config map, resources etc
@@ -84,7 +84,7 @@ full: build-and-push-images

 # build-push appdb image
 appdb: aws_login
-	@ scripts/evergreen/run_python.sh pipeline.py --include appdb
+	@ scripts/evergreen/run_python.sh pipeline.py --image appdb

 # runs the e2e test: make e2e test=e2e_sharded_cluster_pv. The Operator is redeployed before the test, the namespace is cleaned.
 # The e2e test image is built and pushed together with all main ones (operator, database, init containers)
@@ -148,19 +148,19 @@ aws_cleanup:
 	@ scripts/evergreen/prepare_aws.sh

 build-and-push-operator-image: aws_login
-	@ scripts/evergreen/run_python.sh pipeline.py --include operator-quick
+	@ scripts/evergreen/run_python.sh pipeline.py --image operator-quick

 build-and-push-database-image: aws_login
 	@ scripts/dev/build_push_database_image

 build-and-push-test-image: aws_login build-multi-cluster-binary
 	@ if [[ -z "$(local)" ]]; then \
-		scripts/evergreen/run_python.sh pipeline.py --include test; \
+		scripts/evergreen/run_python.sh pipeline.py --image test; \
 	fi

 build-and-push-mco-test-image: aws_login
 	@ if [[ -z "$(local)" ]]; then \
-		scripts/evergreen/run_python.sh pipeline.py --include mco-test; \
+		scripts/evergreen/run_python.sh pipeline.py --image mco-test; \
 	fi

 build-multi-cluster-binary:
@@ -175,27 +175,27 @@ build-and-push-images: build-and-push-operator-image appdb-init-image om-init-im
 build-and-push-init-images: appdb-init-image om-init-image database-init-image

 database-init-image:
-	@ scripts/evergreen/run_python.sh pipeline.py --include init-database
+	@ scripts/evergreen/run_python.sh pipeline.py --image init-database

 appdb-init-image:
-	@ scripts/evergreen/run_python.sh pipeline.py --include init-appdb
+	@ scripts/evergreen/run_python.sh pipeline.py --image init-appdb

 # Not setting a parallel-factor will default to 0 which will lead to using all CPUs, that can cause docker to die.
 # Here we are defaulting to 6, a higher value might work for you.
 agent-image:
-	@ scripts/evergreen/run_python.sh pipeline.py --include agent --all-agents --parallel --parallel-factor 6
+	@ scripts/evergreen/run_python.sh pipeline.py --image agent --all-agents --parallel --parallel-factor 6

 agent-image-slow:
-	@ scripts/evergreen/run_python.sh pipeline.py --include agent --parallel-factor 1
+	@ scripts/evergreen/run_python.sh pipeline.py --image agent --parallel-factor 1

 operator-image:
-	@ scripts/evergreen/run_python.sh pipeline.py --include operator
+	@ scripts/evergreen/run_python.sh pipeline.py --image operator

 om-init-image:
-	@ scripts/evergreen/run_python.sh pipeline.py --include init-ops-manager
+	@ scripts/evergreen/run_python.sh pipeline.py --image init-ops-manager

 om-image:
-	@ scripts/evergreen/run_python.sh pipeline.py --include ops-manager
+	@ scripts/evergreen/run_python.sh pipeline.py --image ops-manager

 configure-operator:
 	@ scripts/dev/configure_operator.sh
diff --git a/pipeline.py b/pipeline.py
index e05e5e5a0..be40443fe 100755
--- a/pipeline.py
+++ b/pipeline.py
@@ -1498,58 +1498,11 @@ def build_image(image_name: str, build_configuration: BuildConfiguration):
     get_builder_function_for_image_name()[image_name](build_configuration)


-def build_all_images(
-    images: Iterable[str],
-    builder: str,
-    debug: bool = False,
-    parallel: bool = False,
-    architecture: Optional[List[str]] = None,
-    sign: bool = False,
-    all_agents: bool = False,
-    parallel_factor: int = 0,
-):
-    """Builds all the images in the `images` list."""
-    build_configuration = operator_build_configuration(
-        builder, parallel, debug, architecture, sign, all_agents, parallel_factor
-    )
-    if sign:
-        mongodb_artifactory_login()
-    for image in images:
-        build_image(image, build_configuration)
-
-
-def calculate_images_to_build(
-    images: List[str], include: Optional[List[str]], exclude: Optional[List[str]]
-) -> Set[str]:
-    """
-    Calculates which images to build based on the `images`, `include` and `exclude` sets.
-
-    >>> calculate_images_to_build(["a", "b"], ["a"], ["b"])
-    ... ["a"]
-    """
-
-    if not include and not exclude:
-        return set(images)
-    include = set(include or [])
-    exclude = set(exclude or [])
-    images = set(images or [])
-
-    for image in include.union(exclude):
-        if image not in images:
-            raise ValueError("Image definition {} not found".format(image))
-
-    images_to_build = include.intersection(images)
-    if exclude:
-        images_to_build = images.difference(exclude)
-    return images_to_build
-
-
 def main():
     _setup_tracing()

     parser = argparse.ArgumentParser()
-    parser.add_argument("--include", action="append")
-    parser.add_argument("--exclude", action="append")
+    parser.add_argument("--image", required=True)
     parser.add_argument("--builder", default="docker", type=str)
     parser.add_argument("--list-images", action="store_true")
     parser.add_argument("--parallel", action="store_true", default=False)
@@ -1587,13 +1540,15 @@ def main():
     if not args.sign:
         logger.warning("--sign flag not provided, images won't be signed")

-    images_to_build = calculate_images_to_build(
-        list(get_builder_function_for_image_name().keys()), args.include, args.exclude
-    )
+    if args.image not in get_builder_function_for_image_name():
+        print("Image {} not found".format(args.image))
+        sys.exit(1)
+
+    if args.sign:
+        mongodb_artifactory_login()

-    build_all_images(
-        images_to_build,
-        args.builder,
+    build_configuration = operator_build_configuration(
+        builder=args.builder,
         debug=args.debug,
         parallel=args.parallel,
         architecture=args.arch,
@@ -1602,6 +1557,8 @@
         parallel_factor=args.parallel_factor,
     )

+    build_image(args.image, build_configuration)
+

 if __name__ == "__main__":
     main()
diff --git a/pipeline_test.py b/pipeline_test.py
index 671ad105e..17d3b131d 100644
--- a/pipeline_test.py
+++ b/pipeline_test.py
@@ -6,7 +6,7 @@
 import pytest

 from pipeline import (
-    calculate_images_to_build,
+    get_included_images,
     gather_all_supported_agent_versions,
     gather_latest_agent_versions,
     get_versions_to_rebuild,
@@ -64,28 +64,6 @@ def test_operator_build_configuration_defaults():
     assert config.namespace == "default"


-@pytest.mark.parametrize(
-    "test_case",
-    [
-        (["a", "b", "c"], ["a"], ["b"], {"a", "c"}),
-        (["a", "b", "c"], ["a", "b"], None, {"a", "b"}),
-        (["a", "b", "c"], None, ["a"], {"b", "c"}),
-        (["a", "b", "c"], [], [], {"a", "b", "c"}),
-        (["a", "b", "c"], ["d"], None, ValueError),
-        (["a", "b", "c"], None, ["d"], ValueError),
-        ([], ["a"], ["b"], ValueError),
-        (["a", "b", "c"], None, None, {"a", "b", "c"}),
-    ],
-)
-def test_calculate_images_to_build(test_case):
-    images, include, exclude, expected = test_case
-    if expected is ValueError:
-        with pytest.raises(ValueError):
-            calculate_images_to_build(images, include, exclude)
-    else:
-        assert calculate_images_to_build(images, include, exclude) == expected
-
-
 @pytest.mark.parametrize(
     "version,min_version,max_version,expected",
     [