Move run_python.sh to scripts/dev/ #269

Open · wants to merge 1 commit into master

Changes from all commits
12 changes: 6 additions & 6 deletions .evergreen-functions.yml
@@ -392,7 +392,7 @@ functions:
add_to_path:
- ${workdir}/bin
working_dir: src/github.com/mongodb/mongodb-kubernetes
binary: scripts/evergreen/run_python.sh scripts/update_supported_dockerfiles.py
binary: scripts/dev/run_python.sh scripts/update_supported_dockerfiles.py
- command: subprocess.exec
type: setup
params:
@@ -486,7 +486,7 @@ functions:
include_expansions_in_env:
- image_version
- rh_pyxis
binary: scripts/evergreen/run_python.sh scripts/preflight_images.py --image ${image_name} --submit "${preflight_submit}"
binary: scripts/dev/run_python.sh scripts/preflight_images.py --image ${image_name} --submit "${preflight_submit}"

build_multi_cluster_binary:
- command: subprocess.exec
@@ -538,7 +538,7 @@ functions:
shell: bash
<<: *e2e_include_expansions_in_env
working_dir: src/github.com/mongodb/mongodb-kubernetes
binary: scripts/evergreen/run_python.sh pipeline.py --include ${image_name} --parallel --sign
binary: scripts/dev/run_python.sh pipeline.py --include ${image_name} --parallel --sign

teardown_cloud_qa_all:
- *switch_context
@@ -549,7 +549,7 @@ functions:
working_dir: src/github.com/mongodb/mongodb-kubernetes
script: |
source .generated/context.export.env
scripts/evergreen/run_python.sh scripts/evergreen/e2e/setup_cloud_qa.py delete_all
scripts/dev/run_python.sh scripts/evergreen/e2e/setup_cloud_qa.py delete_all

# Updates current expansions with variables from release.json file.
# Use e.g. ${mongoDbOperator} afterwards.
@@ -596,7 +596,7 @@ functions:
add_to_path:
- ${workdir}/bin
# Below script deletes agent images created for an Evergreen patch older than 1 day
command: scripts/evergreen/run_python.sh scripts/evergreen/periodic-cleanup-aws.py
command: scripts/dev/run_python.sh scripts/evergreen/periodic-cleanup-aws.py

### Test Functions ###

@@ -695,7 +695,7 @@ functions:
working_dir: src/github.com/mongodb/mongodb-kubernetes
script: |
source .generated/context.export.env
scripts/evergreen/run_python.sh scripts/evergreen/e2e/performance/create_variants.py ${variant} ${size}> evergreen_tasks.json
scripts/dev/run_python.sh scripts/evergreen/e2e/performance/create_variants.py ${variant} ${size}> evergreen_tasks.json
echo "tasks to run:"
cat evergreen_tasks.json
- command: generate.tasks
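
The Evergreen functions above keep their shape; only the wrapper path moves from scripts/evergreen/ to scripts/dev/. The same entry points can be exercised locally from the repository root (the working_dir these functions use). The invocations below are copied from the hunks above; the teardown example assumes the generated context file already exists:

    # run from src/github.com/mongodb/mongodb-kubernetes (the repository root)
    scripts/dev/run_python.sh scripts/update_supported_dockerfiles.py

    # the cloud-qa teardown sources the generated context before calling the wrapper
    source .generated/context.export.env
    scripts/dev/run_python.sh scripts/evergreen/e2e/setup_cloud_qa.py delete_all
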
38 changes: 19 additions & 19 deletions Makefile
@@ -75,13 +75,13 @@ operator: configure-operator build-and-push-operator-image

# build-push, (todo) restart database
database: aws_login
@ scripts/evergreen/run_python.sh pipeline.py --include database
@ scripts/dev/run_python.sh pipeline.py --include database

readiness_probe: aws_login
@ scripts/evergreen/run_python.sh pipeline.py --include readiness-probe
@ scripts/dev/run_python.sh pipeline.py --include readiness-probe

upgrade_hook: aws_login
@ scripts/evergreen/run_python.sh pipeline.py --include upgrade-hook
@ scripts/dev/run_python.sh pipeline.py --include upgrade-hook

# ensures cluster is up, cleans Kubernetes + OM, build-push-deploy operator,
# push-deploy database, create secrets, config map, resources etc
@@ -90,7 +90,7 @@ full: build-and-push-images

# build-push appdb image
appdb: aws_login
@ scripts/evergreen/run_python.sh pipeline.py --include appdb
@ scripts/dev/run_python.sh pipeline.py --include appdb

# runs the e2e test: make e2e test=e2e_sharded_cluster_pv. The Operator is redeployed before the test, the namespace is cleaned.
# The e2e test image is built and pushed together with all main ones (operator, database, init containers)
@@ -112,7 +112,7 @@ mco-e2e: aws_login build-and-push-mco-test-image

generate-env-file: ## generates a local-test.env for local testing
mkdir -p .generated
{ scripts/evergreen/run_python.sh mongodb-community-operator/scripts/dev/get_e2e_env_vars.py ".generated/config.json" | tee >(cut -d' ' -f2 > .generated/mco-test.env) ;} > .generated/mco-test.export.env
{ scripts/dev/run_python.sh mongodb-community-operator/scripts/dev/get_e2e_env_vars.py ".generated/config.json" | tee >(cut -d' ' -f2 > .generated/mco-test.env) ;} > .generated/mco-test.export.env
. .generated/mco-test.export.env

reset-helm-leftovers: ## sometimes you didn't cleanly uninstall a helm release, this cleans the existing helm artifacts
@@ -154,19 +154,19 @@ aws_cleanup:
@ scripts/evergreen/prepare_aws.sh

build-and-push-operator-image: aws_login
@ scripts/evergreen/run_python.sh pipeline.py --include operator-quick
@ scripts/dev/run_python.sh pipeline.py --include operator-quick

build-and-push-database-image: aws_login
@ scripts/dev/build_push_database_image

build-and-push-test-image: aws_login build-multi-cluster-binary
@ if [[ -z "$(local)" ]]; then \
scripts/evergreen/run_python.sh pipeline.py --include test; \
scripts/dev/run_python.sh pipeline.py --include test; \
fi

build-and-push-mco-test-image: aws_login
@ if [[ -z "$(local)" ]]; then \
scripts/evergreen/run_python.sh pipeline.py --include mco-test; \
scripts/dev/run_python.sh pipeline.py --include mco-test; \
fi

build-multi-cluster-binary:
@@ -181,27 +181,27 @@ build-and-push-images: build-and-push-operator-image appdb-init-image om-init-im
build-and-push-init-images: appdb-init-image om-init-image database-init-image

database-init-image:
@ scripts/evergreen/run_python.sh pipeline.py --include init-database
@ scripts/dev/run_python.sh pipeline.py --include init-database

appdb-init-image:
@ scripts/evergreen/run_python.sh pipeline.py --include init-appdb
@ scripts/dev/run_python.sh pipeline.py --include init-appdb

# Not setting a parallel-factor will default to 0 which will lead to using all CPUs, that can cause docker to die.
# Here we are defaulting to 6, a higher value might work for you.
agent-image:
@ scripts/evergreen/run_python.sh pipeline.py --include agent --all-agents --parallel --parallel-factor 6
@ scripts/dev/run_python.sh pipeline.py --include agent --all-agents --parallel --parallel-factor 6

agent-image-slow:
@ scripts/evergreen/run_python.sh pipeline.py --include agent --parallel-factor 1
@ scripts/dev/run_python.sh pipeline.py --include agent --parallel-factor 1

operator-image:
@ scripts/evergreen/run_python.sh pipeline.py --include operator
@ scripts/dev/run_python.sh pipeline.py --include operator

om-init-image:
@ scripts/evergreen/run_python.sh pipeline.py --include init-ops-manager
@ scripts/dev/run_python.sh pipeline.py --include init-ops-manager

om-image:
@ scripts/evergreen/run_python.sh pipeline.py --include ops-manager
@ scripts/dev/run_python.sh pipeline.py --include ops-manager

configure-operator:
@ scripts/dev/configure_operator.sh
@@ -284,16 +284,16 @@ golang-tests-race:
USE_RACE=true scripts/evergreen/unit-tests.sh

sbom-tests:
@ scripts/evergreen/run_python.sh -m pytest generate_ssdlc_report_test.py
@ scripts/dev/run_python.sh -m pytest generate_ssdlc_report_test.py

# e2e tests are also in python and we will need to ignore them as they are in the docker/mongodb-kubernetes-tests folder
# additionally, we have one lib which we want to test which is in the =docker/mongodb-kubernetes-tests folder.
python-tests:
@ scripts/evergreen/run_python.sh -m pytest docker/mongodb-kubernetes-tests/kubeobject
@ scripts/evergreen/run_python.sh -m pytest --ignore=docker/mongodb-kubernetes-tests
@ scripts/dev/run_python.sh -m pytest docker/mongodb-kubernetes-tests/kubeobject
@ scripts/dev/run_python.sh -m pytest --ignore=docker/mongodb-kubernetes-tests

generate-ssdlc-report:
@ scripts/evergreen/run_python.sh generate_ssdlc_report.py
@ scripts/dev/run_python.sh generate_ssdlc_report.py

# test-race runs golang test with race enabled
test-race: generate fmt vet manifests golang-tests-race
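
Local workflows are unchanged apart from the path the Makefile targets shell out to. A usage sketch (assumes AWS credentials are available for the aws_login prerequisite):

    make database       # runs scripts/dev/run_python.sh pipeline.py --include database
    make agent-image    # runs scripts/dev/run_python.sh pipeline.py --include agent --all-agents --parallel --parallel-factor 6
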
File renamed without changes.
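
The renamed file is presumably run_python.sh itself, moved from scripts/evergreen/ to scripts/dev/ with its contents untouched; the script body is not part of this diff. As a rough sketch only (an assumption, not the actual file), wrappers of this kind typically activate the project's Python environment and forward their arguments:

    #!/usr/bin/env bash
    # Hypothetical sketch -- the real run_python.sh is renamed without content
    # changes in this PR and its body is not shown in the diff.
    set -euo pipefail
    # Resolve the repository root from scripts/dev/run_python.sh.
    PROJECT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
    source "${PROJECT_DIR}/venv/bin/activate"   # assumed virtualenv location
    exec python "$@"
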
2 changes: 1 addition & 1 deletion scripts/dev/switch_context_by_test.sh
@@ -58,7 +58,7 @@ main() {
find_variant_arg="--task-name"
fi

if ! contexts=$(scripts/evergreen/run_python.sh scripts/python/find_test_variants.py "${find_variant_arg}" "${test}"); then
if ! contexts=$(scripts/dev/run_python.sh scripts/python/find_test_variants.py "${find_variant_arg}" "${test}"); then
echo "Couldn't find any test contexts running test: ${test}"
echo "${contexts}"
exit 1
2 changes: 1 addition & 1 deletion scripts/evergreen/e2e/e2e.sh
@@ -21,7 +21,7 @@ run_e2e_mco_tests() {
docker exec kind-control-plane mkdir -p /opt/data/mongo-data-{0..2} /opt/data/mongo-logs-{0..2}

set +e # let's not fail here, such that we can still dump all information
scripts/evergreen/run_python.sh mongodb-community-operator/scripts/dev/e2e.py --test "${TEST_NAME}" --distro ubi --cluster-wide "${cluster_wide}"
scripts/dev/run_python.sh mongodb-community-operator/scripts/dev/e2e.py --test "${TEST_NAME}" --distro ubi --cluster-wide "${cluster_wide}"
local test_results=$?
set -e

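
A quick follow-up check (not part of this change) to confirm no call sites still reference the old location:

    grep -rn "scripts/evergreen/run_python.sh" --exclude-dir=.git .

Any remaining hits would point at files the rename missed.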