-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdriver.py
More file actions
146 lines (124 loc) · 8.14 KB
/
driver.py
File metadata and controls
146 lines (124 loc) · 8.14 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
import argparse
import os
import random
import json
from src.main_pipeline import main as single_run
from src.global_utils import config
from src.global_utils.global_utils import create_dir, create_log_dirs
from src.swarm import main as swarm_test
from src.swarm import run_with_configs as swarm_rerun
from src.experiments import main as experiment_main
def expand_path(path):
    """Return *path* with a leading '~' expanded and converted to an absolute path."""
    expanded = os.path.expanduser(path)
    return os.path.abspath(expanded)
def create_log_and_test_dirs(directories):
    """Create the directories a run needs.

    *directories* maps role names to paths. ``LOG_DIR``/``ONGOING_LOG_DIR``
    and ``TEST_DIR`` are always created; ``CONFIGS_DIR`` is only created
    when the key is present in the mapping.
    """
    if "CONFIGS_DIR" in directories:
        create_dir(directories["CONFIGS_DIR"])
    create_log_dirs(directories["LOG_DIR"], directories["ONGOING_LOG_DIR"])
    create_dir(directories["TEST_DIR"])
def start_single_pipeline_run(config_path, overrides):
    """
    Start the testing pipeline for a given config json file.
    """
    # Load the base config, then layer the caller-supplied overrides on top.
    config.initialize(config_path)
    for key, value in overrides.items():
        config.set(key, value)
    directories = {
        name: config.get(name)
        for name in ("LOG_DIR", "ONGOING_LOG_DIR", "TEST_DIR")
    }
    create_log_and_test_dirs(directories)
    # start the pipeline
    single_run()
def start_swarm_test_run(directories, p, swarm_random_seed, swarm_config_template_path):
    """Prepare all run directories and launch a swarm-testing run."""
    create_log_and_test_dirs(directories)
    # extra log dir for the swarm-test overview log
    overview_log_dir = os.path.join(directories["LOG_DIR"], "swarm_tests")
    create_dir(overview_log_dir)
    # read the swarm config template that the generated configs are based on
    with open(swarm_config_template_path, "r") as template_file:
        template = json.load(template_file)
    swarm_test(directories, p, swarm_random_seed, template)
def start_rerun(configs_path, directories, p):
    """Re-run every config found under *configs_path* using *p* parallel processes."""
    create_log_and_test_dirs(directories)
    swarm_rerun(configs_path, directories, p)
if __name__ == "__main__":
    parser = argparse.ArgumentParser("driver.py")
    # Modes:
    #   single:     single pipeline run with a given config.
    #   swarm:      start swarm testing. Automatically generates configs.
    #   rerun:      rerun all configs in a given directory.
    #   experiment: run an experiment as specified in a provided experiment config file.
    subparsers = parser.add_subparsers(dest="mode", help="single, swarm or rerun", required=True)

    # parent parser for all common args
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument("--log_dir", help="Directory for storing logs.", type=str, required=False, default="logs")
    parent_parser.add_argument("--out_dir", help="Directory for temporarily storing test programs.", type=str, required=False, default="out")
    parent_parser.add_argument("--mpspdz_root_dir", help="Root directory of MP-SPDZ.", type=str, required=True)
    parent_parser.add_argument("--emp_root_dir", help="Root directory of EMP.", type=str, required=True)
    parent_parser.add_argument("--silph_root_dir", help="Root directory of the Silph Compiler.", type=str, required=True)
    parent_parser.add_argument("--ezpc_root_dir", help="Root directory of the EzPC Compiler.", type=str, required=True)
    parent_parser.add_argument("--temp_log_dir", help="Directory for temporary logs of ongoing processes.", type=str, required=False, default="logs/ongoing")
    parent_parser.add_argument("--configs_dir", help="Directory for temporarily storing generated configs.", type=str, required=False, default="configs")

    # parser for single run. Only needs the path of the config to use
    single_parser = subparsers.add_parser("single", help="Single run with a given config.")
    single_parser.add_argument("-c", help="for single run: path to config", type=str, required=True)
    single_parser.add_argument("--out", help="for single run: path to output dir for programs and logs", type=str, required=True)

    # parser for common args of swarm and reruns
    process_parser = argparse.ArgumentParser(add_help=False)
    process_parser.add_argument("-p", help="Number of Processes to run in parallel. -1 for all maximum", type=int, required=False, default=-1)

    swarm_config_template_path = "src/swarm/swarm_config.json"

    # parser for swarm run. Additionally takes an optional random seed
    swarm_parser = subparsers.add_parser("swarm", help="Start swarm testing, auto-generates configs.", parents=[parent_parser, process_parser])
    swarm_parser.add_argument("--rand", help="Random seed for this run. If not given, use a random one", type=int, required=False, default=None)

    # parser for reruns. Additionally takes the path to the directory containing all configs to be retried
    rerun_parser = subparsers.add_parser("rerun", help="Rerun all configs in a given directory", parents=[parent_parser, process_parser])
    rerun_parser.add_argument("--prepared_configs", help="Path to config JSON files directory", type=str, required=True)

    # parser for experiments
    experiment_parser = subparsers.add_parser("experiment", help="Run an experiment as specified in a provided config", parents=[process_parser])
    experiment_parser.add_argument("--temp_dir", help="for experiment run: path to temporary directory", type=str, required=True)
    experiment_parser.add_argument("--compilers_dir", help="for experiment run: path to compilers for TTB experiments", type=str, required=False, default=None)
    experiment_parser.add_argument("--log_dir", help="Directory for storing overall experiment logs.", type=str, required=True)
    experiment_parser.add_argument("-c", help="for experiment run: path to directory containing configs", type=str, required=True)
    experiment_parser.add_argument("--rand", help="Random seed for this run.", type=int, required=True)

    args = parser.parse_args()

    if args.mode == "single":
        # in single mode, we store all logs and generated programs in the out dir
        overrides = {
            "ONGOING_LOG_DIR": expand_path(args.out),
            "LOG_DIR": expand_path(args.out),
            "TEST_DIR": expand_path(args.out),
            "KEEP_LOGS": True,
            "HUMAN_READABLE_IR": True,
            "KEEP_PROGRAMS": True,
        }
        start_single_pipeline_run(args.c, overrides)
    elif args.mode == "experiment":
        experiment_main(
            experiment_config_path=args.c,
            swarm_config_template_path=swarm_config_template_path,
            temp_dir=args.temp_dir,
            log_dir=args.log_dir,
            p=args.p,
            random_seed=args.rand,
            compilers_dir=args.compilers_dir
        )
    else:
        # swarm and rerun share the full set of tool/root directories
        directories = {
            "ONGOING_LOG_DIR": expand_path(args.temp_log_dir),
            "LOG_DIR": expand_path(args.log_dir),
            "TEST_DIR": expand_path(args.out_dir),
            "MPSPDZ_ROOT_DIR": expand_path(args.mpspdz_root_dir),
            "EMP_ROOT_DIR": expand_path(args.emp_root_dir),
            "SILPH_ROOT_DIR": expand_path(args.silph_root_dir),
            "EZPC_ROOT_DIR": expand_path(args.ezpc_root_dir),
        }
        # if we have a directory full of config files, process them. Otherwise generate random configs
        if args.mode == "rerun":
            start_rerun(configs_path=expand_path(args.prepared_configs), directories=directories, p=args.p)
        elif args.mode == "swarm":
            # BUGFIX: the previous `args.rand or random.randint(...)` treated an
            # explicit seed of 0 as "not given" (0 is falsy) and silently replaced
            # it with a random seed. Compare against None so --rand 0 is honored.
            if args.rand is not None:
                swarm_random_seed = args.rand
            else:
                swarm_random_seed = random.randint(0, 4242424242)
            directories["CONFIGS_DIR"] = expand_path(args.configs_dir)
            start_swarm_test_run(directories=directories, p=args.p, swarm_random_seed=swarm_random_seed, swarm_config_template_path=swarm_config_template_path)
        else:
            raise Exception(f"Invalid mode: {args.mode}")