I'm trying to run the MLPerf script on Windows using Docker through Git Bash. Downloading the https://zenodo.org/records/4735647/files/resnet50_v1.onnx file from the console takes more than 15 minutes and then throws an error.
However, if I download the same file manually in the browser over HTTPS, it takes less than 1 minute, and over HTTP it took around 3 minutes.
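As a cross-check on the network path, the same file can be fetched outside the MLC/Docker flow with resume and retry support. This is only a rough sketch, assuming Python 3 with the requests package is available on the host; the output filename is an arbitrary example:

import os
import requests

URL = "https://zenodo.org/records/4735647/files/resnet50_v1.onnx"
OUT = "resnet50_v1.onnx"  # example output path; adjust as needed

# Resume an interrupted download by requesting only the bytes not yet on
# disk, and retry a few times if the connection drops mid-transfer.
for attempt in range(5):
    done = os.path.getsize(OUT) if os.path.exists(OUT) else 0
    headers = {"Range": f"bytes={done}-"} if done else {}
    try:
        with requests.get(URL, headers=headers, stream=True, timeout=60) as r:
            r.raise_for_status()
            with open(OUT, "ab") as f:
                for chunk in r.iter_content(chunk_size=1 << 20):
                    f.write(chunk)
        break  # completed without a broken connection
    except requests.exceptions.RequestException as exc:
        print(f"Attempt {attempt + 1} failed: {exc}; retrying...")

Equivalently, wget -c --tries=10 "https://zenodo.org/records/4735647/files/resnet50_v1.onnx" (if wget is available in Git Bash) resumes a partial download. This does not change the MLC run itself; it only helps confirm whether the slow or broken transfer is specific to the container's network path.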
$ mlcr run-mlperf,inference,_find-performance,_full,_r5.0-dev --model=resnet50 --implementation=reference --framework=onnxruntime --category=edge --scenario=Offline --execution_mode=test --device=cpu --docker --quiet --test_query_count=1000 --use_dataset_from_host=yes --download_dataset_to_host=yes
[2025-02-11 21:08:38,432 module.py:560 INFO] - * mlcr run-mlperf,inference,_find-performance,_full,_r5.0-dev
[2025-02-11 21:08:38,443 module.py:560 INFO] - * mlcr get,mlcommons,inference,src
[2025-02-11 21:08:38,445 module.py:1274 INFO] - ! load C:\Users\CSEMA\MLC\repos\local\cache\get-mlperf-inference-src_0d343849\mlc-cached-state.json
[2025-02-11 21:08:38,449 module.py:560 INFO] - * mlcr get,mlperf,inference,results,dir,_version.r5.0-dev
[2025-02-11 21:08:38,451 module.py:1274 INFO] - ! load C:\Users\CSEMA\MLC\repos\local\cache\get-mlperf-inference-results-dir_b14e6733\mlc-cached-state.json
[2025-02-11 21:08:38,455 module.py:560 INFO] - * mlcr install,pip-package,for-mlc-python,_package.tabulate
[2025-02-11 21:08:38,456 module.py:1274 INFO] - ! load C:\Users\CSEMA\MLC\repos\local\cache\install-pip-package-for-mlc-python_abab6956\mlc-cached-state.json
[2025-02-11 21:08:38,459 module.py:560 INFO] - * mlcr get,mlperf,inference,utils
[2025-02-11 21:08:38,471 module.py:560 INFO] - * mlcr get,mlperf,inference,src
[2025-02-11 21:08:38,473 module.py:1274 INFO] - ! load C:\Users\CSEMA\MLC\repos\local\cache\get-mlperf-inference-src_0d343849\mlc-cached-state.json
[2025-02-11 21:08:38,476 module.py:5481 INFO] - ! call "postprocess" from C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\script\get-mlperf-inference-utils\customize.py
Using MLCommons Inference source from C:\Users\CSEMA\MLC\repos\local\cache\get-git-repo_37fd06ba\inference
Running loadgen scenario: Offline and mode: performance
[2025-02-11 21:08:38,573 module.py:560 INFO] - * mlcr build,dockerfile
[2025-02-11 21:08:38,578 module.py:560 INFO] - * mlcr get,docker
[2025-02-11 21:08:38,579 module.py:1274 INFO] - ! load C:\Users\CSEMA\MLC\repos\local\cache\get-docker_9e55fd95\mlc-cached-state.json
mlc pull repo && mlcr --tags=app,mlperf,inference,generic,_reference,_resnet50,_onnxruntime,_cpu,_test,_r5.0-dev_default,_offline --quiet=true --env.MLC_QUIET=yes --env.MLC_WINDOWS=yes --env.MLC_MLPERF_IMPLEMENTATION=reference --env.MLC_MLPERF_MODEL=resnet50 --env.MLC_MLPERF_RUN_STYLE=test --env.MLC_MLPERF_SKIP_SUBMISSION_GENERATION=False --env.MLC_DOCKER_PRIVILEGED_MODE=True --env.MLC_MLPERF_SUBMISSION_DIVISION=open --env.MLC_MLPERF_INFERENCE_TP_SIZE=1 --env.MLC_MLPERF_SUBMISSION_SYSTEM_TYPE=edge --env.MLC_MLPERF_DEVICE=cpu --env.MLC_MLPERF_USE_DOCKER=True --env.MLC_MLPERF_BACKEND=onnxruntime --env.MLC_MLPERF_LOADGEN_SCENARIO=Offline --env.MLC_TEST_QUERY_COUNT=1000 --env.MLC_USE_DATASET_FROM_HOST=yes --env.MLC_MLPERF_FIND_PERFORMANCE_MODE=yes --env.MLC_MLPERF_LOADGEN_ALL_MODES=no --env.MLC_MLPERF_LOADGEN_MODE=performance --env.MLC_MLPERF_RESULT_PUSH_TO_GITHUB=False --env.MLC_MLPERF_SUBMISSION_GENERATION_STYLE=full --env.MLC_MLPERF_INFERENCE_VERSION=5.0-dev --env.MLC_RUN_MLPERF_INFERENCE_APP_DEFAULTS=r5.0-dev_default --env.MLC_MLPERF_SUBMISSION_CHECKER_VERSION=v5.0 --env.MLC_MLPERF_INFERENCE_SOURCE_VERSION=5.0.15 --env.MLC_MLPERF_LAST_RELEASE=v5.0 --env.MLC_MLPERF_INFERENCE_RESULTS_VERSION=r5.0-dev --env.MLC_MODEL=resnet50 --env.MLC_MLPERF_LOADGEN_COMPLIANCE=no --env.MLC_MLPERF_LOADGEN_EXTRA_OPTIONS= --env.MLC_MLPERF_LOADGEN_SCENARIOS,=Offline --env.MLC_MLPERF_LOADGEN_MODES,=performance --env.MLC_OUTPUT_FOLDER_NAME=test_results --add_deps_recursive.coco2014-original.tags=_full --add_deps_recursive.coco2014-preprocessed.tags=_full --add_deps_recursive.imagenet-original.tags=_full --add_deps_recursive.imagenet-preprocessed.tags=_full --add_deps_recursive.openimages-original.tags=_full --add_deps_recursive.openimages-preprocessed.tags=_full --add_deps_recursive.openorca-original.tags=_full --add_deps_recursive.openorca-preprocessed.tags=_full --add_deps_recursive.coco2014-dataset.tags=_full --add_deps_recursive.igbh-dataset.tags=_full --add_deps_recursive.get-mlperf-inference-results-dir.tags=_version.r5.0-dev --add_deps_recursive.get-mlperf-inference-submission-dir.tags=_version.r5.0-dev --add_deps_recursive.mlperf-inference-nvidia-scratch-space.tags=_version.r5.0-dev --v=False --print_env=False --print_deps=False --dump_version_info=True --quiet
Dockerfile written at C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\script\app-mlperf-inference\dockerfiles\ubuntu_22.04.Dockerfile
[2025-02-11 21:08:38,657 docker.py:191 INFO] - Dockerfile generated at C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\script\app-mlperf-inference\dockerfiles\ubuntu_22.04.Dockerfile
[2025-02-11 21:08:38,741 module.py:560 INFO] - * mlcr get,docker
[2025-02-11 21:08:38,743 module.py:1274 INFO] - ! load C:\Users\CSEMA\MLC\repos\local\cache\get-docker_9e55fd95\mlc-cached-state.json
[2025-02-11 21:08:38,752 module.py:560 INFO] - * mlcr get,mlperf,inference,submission,dir,local,_version.r5.0-dev
[2025-02-11 21:08:38,754 module.py:1274 INFO] - ! load C:\Users\CSEMA\MLC\repos\local\cache\get-mlperf-inference-submission-dir_c73e05d3\mlc-cached-state.json
[2025-02-11 21:08:38,762 module.py:560 INFO] - * mlcr get,dataset,imagenet,validation,original,_full
[2025-02-11 21:08:38,764 module.py:1274 INFO] - ! load C:\Users\CSEMA\MLC\repos\local\cache\get-dataset-imagenet-val_2b43072a\mlc-cached-state.json
[2025-02-11 21:08:38,772 module.py:560 INFO] - * mlcr run,docker,container
[2025-02-11 21:08:38,778 module.py:560 INFO] - * mlcr get,docker
[2025-02-11 21:08:38,780 module.py:1274 INFO] - ! load C:\Users\CSEMA\MLC\repos\local\cache\get-docker_9e55fd95\mlc-cached-state.json
Checking existing Docker container:
docker ps --format "{{ .ID }}," --filter "ancestor=localhost/local/mlc-script-app-mlperf-inference-generic--reference--resnet50--onnxruntime--cpu--test--r5.0-dev-default--offline:ubuntu-22.04-latest" 2> nul
Checking Docker images:
docker images -q localhost/local/mlc-script-app-mlperf-inference-generic--reference--resnet50--onnxruntime--cpu--test--r5.0-dev-default--offline:ubuntu-22.04-latest 2> nul
[2025-02-11 21:08:39,322 module.py:560 INFO] - * mlcr build,docker,image
[2025-02-11 21:08:39,333 module.py:560 INFO] - * mlcr get,docker
[2025-02-11 21:08:39,337 module.py:1274 INFO] - ! load C:\Users\CSEMA\MLC\repos\local\cache\get-docker_9e55fd95\mlc-cached-state.json
docker build -f "C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\script\app-mlperf-inference\dockerfiles\ubuntu_22.04.Dockerfile" -t "localhost/local/mlc-script-app-mlperf-inference-generic--reference--resnet50--onnxruntime--cpu--test--r5.0-dev-default--offline:ubuntu-22.04-latest" .
[2025-02-11 21:08:39,342 module.py:5334 INFO] - ! cd C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\script\app-mlperf-inference\dockerfiles
[2025-02-11 21:08:39,342 module.py:5335 INFO] - ! call C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\script\build-docker-image\run.bat from tmp-run.bat
137.7 [2025-02-11 15:41:01,586 module.py:1309 DEBUG] - - Checking posthook dependencies on other MLC scripts:
137.7 [2025-02-11 15:41:01,586 module.py:1322 DEBUG] - - Checking post dependencies on other MLC scripts:
137.7 [2025-02-11 15:41:01,587 module.py:2192 INFO] - - running time of script "get,python,python3,get-python,get-python3": 0.01 sec.
137.7 [2025-02-11 15:41:01,587 module.py:2220 INFO] - Path to Python: /home/mlcuser/venv/mlc/bin/python3
137.7 [2025-02-11 15:41:01,587 module.py:2220 INFO] - Python version: 3.10.12
137.7 [2025-02-11 15:41:01,587 module.py:1637 DEBUG] - - Processing env after dependencies ...
137.7 [2025-02-11 15:41:01,588 module.py:1759 DEBUG] - - Running preprocess ...
137.7 [2025-02-11 15:41:01,593 module.py:5327 DEBUG] - - Running native script "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/run.sh" from temporal script "tmp-run.sh" in "/home/mlcuser/MLC/repos/local/cache/get-generic-python-lib_03833751" ...
137.7 [2025-02-11 15:41:01,593 module.py:5334 INFO] - ! cd /home/mlcuser/MLC/repos/local/cache/get-generic-python-lib_03833751
137.7 [2025-02-11 15:41:01,593 module.py:5335 INFO] - ! call /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/run.sh from tmp-run.sh
137.8 [2025-02-11 15:41:01,684 module.py:5481 INFO] - ! call "detect_version" from /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/customize.py
137.8 Detected version: 22.0.2
137.8 [2025-02-11 15:41:01,690 module.py:1838 DEBUG] - - Checking prehook dependencies on other MLC scripts:
137.8 [2025-02-11 15:41:01,690 module.py:5327 DEBUG] - - Running native script "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/run.sh" from temporal script "tmp-run.sh" in "/home/mlcuser/MLC/repos/local/cache/get-generic-python-lib_03833751" ...
137.8 [2025-02-11 15:41:01,690 module.py:5334 INFO] - ! cd /home/mlcuser/MLC/repos/local/cache/get-generic-python-lib_03833751
137.8 [2025-02-11 15:41:01,690 module.py:5335 INFO] - ! call /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/run.sh from tmp-run.sh
137.9 [2025-02-11 15:41:01,752 module.py:5481 INFO] - ! call "postprocess" from /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/customize.py
137.9 [2025-02-11 15:41:01,752 module.py:5544 DEBUG] - - Running postprocess ...
137.9 [2025-02-11 15:41:01,766 module.py:2006 DEBUG] - - Removing tmp tag in the script cached output 03833751fe7d498e ...
137.9 [2025-02-11 15:41:01,767 module.py:2150 INFO] - - cache UID: 03833751fe7d498e
137.9 [2025-02-11 15:41:01,767 module.py:2192 INFO] - - running time of script "get,install,generic,pip-package,generic-python-lib": 0.39 sec.
137.9 [2025-02-11 15:41:01,770 module.py:1637 DEBUG] - - Processing env after dependencies ...
137.9 [2025-02-11 15:41:01,771 module.py:1759 DEBUG] - - Running preprocess ...
137.9 [2025-02-11 15:41:01,777 module.py:5327 DEBUG] - - Running native script "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/run.sh" from temporal script "tmp-run.sh" in "/home/mlcuser/MLC/repos/local/cache/get-generic-python-lib_e36e8220" ...
137.9 [2025-02-11 15:41:01,777 module.py:5334 INFO] - ! cd /home/mlcuser/MLC/repos/local/cache/get-generic-python-lib_e36e8220
137.9 [2025-02-11 15:41:01,777 module.py:5335 INFO] - ! call /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/run.sh from tmp-run.sh
138.0 [2025-02-11 15:41:01,831 module.py:5481 INFO] - ! call "detect_version" from /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/customize.py
138.0 [2025-02-11 15:41:01,837 module.py:5327 DEBUG] - - Running native script "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/install.sh" from temporal script "tmp-run.sh" in "/home/mlcuser/MLC/repos/local/cache/get-generic-python-lib_e36e8220" ...
138.0 [2025-02-11 15:41:01,837 module.py:5334 INFO] - ! cd /home/mlcuser/MLC/repos/local/cache/get-generic-python-lib_e36e8220
138.0 [2025-02-11 15:41:01,838 module.py:5335 INFO] - ! call /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/install.sh from tmp-run.sh
138.0
138.0 Extra PIP CMD:
138.0
138.0
138.0 /home/mlcuser/venv/mlc/bin/python3 -m pip install "onnxruntime"
140.5 Collecting onnxruntime
141.7 Downloading onnxruntime-1.20.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl (13.3 MB)
150.7 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 13.3/13.3 MB 1.8 MB/s eta 0:00:00
150.9 Collecting flatbuffers
151.2 Downloading flatbuffers-25.2.10-py2.py3-none-any.whl (30 kB)
151.5 Collecting sympy
151.6 Downloading sympy-1.13.3-py3-none-any.whl (6.2 MB)
155.6 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 6.2/6.2 MB 1.5 MB/s eta 0:00:00
156.5 Collecting protobuf
156.6 Downloading protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl (319 kB)
156.7 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 319.7/319.7 KB 1.8 MB/s eta 0:00:00
156.7 Requirement already satisfied: numpy>=1.21.6 in /home/mlcuser/venv/mlc/lib/python3.10/site-packages (from onnxruntime) (2.2.2)
156.9 Collecting packaging
157.1 Downloading packaging-24.2-py3-none-any.whl (65 kB)
157.2 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 65.5/65.5 KB 890.3 kB/s eta 0:00:00
157.2 Collecting coloredlogs
157.4 Downloading coloredlogs-15.0.1-py2.py3-none-any.whl (46 kB)
157.4 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 46.0/46.0 KB 4.2 MB/s eta 0:00:00
157.6 Collecting humanfriendly>=9.1
157.6 Downloading humanfriendly-10.0-py2.py3-none-any.whl (86 kB)
157.8 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 86.8/86.8 KB 1.2 MB/s eta 0:00:00
157.9 Collecting mpmath<1.4,>=1.1.0
157.9 Downloading mpmath-1.3.0-py3-none-any.whl (536 kB)
158.4 ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 536.2/536.2 KB 1.1 MB/s eta 0:00:00
158.6 Installing collected packages: mpmath, flatbuffers, sympy, protobuf, packaging, humanfriendly, coloredlogs, onnxruntime
164.5 Successfully installed coloredlogs-15.0.1 flatbuffers-25.2.10 humanfriendly-10.0 mpmath-1.3.0 onnxruntime-1.20.1 packaging-24.2 protobuf-5.29.3 sympy-1.13.3
164.6 [2025-02-11 15:41:28,496 module.py:1838 DEBUG] - - Checking prehook dependencies on other MLC scripts:
164.6 [2025-02-11 15:41:28,497 module.py:5327 DEBUG] - - Running native script "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/run.sh" from temporal script "tmp-run.sh" in "/home/mlcuser/MLC/repos/local/cache/get-generic-python-lib_e36e8220" ...
164.6 [2025-02-11 15:41:28,497 module.py:5334 INFO] - ! cd /home/mlcuser/MLC/repos/local/cache/get-generic-python-lib_e36e8220
164.6 [2025-02-11 15:41:28,497 module.py:5335 INFO] - ! call /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/run.sh from tmp-run.sh
164.7 [2025-02-11 15:41:28,556 module.py:5481 INFO] - ! call "postprocess" from /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-generic-python-lib/customize.py
164.7 [2025-02-11 15:41:28,556 module.py:5544 DEBUG] - - Running postprocess ...
164.7 Detected version: 1.20.1
164.7 [2025-02-11 15:41:28,565 module.py:2006 DEBUG] - - Removing tmp tag in the script cached output e36e82202bac45a5 ...
164.7 [2025-02-11 15:41:28,572 module.py:2150 INFO] - - cache UID: e36e82202bac45a5
164.7 [2025-02-11 15:41:28,573 module.py:2192 INFO] - - running time of script "get,install,generic,pip-package,generic-python-lib": 27.43 sec.
164.8 [2025-02-11 15:41:28,615 module.py:560 INFO] - * mlcr get,ml-model,image-classification,resnet50,raw,_onnx,_fp32
164.8 [2025-02-11 15:41:28,615 module.py:592 DEBUG] - - Number of scripts found: 1
164.8 [2025-02-11 15:41:28,615 module.py:654 DEBUG] - - Searching for cached script outputs with the following tags: -tmp,get,ml-model,image-classification,resnet50,raw,_onnx,_fp32
164.8 [2025-02-11 15:41:28,616 module.py:667 DEBUG] - - Number of cached script outputs found: 0
164.8 [2025-02-11 15:41:28,616 module.py:816 DEBUG] - - Found script::get-ml-model-resnet50, 56203e4e998b4bc0 in /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/get-ml-model-resnet50
164.8 [2025-02-11 15:41:28,616 module.py:2406 DEBUG] - Prepared variations: _onnx,_fp32,_opset-11,_argmax
164.8 [2025-02-11 15:41:28,618 module.py:4817 DEBUG] - - Checking if script execution is already cached ...
164.8 [2025-02-11 15:41:28,618 module.py:4850 DEBUG] - - Prepared explicit variations: _onnx,_fp32
164.8 [2025-02-11 15:41:28,618 module.py:4869 DEBUG] - - Prepared variations: _onnx,_fp32
164.8 [2025-02-11 15:41:28,618 module.py:4908 DEBUG] - - Searching for cached script outputs with the following tags: -tmp,get,ml-model,image-classification,resnet50,raw,ml-model-resnet50,_onnx,_fp32
164.8 [2025-02-11 15:41:28,619 module.py:1369 DEBUG] - - Creating new "cache" script artifact in the MLC local repository ...
164.8 [2025-02-11 15:41:28,619 module.py:1372 DEBUG] - - Tags: tmp,get,ml-model,image-classification,resnet50,raw,ml-model-resnet50,_onnx,_fp32,script-item-56203e4e998b4bc0
164.8 [2025-02-11 15:41:28,621 module.py:1400 DEBUG] - - Changing to /home/mlcuser/MLC/repos/local/cache/get-ml-model-resnet50_b44eabb6
164.8 [2025-02-11 15:41:28,622 module.py:1759 DEBUG] - - Running preprocess ...
164.8 [2025-02-11 15:41:28,626 module.py:1838 DEBUG] - - Checking prehook dependencies on other MLC scripts:
164.8 [2025-02-11 15:41:28,640 module.py:560 INFO] - * mlcr download-and-extract,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx
164.8 [2025-02-11 15:41:28,640 module.py:592 DEBUG] - - Number of scripts found: 1
164.8 [2025-02-11 15:41:28,640 module.py:654 DEBUG] - - Searching for cached script outputs with the following tags: -tmp,download-and-extract,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx
164.8 [2025-02-11 15:41:28,640 module.py:667 DEBUG] - - Number of cached script outputs found: 0
164.8 [2025-02-11 15:41:28,640 module.py:816 DEBUG] - - Found script::download-and-extract, c67e81a4ce2649f5 in /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/download-and-extract
164.8 [2025-02-11 15:41:28,640 module.py:2406 DEBUG] - Prepared variations: _url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx,_cmutil,_keep
164.8 [2025-02-11 15:41:28,641 module.py:4817 DEBUG] - - Checking if script execution is already cached ...
164.8 [2025-02-11 15:41:28,642 module.py:4850 DEBUG] - - Prepared explicit variations: _url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx
164.8 [2025-02-11 15:41:28,642 module.py:4869 DEBUG] - - Prepared variations: _url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx
164.8 [2025-02-11 15:41:28,642 module.py:4908 DEBUG] - - Searching for cached script outputs with the following tags: -tmp,download-and-extract,dae,file,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx,ml-model,resnet50,raw,ml-model-resnet50,_onnx
164.8 [2025-02-11 15:41:28,642 module.py:1369 DEBUG] - - Creating new "cache" script artifact in the MLC local repository ...
164.8 [2025-02-11 15:41:28,642 module.py:1372 DEBUG] - - Tags: tmp,download-and-extract,dae,file,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx,ml-model,resnet50,raw,ml-model-resnet50,_onnx,script-item-c67e81a4ce2649f5
164.8 [2025-02-11 15:41:28,644 module.py:1400 DEBUG] - - Changing to /home/mlcuser/MLC/repos/local/cache/download-and-extract_7435d633
164.8 [2025-02-11 15:41:28,645 module.py:1759 DEBUG] - - Running preprocess ...
164.8 [2025-02-11 15:41:28,649 module.py:1838 DEBUG] - - Checking prehook dependencies on other MLC scripts:
164.8 [2025-02-11 15:41:28,660 module.py:560 INFO] - * mlcr download,file,_cmutil,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx
164.8 [2025-02-11 15:41:28,660 module.py:592 DEBUG] - - Number of scripts found: 1
164.8 [2025-02-11 15:41:28,660 module.py:654 DEBUG] - - Searching for cached script outputs with the following tags: -tmp,download,file,_cmutil,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx
164.8 [2025-02-11 15:41:28,660 module.py:667 DEBUG] - - Number of cached script outputs found: 0
164.8 [2025-02-11 15:41:28,660 module.py:816 DEBUG] - - Found script::download-file, 9cdc8dc41aae437e in /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/download-file
164.8 [2025-02-11 15:41:28,661 module.py:2406 DEBUG] - Prepared variations: _cmutil,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx
164.8 [2025-02-11 15:41:28,661 module.py:4817 DEBUG] - - Checking if script execution is already cached ...
164.8 [2025-02-11 15:41:28,661 module.py:4850 DEBUG] - - Prepared explicit variations: _cmutil,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx
164.8 [2025-02-11 15:41:28,662 module.py:4869 DEBUG] - - Prepared variations: _cmutil,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx
164.8 [2025-02-11 15:41:28,662 module.py:4908 DEBUG] - - Searching for cached script outputs with the following tags: -tmp,download,file,download-file,_cmutil,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx,ml-model,resnet50,raw,ml-model-resnet50,_onnx
164.8 [2025-02-11 15:41:28,662 module.py:1369 DEBUG] - - Creating new "cache" script artifact in the MLC local repository ...
164.8 [2025-02-11 15:41:28,662 module.py:1372 DEBUG] - - Tags: tmp,download,file,download-file,_cmutil,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx,ml-model,resnet50,raw,ml-model-resnet50,_onnx,script-item-9cdc8dc41aae437e
164.8 [2025-02-11 15:41:28,664 module.py:1400 DEBUG] - - Changing to /home/mlcuser/MLC/repos/local/cache/download-file_5ec6c03f
164.8 [2025-02-11 15:41:28,664 module.py:1628 DEBUG] - - Checking dependencies on other MLC scripts:
164.8 [2025-02-11 15:41:28,671 module.py:560 INFO] - * mlcr detect,os
164.8 [2025-02-11 15:41:28,671 module.py:592 DEBUG] - - Number of scripts found: 1
164.8 [2025-02-11 15:41:28,671 module.py:816 DEBUG] - - Found script::detect-os, 863735b7db8c44fc in /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/detect-os
164.8 [2025-02-11 15:41:28,673 module.py:1759 DEBUG] - - Running preprocess ...
164.8 [2025-02-11 15:41:28,677 module.py:5327 DEBUG] - - Running native script "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/detect-os/run.sh" from temporal script "tmp-run.sh" in "/home/mlcuser/MLC/repos/local/cache/download-file_5ec6c03f" ...
164.8 [2025-02-11 15:41:28,677 module.py:5334 INFO] - ! cd /home/mlcuser/MLC/repos/local/cache/download-file_5ec6c03f
164.8 [2025-02-11 15:41:28,677 module.py:5335 INFO] - ! call /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/detect-os/run.sh from tmp-run.sh
164.8 [2025-02-11 15:41:28,695 module.py:5481 INFO] - ! call "postprocess" from /home/mlcuser/MLC/repos/mlcommons@mlperf-automations/script/detect-os/customize.py
164.8 [2025-02-11 15:41:28,695 module.py:5544 DEBUG] - - Running postprocess ...
164.9 [2025-02-11 15:41:28,702 module.py:2192 INFO] - - running time of script "detect-os,detect,os,info": 0.04 sec.
164.9 [2025-02-11 15:41:28,702 module.py:1637 DEBUG] - - Processing env after dependencies ...
164.9 [2025-02-11 15:41:28,703 module.py:1759 DEBUG] - - Running preprocess ...
164.9
164.9 Downloading from https://zenodo.org/record/4735647/files/resnet50_v1.onnx
165.1 Downloading to /home/mlcuser/MLC/repos/local/cache/download-file_5ec6c03f/resnet50_v1.onnx
165.1
Downloaded: 3%
Traceback (most recent call last):
406.9 File "/home/mlcuser/venv/mlc/bin/mlcr", line 8, in <module>
406.9 sys.exit(mlcr())
406.9 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 1631, in mlcr
407.0 main()
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 1713, in main
407.0 res = method(run_args)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 1472, in run
407.0 return self.call_script_module_function("run", run_args)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 1452, in call_script_module_function
407.0 result = automation_instance.run(run_args) # Pass args to the run method
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 225, in run
407.0 r = self._run(i)
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 1842, in _run
407.0 r = self._call_run_deps(prehook_deps, self.local_env_keys, local_env_keys_from_meta, env, state, const, const_state, add_deps_recursive,
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 3532, in _call_run_deps
407.0 r = script._run_deps(deps, local_env_keys, env, state, const, const_state, add_deps_recursive, recursion_spaces,
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 3702, in _run_deps
407.0 r = self.action_object.access(ii)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 96, in access
407.0 result = method(self, options)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 1472, in run
407.0 return self.call_script_module_function("run", run_args)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 1452, in call_script_module_function
407.0 result = automation_instance.run(run_args) # Pass args to the run method
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 225, in run
407.0 r = self._run(i)
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 1631, in _run
407.0 r = self._call_run_deps(deps, self.local_env_keys, local_env_keys_from_meta, env, state, const, const_state, add_deps_recursive,
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 3532, in _call_run_deps
407.0 r = script._run_deps(deps, local_env_keys, env, state, const, const_state, add_deps_recursive, recursion_spaces,
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 3702, in _run_deps
407.0 r = self.action_object.access(ii)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 96, in access
407.0 result = method(self, options)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 1472, in run
407.0 return self.call_script_module_function("run", run_args)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 1452, in call_script_module_function
407.0 result = automation_instance.run(run_args) # Pass args to the run method
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 225, in run
407.0 r = self._run(i)
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 1842, in _run
407.0 r = self._call_run_deps(prehook_deps, self.local_env_keys, local_env_keys_from_meta, env, state, const, const_state, add_deps_recursive,
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 3532, in _call_run_deps
407.0 r = script._run_deps(deps, local_env_keys, env, state, const, const_state, add_deps_recursive, recursion_spaces,
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 3702, in _run_deps
407.0 r = self.action_object.access(ii)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 96, in access
407.0 result = method(self, options)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 1472, in run
407.0 return self.call_script_module_function("run", run_args)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 1452, in call_script_module_function
407.0 result = automation_instance.run(run_args) # Pass args to the run method
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 225, in run
407.0 r = self._run(i)
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 1842, in _run
407.0 r = self._call_run_deps(prehook_deps, self.local_env_keys, local_env_keys_from_meta, env, state, const, const_state, add_deps_recursive,
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 3532, in _call_run_deps
407.0 r = script._run_deps(deps, local_env_keys, env, state, const, const_state, add_deps_recursive, recursion_spaces,
407.0 File "/home/mlcuser/MLC/repos/mlcommons@mlperf-automations/automation/script/module.py", line 3702, in _run_deps
407.0 r = self.action_object.access(ii)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 96, in access
407.0 result = method(self, options)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 1472, in run
407.0 return self.call_script_module_function("run", run_args)
407.0 File "/home/mlcuser/venv/mlc/lib/python3.10/site-packages/mlc/main.py", line 1462, in call_script_module_function
407.0 raise ScriptExecutionError(f"Script {function_name} execution failed. Error : {error}")
407.0 mlc.main.ScriptExecutionError: Script run execution failed. Error : ('Connection broken: IncompleteRead(2959388 bytes read, 99200688 more expected)', IncompleteRead(2959388 bytes read, 99200688 more expected))
1 warning found (use docker --debug to expand):
ubuntu_22.04.Dockerfile:46
44 |
45 | # Run commands
46 | >>> RUN mlc pull repo && mlcr --tags=app,mlperf,inference,generic,_reference,_resnet50,_onnxruntime,_cpu,_test,_r5.0-dev_default,_offline --quiet=true --env.MLC_QUIET=yes --env.MLC_WINDOWS=yes --env.MLC_MLPERF_IMPLEMENTATION=reference --env.MLC_MLPERF_MODEL=resnet50 --env.MLC_MLPERF_RUN_STYLE=test --env.MLC_MLPERF_SKIP_SUBMISSION_GENERATION=False --env.MLC_DOCKER_PRIVILEGED_MODE=True --env.MLC_MLPERF_SUBMISSION_DIVISION=open --env.MLC_MLPERF_INFERENCE_TP_SIZE=1 --env.MLC_MLPERF_SUBMISSION_SYSTEM_TYPE=edge --env.MLC_MLPERF_DEVICE=cpu --env.MLC_MLPERF_USE_DOCKER=True --env.MLC_MLPERF_BACKEND=onnxruntime --env.MLC_MLPERF_LOADGEN_SCENARIO=Offline --env.MLC_TEST_QUERY_COUNT=1000 --env.MLC_USE_DATASET_FROM_HOST=yes --env.MLC_MLPERF_FIND_PERFORMANCE_MODE=yes --env.MLC_MLPERF_LOADGEN_ALL_MODES=no --env.MLC_MLPERF_LOADGEN_MODE=performance --env.MLC_MLPERF_RESULT_PUSH_TO_GITHUB=False --env.MLC_MLPERF_SUBMISSION_GENERATION_STYLE=full --env.MLC_MLPERF_INFERENCE_VERSION=5.0-dev --env.MLC_RUN_MLPERF_INFERENCE_APP_DEFAULTS=r5.0-dev_default --env.MLC_MLPERF_SUBMISSION_CHECKER_VERSION=v5.0 --env.MLC_MLPERF_INFERENCE_SOURCE_VERSION=5.0.15 --env.MLC_MLPERF_LAST_RELEASE=v5.0 --env.MLC_MLPERF_INFERENCE_RESULTS_VERSION=r5.0-dev --env.MLC_MODEL=resnet50 --env.MLC_MLPERF_LOADGEN_COMPLIANCE=no --env.MLC_MLPERF_LOADGEN_EXTRA_OPTIONS= --env.MLC_MLPERF_LOADGEN_SCENARIOS,=Offline --env.MLC_MLPERF_LOADGEN_MODES,=performance --env.MLC_OUTPUT_FOLDER_NAME=test_results --add_deps_recursive.coco2014-original.tags=_full --add_deps_recursive.coco2014-preprocessed.tags=_full --add_deps_recursive.imagenet-original.tags=_full --add_deps_recursive.imagenet-preprocessed.tags=_full --add_deps_recursive.openimages-original.tags=_full --add_deps_recursive.openimages-preprocessed.tags=_full --add_deps_recursive.openorca-original.tags=_full --add_deps_recursive.openorca-preprocessed.tags=_full --add_deps_recursive.coco2014-dataset.tags=_full --add_deps_recursive.igbh-dataset.tags=_full --add_deps_recursive.get-mlperf-inference-results-dir.tags=_version.r5.0-dev --add_deps_recursive.get-mlperf-inference-submission-dir.tags=_version.r5.0-dev --add_deps_recursive.mlperf-inference-nvidia-scratch-space.tags=_version.r5.0-dev --v=False --print_env=False --print_deps=False --dump_version_info=True --quiet --fake_run --env.MLC_RUN_STATE_DOCKER=True
47 |
ERROR: failed to solve: process "/bin/sh -c mlc pull repo && mlcr --tags=app,mlperf,inference,generic,_reference,_resnet50,_onnxruntime,_cpu,_test,_r5.0-dev_default,_offline --quiet=true --env.MLC_QUIET=yes --env.MLC_WINDOWS=yes --env.MLC_MLPERF_IMPLEMENTATION=reference --env.MLC_MLPERF_MODEL=resnet50 --env.MLC_MLPERF_RUN_STYLE=test --env.MLC_MLPERF_SKIP_SUBMISSION_GENERATION=False --env.MLC_DOCKER_PRIVILEGED_MODE=True --env.MLC_MLPERF_SUBMISSION_DIVISION=open --env.MLC_MLPERF_INFERENCE_TP_SIZE=1 --env.MLC_MLPERF_SUBMISSION_SYSTEM_TYPE=edge --env.MLC_MLPERF_DEVICE=cpu --env.MLC_MLPERF_USE_DOCKER=True --env.MLC_MLPERF_BACKEND=onnxruntime --env.MLC_MLPERF_LOADGEN_SCENARIO=Offline --env.MLC_TEST_QUERY_COUNT=1000 --env.MLC_USE_DATASET_FROM_HOST=yes --env.MLC_MLPERF_FIND_PERFORMANCE_MODE=yes --env.MLC_MLPERF_LOADGEN_ALL_MODES=no --env.MLC_MLPERF_LOADGEN_MODE=performance --env.MLC_MLPERF_RESULT_PUSH_TO_GITHUB=False --env.MLC_MLPERF_SUBMISSION_GENERATION_STYLE=full --env.MLC_MLPERF_INFERENCE_VERSION=5.0-dev --env.MLC_RUN_MLPERF_INFERENCE_APP_DEFAULTS=r5.0-dev_default --env.MLC_MLPERF_SUBMISSION_CHECKER_VERSION=v5.0 --env.MLC_MLPERF_INFERENCE_SOURCE_VERSION=5.0.15 --env.MLC_MLPERF_LAST_RELEASE=v5.0 --env.MLC_MLPERF_INFERENCE_RESULTS_VERSION=r5.0-dev --env.MLC_MODEL=resnet50 --env.MLC_MLPERF_LOADGEN_COMPLIANCE=no --env.MLC_MLPERF_LOADGEN_EXTRA_OPTIONS= --env.MLC_MLPERF_LOADGEN_SCENARIOS,=Offline --env.MLC_MLPERF_LOADGEN_MODES,=performance --env.MLC_OUTPUT_FOLDER_NAME=test_results --add_deps_recursive.coco2014-original.tags=_full --add_deps_recursive.coco2014-preprocessed.tags=_full --add_deps_recursive.imagenet-original.tags=_full --add_deps_recursive.imagenet-preprocessed.tags=_full --add_deps_recursive.openimages-original.tags=_full --add_deps_recursive.openimages-preprocessed.tags=_full --add_deps_recursive.openorca-original.tags=_full --add_deps_recursive.openorca-preprocessed.tags=_full --add_deps_recursive.coco2014-dataset.tags=_full --add_deps_recursive.igbh-dataset.tags=_full --add_deps_recursive.get-mlperf-inference-results-dir.tags=_version.r5.0-dev --add_deps_recursive.get-mlperf-inference-submission-dir.tags=_version.r5.0-dev --add_deps_recursive.mlperf-inference-nvidia-scratch-space.tags=_version.r5.0-dev --v=False --print_env=False --print_deps=False --dump_version_info=True --quiet --fake_run --env.MLC_RUN_STATE_DOCKER=True" did not complete successfully: exit code: 1
View build details: docker-desktop://dashboard/build/desktop-linux/desktop-linux/npcpkq8ija57ipgur3v6vzzou
Traceback (most recent call last):
File "", line 198, in _run_module_as_main
File "", line 88, in run_code
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Scripts\mlcr.exe_main.py", line 7, in
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 1670, in mlcr
main()
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 1752, in main
res = method(run_args)
^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 1511, in run
return self.call_script_module_function("run", run_args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 1491, in call_script_module_function
result = automation_instance.run(run_args) # Pass args to the run method
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\automation\script\module.py", line 225, in run
r = self._run(i)
^^^^^^^^^^^^
File "C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\automation\script\module.py", line 1772, in _run
r = customize_code.preprocess(ii)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\script\run-mlperf-inference-app\customize.py", line 286, in preprocess
r = mlc.access(ii)
^^^^^^^^^^^^^^
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 96, in access
result = method(self, options)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 1508, in docker
return self.call_script_module_function("docker", run_args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 1493, in call_script_module_function
result = automation_instance.docker(run_args) # Pass args to the run method
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\automation\script\module.py", line 4691, in docker
return docker_run(self, i)
^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\automation\script\docker.py", line 381, in docker_run
r = self_module.action_object.access(mlc_docker_input)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 96, in access
result = method(self, options)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 1511, in run
return self.call_script_module_function("run", run_args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 1491, in call_script_module_function
result = automation_instance.run(run_args) # Pass args to the run method
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\automation\script\module.py", line 225, in run
r = self._run(i)
^^^^^^^^^^^^
File "C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\automation\script\module.py", line 1842, in _run
r = self._call_run_deps(prehook_deps, self.local_env_keys, local_env_keys_from_meta, env, state, const, const_state, add_deps_recursive,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\automation\script\module.py", line 3532, in _call_run_deps
r = script._run_deps(deps, local_env_keys, env, state, const, const_state, add_deps_recursive, recursion_spaces,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\MLC\repos\mlcommons@mlperf-automations\automation\script\module.py", line 3702, in _run_deps
r = self.action_object.access(ii)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 96, in access
result = method(self, options)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 1511, in run
return self.call_script_module_function("run", run_args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\CSEMA\AppData\Local\Programs\Python\Python312\Lib\site-packages\mlc\main.py", line 1501, in call_script_module_function
raise ScriptExecutionError(f"Script {function_name} execution failed. Error : {error}")
mlc.main.ScriptExecutionError: Script run execution failed. Error : MLC script failed (name = build-docker-image, return code = 1)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Please file an issue at https://github.com/mlcommons/mlperf-automations/issues along with the full MLC command being run and the relevant
or full console log.