Skip to content

Commit 936e930

Browse files
PaliC authored and pytorchmergebot committed
Delete torch::deploy from pytorch core (pytorch#85953)
As we have migrated torch::deploy over to https://github.com/pytorch/multipy, we can now delete it from pytorch core as ongoing development will happen there. This PR was created due to syncing issues with pytorch#85443 which is where the review history can be found. Pull Request resolved: pytorch#85953 Approved by: https://github.com/seemethere, https://github.com/malfet
1 parent 27c3fb0 commit 936e930

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

82 files changed

+9
-8172
lines changed

.github/workflows/pull.yml

-20
Original file line numberDiff line numberDiff line change
@@ -302,26 +302,6 @@ jobs:
302302
docker-image-name: pytorch-linux-focal-py3.7-gcc7
303303
build-generates-artifacts: false
304304

305-
linux-bionic-cuda11_6-py3_10-gcc7-deploy-build:
306-
name: linux-bionic-cuda11_6-py3_10-gcc7-deploy
307-
uses: ./.github/workflows/_linux-build.yml
308-
with:
309-
build-environment: linux-bionic-cuda11.6-py3.10-gcc7-deploy
310-
docker-image-name: pytorch-linux-bionic-cuda11.6-cudnn8-py3-gcc7
311-
test-matrix: |
312-
{ include: [
313-
{ config: "deploy", shard: 1, num_shards: 1, runner: "linux.4xlarge.nvidia.gpu" },
314-
]}
315-
316-
deploy-linux-bionic-cuda11_6-py3_10-gcc7-test:
317-
name: linux-bionic-cuda11_6-py3_10-gcc7-deploy
318-
uses: ./.github/workflows/_linux-test.yml
319-
needs: linux-bionic-cuda11_6-py3_10-gcc7-deploy-build
320-
with:
321-
build-environment: linux-bionic-cuda11.6-py3.10-gcc7-deploy
322-
docker-image: ${{ needs.linux-bionic-cuda11_6-py3_10-gcc7-deploy-build.outputs.docker-image }}
323-
test-matrix: ${{ needs.linux-bionic-cuda11_6-py3_10-gcc7-deploy-build.outputs.test-matrix }}
324-
325305
linux-focal-rocm5_2-py3_7-build:
326306
# don't run build twice on master
327307
if: github.event_name == 'pull_request'

.gitignore

-4
Original file line numberDiff line numberDiff line change
@@ -78,10 +78,6 @@ torch/testing/_internal/generated/annotated_fn_args.py
7878
torch/testing/_internal/data/*.pt
7979
torch/csrc/api/include/torch/version.h
8080
torch/csrc/cudnn/cuDNN.cpp
81-
torch/csrc/deploy/example/generated
82-
torch/csrc/deploy/interpreter/cpython
83-
torch/csrc/deploy/interpreter/frozen
84-
torch/csrc/deploy/interpreter/third_party/typing_extensions.py
8581
torch/csrc/generated
8682
torch/csrc/generic/TensorMethods.cpp
8783
torch/csrc/jit/generated/*

.jenkins/pytorch/build.sh

-6
Original file line numberDiff line numberDiff line change
@@ -19,12 +19,6 @@ if [[ "$BUILD_ENVIRONMENT" == *-mobile-*build* ]]; then
1919
exec "$(dirname "${BASH_SOURCE[0]}")/build-mobile.sh" "$@"
2020
fi
2121

22-
if [[ "$BUILD_ENVIRONMENT" == *deploy* ]]; then
23-
# Enabling DEPLOY build (embedded torch python interpreter, experimental)
24-
# only on one config for now, can expand later
25-
export USE_DEPLOY=ON
26-
fi
27-
2822
echo "Python version:"
2923
python --version
3024

.jenkins/pytorch/test.sh

+1-14
Original file line numberDiff line numberDiff line change
@@ -655,16 +655,6 @@ test_dynamo() {
655655
popd
656656
}
657657

658-
test_torch_deploy() {
659-
python torch/csrc/deploy/example/generate_examples.py
660-
ln -sf "$TORCH_LIB_DIR"/libtorch* "$TORCH_BIN_DIR"
661-
ln -sf "$TORCH_LIB_DIR"/libshm* "$TORCH_BIN_DIR"
662-
ln -sf "$TORCH_LIB_DIR"/libc10* "$TORCH_BIN_DIR"
663-
"$TORCH_BIN_DIR"/test_deploy
664-
"$TORCH_BIN_DIR"/test_deploy_gpu
665-
assert_git_not_dirty
666-
}
667-
668658
test_docs_test() {
669659
.jenkins/pytorch/docs-test.sh
670660
}
@@ -673,10 +663,7 @@ if ! [[ "${BUILD_ENVIRONMENT}" == *libtorch* || "${BUILD_ENVIRONMENT}" == *-baze
673663
(cd test && python -c "import torch; print(torch.__config__.show())")
674664
(cd test && python -c "import torch; print(torch.__config__.parallel_info())")
675665
fi
676-
if [[ "${TEST_CONFIG}" == *deploy* ]]; then
677-
install_torchdynamo
678-
test_torch_deploy
679-
elif [[ "${TEST_CONFIG}" == *backward* ]]; then
666+
if [[ "${TEST_CONFIG}" == *backward* ]]; then
680667
test_forward_backward_compatibility
681668
# Do NOT add tests after bc check tests, see its comment.
682669
elif [[ "${TEST_CONFIG}" == *xla* ]]; then

.lintrunner.toml

-7
Original file line numberDiff line numberDiff line change
@@ -170,7 +170,6 @@ command = [
170170
[[linter]]
171171
code = 'CLANGTIDY'
172172
include_patterns = [
173-
'torch/csrc/deploy/**/*.cpp',
174173
'torch/csrc/fx/**/*.cpp',
175174
'torch/csrc/generic/**/*.cpp',
176175
'torch/csrc/onnx/**/*.cpp',
@@ -183,7 +182,6 @@ exclude_patterns = [
183182
# FunctionsManual.cpp is excluded to keep this diff clean. It will be fixed
184183
# in a follow up PR.
185184
# /torch/csrc/generic/*.cpp is excluded because those files aren't actually built.
186-
# deploy/interpreter files are excluded due to using macros and other techniquies
187185
# that are not easily converted to accepted c++
188186
'torch/csrc/jit/passes/onnx/helper.cpp',
189187
'torch/csrc/jit/passes/onnx/shape_type_inference.cpp',
@@ -197,11 +195,6 @@ exclude_patterns = [
197195
'torch/csrc/autograd/FunctionsManual.cpp',
198196
'torch/csrc/generic/*.cpp',
199197
'torch/csrc/jit/codegen/cuda/runtime/*',
200-
'torch/csrc/deploy/interactive_embedded_interpreter.cpp',
201-
'torch/csrc/deploy/interpreter/**',
202-
'torch/csrc/deploy/test_deploy_python_ext.cpp',
203-
'torch/csrc/deploy/test_deploy_missing_interpreter.cpp',
204-
'torch/csrc/deploy/test_deploy_gpu.cpp',
205198
'torch/csrc/utils/disable_torch_function.cpp',
206199
]
207200
init_command = [

BUILD.bazel

-1
Original file line numberDiff line numberDiff line change
@@ -1748,7 +1748,6 @@ cc_library(
17481748
# Torch integration tests rely on a labeled data set from the MNIST database.
17491749
# http://yann.lecun.com/exdb/mnist/
17501750

1751-
# imethod.cpp is excluded since torch/csrc/deploy* build is not yet supported.
17521751
cpp_api_tests = glob(
17531752
["test/cpp/api/*.cpp"],
17541753
exclude = [

CMakeLists.txt

-8
Original file line numberDiff line numberDiff line change
@@ -345,9 +345,6 @@ cmake_dependent_option(
345345
option(ONNX_ML "Enable traditional ONNX ML API." ON)
346346
option(HAVE_SOVERSION "Whether to add SOVERSION to the shared objects" OFF)
347347
option(BUILD_LIBTORCH_CPU_WITH_DEBUG "Enable RelWithDebInfo for libtorch_cpu target only" OFF)
348-
cmake_dependent_option(
349-
USE_DEPLOY "Build embedded torch::deploy interpreter. See torch/csrc/deploy/README.md for more info." OFF
350-
"BUILD_PYTHON" OFF)
351348
cmake_dependent_option(USE_CCACHE "Attempt using CCache to wrap the compilation" ON "UNIX" OFF)
352349
option(WERROR "Build with -Werror supported by the compiler" OFF)
353350
option(USE_COREML_DELEGATE "Use the CoreML backend through delegate APIs" OFF)
@@ -1177,11 +1174,6 @@ endif()
11771174
include(cmake/Summary.cmake)
11781175
caffe2_print_configuration_summary()
11791176

1180-
# ---[ Torch Deploy
1181-
if(USE_DEPLOY)
1182-
add_subdirectory(torch/csrc/deploy)
1183-
endif()
1184-
11851177
if(BUILD_FUNCTORCH)
11861178
add_subdirectory(functorch)
11871179
endif()

caffe2/CMakeLists.txt

-6
Original file line numberDiff line numberDiff line change
@@ -1154,12 +1154,6 @@ install(FILES
11541154
"${TORCH_SRC_DIR}/library.h"
11551155
"${TORCH_SRC_DIR}/custom_class_detail.h"
11561156
DESTINATION ${TORCH_INSTALL_INCLUDE_DIR}/torch)
1157-
if(USE_DEPLOY)
1158-
install(FILES
1159-
"${TORCH_SRC_DIR}/deploy.h"
1160-
DESTINATION ${TORCH_INSTALL_INCLUDE_DIR}/torch)
1161-
endif()
1162-
11631157
if(BUILD_TEST)
11641158
if(BUILD_LITE_INTERPRETER)
11651159
add_subdirectory(

cmake/Summary.cmake

-1
Original file line numberDiff line numberDiff line change
@@ -193,7 +193,6 @@ function(caffe2_print_configuration_summary)
193193
if(NOT "${SELECTED_OP_LIST}" STREQUAL "")
194194
message(STATUS " SELECTED_OP_LIST : ${SELECTED_OP_LIST}")
195195
endif()
196-
message(STATUS " USE_DEPLOY : ${USE_DEPLOY}")
197196
message(STATUS " Public Dependencies : ${Caffe2_PUBLIC_DEPENDENCY_LIBS}")
198197
message(STATUS " Private Dependencies : ${Caffe2_DEPENDENCY_LIBS}")
199198
# coreml

0 commit comments

Comments (0)