Skip to content

Commit 8473173

Browse files
malfet authored and pytorchmergebot committed
Remove breakpad dependency
This functionality does not seem to be used, and there are some requests to update the dependency. Add `third_party` to torch_cpu include directories if compiling with Caffe2 support, as `caffe2/quantization/server/conv_dnnlowp_op.cc` depends on `third_party/fbgemm/src/RefImplementations.h`. Pull Request resolved: pytorch#75394. Approved by: https://github.com/janeyx99, https://github.com/seemethere
1 parent 4441582 commit 8473173

File tree

16 files changed

+5
-373
lines changed

16 files changed

+5
-373
lines changed

.gitmodules

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -139,9 +139,6 @@
139139
[submodule "third_party/pocketfft"]
140140
path = third_party/pocketfft
141141
url = https://github.com/mreineck/pocketfft
142-
[submodule "third_party/breakpad"]
143-
path = third_party/breakpad
144-
url = https://github.com/driazati/breakpad.git
145142
[submodule "third_party/flatbuffers"]
146143
path = third_party/flatbuffers
147144
url = https://github.com/google/flatbuffers.git

CMakeLists.txt

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -207,7 +207,6 @@ cmake_dependent_option(
207207
"USE_CUDNN" OFF)
208208
option(USE_FBGEMM "Use FBGEMM (quantized 8-bit server operators)" ON)
209209
option(USE_KINETO "Use Kineto profiling library" ON)
210-
option(USE_BREAKPAD "Use breakpad crash dump library" ON)
211210
option(USE_CUPTI_SO "Use CUPTI as a shared library" ON)
212211
option(USE_FAKELOWP "Use FakeLowp operators" OFF)
213212
option(USE_FFMPEG "Use ffmpeg" OFF)
@@ -271,10 +270,6 @@ if(NOT DEFINED USE_VULKAN)
271270
"ANDROID" OFF)
272271
endif()
273272

274-
if(IOS)
275-
set(USE_BREAKPAD OFF)
276-
endif()
277-
278273
option(USE_SLEEF_FOR_ARM_VEC256 "Use sleef for arm" OFF)
279274
option(USE_SOURCE_DEBUG_ON_MOBILE "Enable " ON)
280275
option(USE_LITE_INTERPRETER_PROFILER "Enable " ON)

caffe2/CMakeLists.txt

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -737,7 +737,6 @@ if(NOT INTERN_BUILD_MOBILE OR NOT BUILD_CAFFE2_MOBILE)
737737
${TORCH_SRC_DIR}/csrc/api/src/optim/schedulers/step_lr.cpp
738738
${TORCH_SRC_DIR}/csrc/api/src/serialize/input-archive.cpp
739739
${TORCH_SRC_DIR}/csrc/api/src/serialize/output-archive.cpp
740-
${TORCH_SRC_DIR}/csrc/utils/crash_handler.cpp
741740
)
742741
endif()
743742

@@ -1076,10 +1075,9 @@ if(USE_TBB)
10761075
target_link_libraries(torch_cpu PUBLIC TBB::tbb)
10771076
endif()
10781077

1079-
if(USE_BREAKPAD)
1080-
target_compile_definitions(torch_cpu PRIVATE ADD_BREAKPAD_SIGNAL_HANDLER)
1081-
target_include_directories(torch_cpu PRIVATE ${CMAKE_CURRENT_LIST_DIR}/../third_party ${CMAKE_CURRENT_LIST_DIR}/../third_party/breakpad/src)
1082-
target_link_libraries(torch_cpu PRIVATE breakpad)
1078+
if(BUILD_CAFFE2 AND BUILD_CAFFE2_OPS AND USE_FBGEMM)
1079+
# FIXME: quantization/server/conv_dnnlowp_op.cc depends on fbgemm/src/RefImplementations.h
1080+
target_include_directories(torch_cpu PRIVATE ${CMAKE_CURRENT_LIST_DIR}/../third_party)
10831081
endif()
10841082

10851083
target_include_directories(torch_cpu PRIVATE ${ATen_CPU_INCLUDE})

cmake/Dependencies.cmake

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1847,10 +1847,6 @@ set_target_properties(fmt-header-only PROPERTIES INTERFACE_COMPILE_FEATURES "")
18471847
list(APPEND Caffe2_DEPENDENCY_LIBS fmt::fmt-header-only)
18481848
set(BUILD_SHARED_LIBS ${TEMP_BUILD_SHARED_LIBS} CACHE BOOL "Build shared libs" FORCE)
18491849

1850-
if(USE_BREAKPAD)
1851-
add_subdirectory(${CMAKE_CURRENT_LIST_DIR}/../third_party/breakpad)
1852-
endif()
1853-
18541850
# ---[ Kineto
18551851
# edge profiler depends on KinetoProfiler but it only does cpu
18561852
# profiling. Thus we dont need USE_CUDA/USE_ROCM

cmake/Summary.cmake

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,6 @@ function(caffe2_print_configuration_summary)
187187
message(STATUS " SELECTED_OP_LIST : ${SELECTED_OP_LIST}")
188188
endif()
189189
message(STATUS " USE_DEPLOY : ${USE_DEPLOY}")
190-
message(STATUS " USE_BREAKPAD : ${USE_BREAKPAD}")
191190
message(STATUS " Public Dependencies : ${Caffe2_PUBLIC_DEPENDENCY_LIBS}")
192191
message(STATUS " Private Dependencies : ${Caffe2_DEPENDENCY_LIBS}")
193192
# coreml

test/test_cpp_extensions_jit.py

Lines changed: 1 addition & 78 deletions
Original file line numberDiff line numberDiff line change
@@ -10,15 +10,12 @@
1010
import subprocess
1111
import glob
1212

13-
import textwrap
14-
from multiprocessing import Process
15-
1613
import torch.testing._internal.common_utils as common
1714
import torch
1815
import torch.backends.cudnn
1916
import torch.utils.cpp_extension
2017
from torch.utils.cpp_extension import CUDA_HOME, ROCM_HOME
21-
from torch.testing._internal.common_utils import gradcheck, TEST_WITH_ASAN, has_breakpad
18+
from torch.testing._internal.common_utils import gradcheck
2219

2320

2421
TEST_CUDA = torch.cuda.is_available() and CUDA_HOME is not None
@@ -869,80 +866,6 @@ def test_custom_compound_op_autograd(self):
869866

870867
gradcheck(torch.ops.my.add, [a, b], eps=1e-2)
871868

872-
@staticmethod
873-
def _crash_handler_test_process(stderr_file, destination):
874-
# Code to enable dumps and trigger a segfault
875-
if sys.platform == "win32":
876-
destination = destination.replace("\\", "\\\\")
877-
csrc = textwrap.dedent(f"""
878-
#include <torch/torch.h>
879-
#include <locale>
880-
#include <iostream>
881-
#include <codecvt>
882-
#include <string>
883-
884-
int fail() {{
885-
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter;
886-
std::string narrow("{destination}");
887-
std::wstring wide = converter.from_bytes(narrow);
888-
torch::crash_handler::enable_minidumps(wide.c_str());
889-
890-
volatile int* bad = nullptr;
891-
return *bad;
892-
}}
893-
""")
894-
else:
895-
csrc = textwrap.dedent(f"""
896-
#include <torch/torch.h>
897-
898-
int fail() {{
899-
torch::crash_handler::enable_minidumps("{destination}");
900-
901-
volatile int* bad = nullptr;
902-
return *bad;
903-
}}
904-
""")
905-
906-
# Some special stuff to overwrite stderr for a C++ extension
907-
# Copied from: https://stackoverflow.com/questions/8804893/redirect-stdout-from-python-for-c-calls
908-
sys.stdout.flush()
909-
newstdout = os.dup(2)
910-
devnull = os.open(stderr_file, os.O_WRONLY)
911-
os.dup2(devnull, 2)
912-
os.close(devnull)
913-
sys.stdout = os.fdopen(newstdout, 'w')
914-
915-
module = torch.utils.cpp_extension.load_inline(
916-
name="segfault",
917-
cpp_sources=csrc,
918-
functions=["fail"],
919-
)
920-
module.fail()
921-
922-
@unittest.skipIf(TEST_WITH_ASAN, "ASAN disables the crash handler's signal handler")
923-
@unittest.skipIf(not has_breakpad(), "Built without breakpad")
924-
@unittest.skipIf(os.environ.get("TEST_CONFIG") == "force_on_cpu", "fails on force_on_cpu config, tracked w/ #65253")
925-
def test_crash_handler(self):
926-
with tempfile.TemporaryDirectory() as temp_dir, tempfile.NamedTemporaryFile(delete=not sys.platform == "win32") as stderr:
927-
# Use multiprocessing to spin up a separate process to make catching
928-
# the segfault easier
929-
p = Process(target=self._crash_handler_test_process, args=(stderr.name, temp_dir))
930-
p.start()
931-
p.join()
932-
933-
with open(stderr.name) as f:
934-
result = f.read().strip()
935-
936-
# Check that the signal handler was called
937-
self.assertTrue(result.startswith(f"Wrote minidump to {temp_dir}"))
938-
939-
with open(result.replace("Wrote minidump to ", ""), "rb") as dump_file:
940-
dump_bytes = dump_file.read()
941-
942-
# Check that the file has the correct magic number
943-
self.assertEqual(b"MDMP", dump_bytes[0:4])
944-
945-
946869

947870
if __name__ == "__main__":
948871
common.run_tests()

test/test_utils.py

Lines changed: 1 addition & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,6 @@
22

33
import sys
44
import os
5-
import contextlib
6-
import io
75
import re
86
import shutil
97
import random
@@ -20,7 +18,7 @@
2018
import torch.utils.cpp_extension
2119
from torch.autograd._functions.utils import check_onnx_broadcast
2220
from torch.onnx.symbolic_opset9 import _prepare_onnx_paddings
23-
from torch.testing._internal.common_utils import has_breakpad, load_tests, IS_SANDCASTLE, IS_WINDOWS, TEST_WITH_ASAN
21+
from torch.testing._internal.common_utils import load_tests, IS_SANDCASTLE, IS_WINDOWS
2422

2523
# load_tests from torch.testing._internal.common_utils is used to automatically filter tests for
2624
# sharding on sandcastle. This line silences flake warnings
@@ -610,32 +608,6 @@ def forward(self, x):
610608
ms(torch.tensor([False], dtype=torch.bool))
611609

612610

613-
class TestCrashHandler(TestCase):
614-
@unittest.skipIf(TEST_WITH_ASAN, "ASAN disables the crash handler's signal handler")
615-
@unittest.skipIf(not has_breakpad(), "Built without breakpad")
616-
def test_python_exception_writing(self):
617-
with tempfile.TemporaryDirectory() as temp_dir:
618-
torch.utils._crash_handler.enable_minidumps(temp_dir)
619-
torch.utils._crash_handler.enable_minidumps_on_exceptions()
620-
621-
files = os.listdir(temp_dir)
622-
self.assertEqual(len(files), 0)
623-
624-
f = io.StringIO()
625-
with contextlib.redirect_stderr(f):
626-
try:
627-
@torch.jit.script
628-
def x(i: int):
629-
return i + "2" # type: ignore[operator]
630-
except RuntimeError as e:
631-
pass
632-
633-
files = os.listdir(temp_dir)
634-
self.assertEqual(len(files), 1)
635-
self.assertTrue(files[0].endswith(".dmp"))
636-
torch.utils._crash_handler.disable_minidumps()
637-
638-
639611
@unittest.skipIf(IS_SANDCASTLE, "cpp_extension is OSS only")
640612
class TestStandaloneCPPJIT(TestCase):
641613
def test_load_standalone(self):

third_party/breakpad

Lines changed: 0 additions & 1 deletion
This file was deleted.

tools/build_variables.bzl

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -821,7 +821,6 @@ torch_cpp_srcs = [
821821
"torch/csrc/api/src/optim/schedulers/step_lr.cpp",
822822
"torch/csrc/api/src/serialize/input-archive.cpp",
823823
"torch/csrc/api/src/serialize/output-archive.cpp",
824-
"torch/csrc/utils/crash_handler.cpp",
825824
]
826825

827826
libtorch_python_cuda_core_sources = [

torch/csrc/Module.cpp

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,6 @@
5555
#include <torch/csrc/utils/tensor_new.h>
5656
#include <torch/csrc/utils/tensor_numpy.h>
5757
#include <torch/csrc/utils/python_dispatch.h>
58-
#include <torch/csrc/utils/crash_handler.h>
5958
#include <torch/csrc/utils/python_arg_parser.h>
6059
#include <torch/csrc/utils/pycfunction_helpers.h>
6160
#include <torch/csrc/lazy/python/init.h>
@@ -65,7 +64,6 @@
6564
#include <torch/csrc/monitor/python_init.h>
6665
#include <torch/csrc/onnx/init.h>
6766
#include <torch/csrc/utils/init.h>
68-
#include <torch/csrc/utils/crash_handler.h>
6967
#include <torch/csrc/api/include/torch/python/init.h>
7068

7169
#ifdef USE_DISTRIBUTED
@@ -820,7 +818,6 @@ PyObject* initModule() {
820818
torch::monitor::initMonitorBindings(module);
821819
torch::impl::dispatch::initDispatchBindings(module);
822820
torch::throughput_benchmark::initThroughputBenchmarkBindings(module);
823-
torch::crash_handler::initCrashHandlerBindings(module);
824821
torch::autograd::initReturnTypes(module);
825822
torch::autograd::initNNFunctions(module);
826823
torch::autograd::initFFTFunctions(module);
@@ -874,10 +871,6 @@ PyObject* initModule() {
874871

875872
// Automatically translate errors thrown from pybind11 functions
876873
py::register_exception_translator([](std::exception_ptr e) { // NOLINT
877-
if (torch::crash_handler::is_enabled_on_exceptions()) {
878-
torch::crash_handler::write_minidump();
879-
}
880-
881874
try {
882875
if (e) {
883876
std::rethrow_exception(e);

torch/csrc/api/include/torch/utils.h

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@
55
#include <torch/csrc/autograd/grad_mode.h>
66
#include <torch/csrc/autograd/profiler.h>
77
#include <torch/csrc/api/include/torch/types.h>
8-
#include <torch/csrc/utils/crash_handler.h>
98
#include <cstdint>
109

1110
namespace torch {

0 commit comments

Comments (0)