Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Delete torch::deploy from pytorch core #85953

Closed
wants to merge 7 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
20 changes: 0 additions & 20 deletions .github/workflows/pull.yml
Expand Up @@ -302,26 +302,6 @@ jobs:
docker-image-name: pytorch-linux-focal-py3.7-gcc7
build-generates-artifacts: false

linux-bionic-cuda11_6-py3_10-gcc7-deploy-build:
name: linux-bionic-cuda11_6-py3_10-gcc7-deploy
uses: ./.github/workflows/_linux-build.yml
with:
build-environment: linux-bionic-cuda11.6-py3.10-gcc7-deploy
docker-image-name: pytorch-linux-bionic-cuda11.6-cudnn8-py3-gcc7
test-matrix: |
{ include: [
{ config: "deploy", shard: 1, num_shards: 1, runner: "linux.4xlarge.nvidia.gpu" },
]}

deploy-linux-bionic-cuda11_6-py3_10-gcc7-test:
name: linux-bionic-cuda11_6-py3_10-gcc7-deploy
uses: ./.github/workflows/_linux-test.yml
needs: linux-bionic-cuda11_6-py3_10-gcc7-deploy-build
with:
build-environment: linux-bionic-cuda11.6-py3.10-gcc7-deploy
docker-image: ${{ needs.linux-bionic-cuda11_6-py3_10-gcc7-deploy-build.outputs.docker-image }}
test-matrix: ${{ needs.linux-bionic-cuda11_6-py3_10-gcc7-deploy-build.outputs.test-matrix }}

linux-focal-rocm5_2-py3_7-build:
# don't run build twice on master
if: github.event_name == 'pull_request'
Expand Down
4 changes: 0 additions & 4 deletions .gitignore
Expand Up @@ -78,10 +78,6 @@ torch/testing/_internal/generated/annotated_fn_args.py
torch/testing/_internal/data/*.pt
torch/csrc/api/include/torch/version.h
torch/csrc/cudnn/cuDNN.cpp
torch/csrc/deploy/example/generated
torch/csrc/deploy/interpreter/cpython
torch/csrc/deploy/interpreter/frozen
torch/csrc/deploy/interpreter/third_party/typing_extensions.py
torch/csrc/generated
torch/csrc/generic/TensorMethods.cpp
torch/csrc/jit/generated/*
Expand Down
6 changes: 0 additions & 6 deletions .jenkins/pytorch/build.sh
Expand Up @@ -19,12 +19,6 @@ if [[ "$BUILD_ENVIRONMENT" == *-mobile-*build* ]]; then
exec "$(dirname "${BASH_SOURCE[0]}")/build-mobile.sh" "$@"
fi

if [[ "$BUILD_ENVIRONMENT" == *deploy* ]]; then
# Enabling DEPLOY build (embedded torch python interpreter, experimental)
# only on one config for now, can expand later
export USE_DEPLOY=ON
fi

echo "Python version:"
python --version

Expand Down
15 changes: 1 addition & 14 deletions .jenkins/pytorch/test.sh
Expand Up @@ -655,16 +655,6 @@ test_dynamo() {
popd
}

test_torch_deploy() {
python torch/csrc/deploy/example/generate_examples.py
ln -sf "$TORCH_LIB_DIR"/libtorch* "$TORCH_BIN_DIR"
ln -sf "$TORCH_LIB_DIR"/libshm* "$TORCH_BIN_DIR"
ln -sf "$TORCH_LIB_DIR"/libc10* "$TORCH_BIN_DIR"
"$TORCH_BIN_DIR"/test_deploy
"$TORCH_BIN_DIR"/test_deploy_gpu
assert_git_not_dirty
}

test_docs_test() {
.jenkins/pytorch/docs-test.sh
}
Expand All @@ -673,10 +663,7 @@ if ! [[ "${BUILD_ENVIRONMENT}" == *libtorch* || "${BUILD_ENVIRONMENT}" == *-baze
(cd test && python -c "import torch; print(torch.__config__.show())")
(cd test && python -c "import torch; print(torch.__config__.parallel_info())")
fi
if [[ "${TEST_CONFIG}" == *deploy* ]]; then
install_torchdynamo
test_torch_deploy
elif [[ "${TEST_CONFIG}" == *backward* ]]; then
if [[ "${TEST_CONFIG}" == *backward* ]]; then
test_forward_backward_compatibility
# Do NOT add tests after bc check tests, see its comment.
elif [[ "${TEST_CONFIG}" == *xla* ]]; then
Expand Down
7 changes: 0 additions & 7 deletions .lintrunner.toml
Expand Up @@ -170,7 +170,6 @@ command = [
[[linter]]
code = 'CLANGTIDY'
include_patterns = [
'torch/csrc/deploy/**/*.cpp',
'torch/csrc/fx/**/*.cpp',
'torch/csrc/generic/**/*.cpp',
'torch/csrc/onnx/**/*.cpp',
Expand All @@ -183,7 +182,6 @@ exclude_patterns = [
# FunctionsManual.cpp is excluded to keep this diff clean. It will be fixed
# in a follow up PR.
# /torch/csrc/generic/*.cpp is excluded because those files aren't actually built.
# deploy/interpreter files are excluded due to using macros and other techniques
# that are not easily converted to accepted c++
'torch/csrc/jit/passes/onnx/helper.cpp',
'torch/csrc/jit/passes/onnx/shape_type_inference.cpp',
Expand All @@ -197,11 +195,6 @@ exclude_patterns = [
'torch/csrc/autograd/FunctionsManual.cpp',
'torch/csrc/generic/*.cpp',
'torch/csrc/jit/codegen/cuda/runtime/*',
'torch/csrc/deploy/interactive_embedded_interpreter.cpp',
'torch/csrc/deploy/interpreter/**',
'torch/csrc/deploy/test_deploy_python_ext.cpp',
'torch/csrc/deploy/test_deploy_missing_interpreter.cpp',
'torch/csrc/deploy/test_deploy_gpu.cpp',
'torch/csrc/utils/disable_torch_function.cpp',
]
init_command = [
Expand Down
1 change: 0 additions & 1 deletion BUILD.bazel
Expand Up @@ -1748,7 +1748,6 @@ cc_library(
# Torch integration tests rely on a labeled data set from the MNIST database.
# http://yann.lecun.com/exdb/mnist/

# imethod.cpp is excluded since torch/csrc/deploy* build is not yet supported.
cpp_api_tests = glob(
["test/cpp/api/*.cpp"],
exclude = [
Expand Down
8 changes: 0 additions & 8 deletions CMakeLists.txt
Expand Up @@ -345,9 +345,6 @@ cmake_dependent_option(
option(ONNX_ML "Enable traditional ONNX ML API." ON)
option(HAVE_SOVERSION "Whether to add SOVERSION to the shared objects" OFF)
option(BUILD_LIBTORCH_CPU_WITH_DEBUG "Enable RelWithDebInfo for libtorch_cpu target only" OFF)
cmake_dependent_option(
USE_DEPLOY "Build embedded torch::deploy interpreter. See torch/csrc/deploy/README.md for more info." OFF
"BUILD_PYTHON" OFF)
cmake_dependent_option(USE_CCACHE "Attempt using CCache to wrap the compilation" ON "UNIX" OFF)
option(WERROR "Build with -Werror supported by the compiler" OFF)
option(USE_COREML_DELEGATE "Use the CoreML backend through delegate APIs" OFF)
Expand Down Expand Up @@ -1177,11 +1174,6 @@ endif()
include(cmake/Summary.cmake)
caffe2_print_configuration_summary()

# ---[ Torch Deploy
if(USE_DEPLOY)
add_subdirectory(torch/csrc/deploy)
endif()

if(BUILD_FUNCTORCH)
add_subdirectory(functorch)
endif()
6 changes: 0 additions & 6 deletions caffe2/CMakeLists.txt
Expand Up @@ -1154,12 +1154,6 @@ install(FILES
"${TORCH_SRC_DIR}/library.h"
"${TORCH_SRC_DIR}/custom_class_detail.h"
DESTINATION ${TORCH_INSTALL_INCLUDE_DIR}/torch)
if(USE_DEPLOY)
install(FILES
"${TORCH_SRC_DIR}/deploy.h"
DESTINATION ${TORCH_INSTALL_INCLUDE_DIR}/torch)
endif()

if(BUILD_TEST)
if(BUILD_LITE_INTERPRETER)
add_subdirectory(
Expand Down
1 change: 0 additions & 1 deletion cmake/Summary.cmake
Expand Up @@ -193,7 +193,6 @@ function(caffe2_print_configuration_summary)
if(NOT "${SELECTED_OP_LIST}" STREQUAL "")
message(STATUS " SELECTED_OP_LIST : ${SELECTED_OP_LIST}")
endif()
message(STATUS " USE_DEPLOY : ${USE_DEPLOY}")
message(STATUS " Public Dependencies : ${Caffe2_PUBLIC_DEPENDENCY_LIBS}")
message(STATUS " Private Dependencies : ${Caffe2_DEPENDENCY_LIBS}")
# coreml
Expand Down