
Commit 7f7d128

smessmer authored and facebook-github-bot committed
Remove legacy way of exposing caffe2 operators to PyTorch (pytorch#17742)
Summary: Pull Request resolved: pytorch#17742

This path isn't used anymore, and is incompatible with the changes stacked on top of this diff. Removing it.

cc bwasti to check and confirm these can really be deleted

Reviewed By: ezyang

Differential Revision: D14362426

fbshipit-source-id: 32cdc19f28c2a981ae1e204901420998367ee588
1 parent b132f0f commit 7f7d128

9 files changed (+0, -258 lines)
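
For context on what the legacy path looked like before this commit: a caffe2 operator got a second constructor taking a c10::FunctionSchema plus IValue inputs and outputs, and was registered in the FunctionSchemaOperatorRegistry and FunctionSchemaRegistry in one step. The following is a minimal sketch reconstructed from the code removed in the diffs below; it only builds against a pre-commit checkout, and the operator name "MyLegacyOp" and its argument list are illustrative placeholders.

    // Sketch only: relies on the legacy API deleted by this commit, so it
    // requires a caffe2/PyTorch checkout from before this change.
    // "MyLegacyOp" is a hypothetical example, not a real operator.
    #include "caffe2/core/operator.h"

    namespace caffe2 {

    class MyLegacyOp final : public Operator<CPUContext> {
     public:
      // Legacy constructor: a FunctionSchema plus IValue inputs/outputs,
      // instead of the usual OperatorDef/Workspace pair.
      MyLegacyOp(
          const c10::FunctionSchema& f,
          const std::vector<c10::IValue>& i,
          const std::vector<c10::IValue*>& o)
          : Operator<CPUContext>(f, i, o) {}

      bool RunOnDevice() override {
        return true;
      }
    };

    // Registers the operator in FunctionSchemaOperatorRegistry and its schema
    // ("_caffe2::MyLegacyOp") in FunctionSchemaRegistry in one step.
    REGISTER_FUNCTION_SCHEMA_OPERATOR(
        MyLegacyOp,
        {c10::Argument("input")},
        {c10::Argument("output")},
        MyLegacyOp)

    } // namespace caffe2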

caffe2/core/operator.cc

-31 lines changed

@@ -354,23 +354,6 @@ unique_ptr<OperatorBase> CreateOperator(
   }
 }

-void RunOperator(
-    c10::Symbol name,
-    const std::vector<c10::IValue>& inputs,
-    const std::vector<c10::IValue*>& outputs) {
-  auto fn_wrap =
-      caffe2::FunctionSchemaRegistry()->Create(name.toUnqualString());
-  CAFFE_ENFORCE(
-      fn_wrap,
-      "Operator not registered with FunctionSchema constructor.",
-      name.toUnqualString());
-  auto fn = fn_wrap->getSchema();
-  auto op = caffe2::FunctionSchemaOperatorRegistry()->Create(
-      name.toUnqualString(), fn, inputs, outputs);
-
-  op->Run();
-}
-
 std::map<DeviceType, OperatorRegistry*>* gDeviceTypeRegistry() {
   static std::map<DeviceType, OperatorRegistry*> g_device_type_registry;
   return &g_device_type_registry;
@@ -403,15 +386,6 @@ C10_DEFINE_REGISTRY(
     const OperatorDef&,
     const vector<GradientWrapper>&);

-C10_DEFINE_REGISTRY(
-    FunctionSchemaOperatorRegistry,
-    OperatorBase,
-    const c10::FunctionSchema,
-    std::vector<c10::IValue>,
-    std::vector<c10::IValue*>);
-
-C10_DEFINE_REGISTRY(FunctionSchemaRegistry, FunctionSchemaStorageBase);
-
 GradientOpsMeta GetGradientForOp(
     const OperatorDef& def, const vector<GradientWrapper>& g_output) {
   std::unique_ptr<GradientMakerBase> maker(
@@ -757,11 +731,6 @@ std::set<std::string> GetRegisteredOperators() {
     all_keys.emplace(name);
   }

-  // FunctionSchema registered operators
-  for (const auto& name : FunctionSchemaOperatorRegistry()->Keys()) {
-    all_keys.emplace(name);
-  }
-
   return all_keys;
 }
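
The RunOperator helper removed above tied the two registries together: it looked up the schema by unqualified name, created the operator from FunctionSchemaOperatorRegistry, and ran it. A hedged usage sketch against the pre-commit API; the operator name and values are illustrative.

    // Sketch only: caffe2::RunOperator and the FunctionSchema registries no
    // longer exist after this commit. "MyLegacyOp" stands in for an operator
    // previously registered with REGISTER_FUNCTION_SCHEMA_OPERATOR.
    #include "caffe2/core/operator.h"

    void RunMyLegacyOp() {
      std::vector<c10::IValue> inputs;
      inputs.emplace_back(1.0f);  // inputs are packed as IValues, not Blobs

      c10::IValue output;         // outputs are written through raw pointers
      std::vector<c10::IValue*> outputs{&output};

      // Resolves the schema via FunctionSchemaRegistry, instantiates the
      // operator via FunctionSchemaOperatorRegistry, then calls op->Run().
      caffe2::RunOperator(
          c10::Symbol::fromQualString("caffe2::MyLegacyOp"), inputs, outputs);
    }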

caffe2/core/operator.h

-53 lines changed

@@ -1239,52 +1239,6 @@ C10_DECLARE_REGISTRY(
   REGISTER_HIP_OPERATOR_WITH_ENGINE(name, MIOPEN, __VA_ARGS__) \
   REGISTER_HIP_OPERATOR_WITH_ENGINE(name, CUDNN, __VA_ARGS__) // Make CUDNN an alias of MIOPEN for HIP ops

-C10_DECLARE_REGISTRY(
-    FunctionSchemaOperatorRegistry,
-    OperatorBase,
-    const c10::FunctionSchema,
-    std::vector<c10::IValue>,
-    std::vector<c10::IValue*>);
-
-struct FunctionSchemaStorageBase {
-  FunctionSchemaStorageBase() {}
-  virtual c10::FunctionSchema getSchema() = 0;
-  virtual ~FunctionSchemaStorageBase() {}
-};
-
-C10_DECLARE_REGISTRY(FunctionSchemaRegistry, FunctionSchemaStorageBase);
-
-// Prefer to use the {DECLARE,DEFINE}_FUNCTION_SCHEMA_OPERATOR macros,
-// as they wrap it all in a Meyer's singleton accessible from Torch.
-
-#define REGISTER_FUNCTION_SCHEMA_OPERATOR(name, inputs, outputs, impl)        \
-  C10_REGISTER_CLASS(FunctionSchemaOperatorRegistry, name, impl)              \
-  struct FunctionSchemaStorageBase##name : public FunctionSchemaStorageBase { \
-    c10::FunctionSchema getSchema() override {                                \
-      return c10::FunctionSchema("_caffe2::" #name, inputs, outputs);         \
-    }                                                                         \
-  };                                                                          \
-  C10_REGISTER_CLASS(                                                         \
-      FunctionSchemaRegistry, name, FunctionSchemaStorageBase##name)
-
-#define DEFINE_FUNCTION_SCHEMA_OPERATOR(name, inputs, outputs, impl) \
-  void CAFFE2_MEYERS_OP_REGISTRATION_##name() {                      \
-    REGISTER_FUNCTION_SCHEMA_OPERATOR(name, inputs, outputs, impl);  \
-  }                                                                  \
-  static CAFFE2_STRUCT_OP_REGISTRATION_##name                        \
-      CAFFE2_STRUCT_OP_REGISTRATION_DEFN_##name;
-
-#define DECLARE_FUNCTION_SCHEMA_OPERATOR(name)            \
-  CAFFE2_API void CAFFE2_MEYERS_OP_REGISTRATION_##name(); \
-  struct CAFFE2_API CAFFE2_STRUCT_OP_REGISTRATION_##name { \
-    CAFFE2_STRUCT_OP_REGISTRATION_##name() {               \
-      CAFFE2_MEYERS_OP_REGISTRATION_##name();              \
-    }                                                      \
-  };
-
-#define GET_FUNCTION_SCHEMA(name) \
-  FunctionSchemaRegistry()->Create(name)->getSchema()
-
 // StaticLinkingProtector is a helper class that ensures that the Caffe2
 // library is linked correctly with whole archives (in the case of static
 // linking). What happens is that when CreateOperator is called for the first
@@ -1342,13 +1296,6 @@ CAFFE2_API unique_ptr<OperatorBase> CreateOperator(
     Workspace* ws,
     int net_position = OperatorBase::kNoNetPositionSet);

-// Using the new C10 interface and FunctionSchema registry,
-// instantiate and run the operator.
-CAFFE2_API void RunOperator(
-    c10::Symbol name,
-    const std::vector<c10::IValue>& inputs,
-    const std::vector<c10::IValue*>& outputs);
-
 CAFFE2_API const std::string OpRegistryKey(
     const std::string& op_type,
     const std::string& engine = "");
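
The removed block above also documents the intended usage: the DECLARE/DEFINE_FUNCTION_SCHEMA_OPERATOR pair was preferred over the raw REGISTER macro because it wraps registration in a Meyer's-singleton-style hook that Torch-side code could pull in. A hedged sketch of how the pair was split across a header and a source file under the pre-commit API; the names are illustrative.

    // Sketch only: these macros are deleted by this commit.
    // "MyLegacyOp" is a hypothetical operator class defined in the .cc file.

    // my_legacy_op.h: declares the registration hook and a small struct
    // whose constructor calls it.
    DECLARE_FUNCTION_SCHEMA_OPERATOR(MyLegacyOp)

    // my_legacy_op.cc: defines the hook body (the actual
    // REGISTER_FUNCTION_SCHEMA_OPERATOR call) and a static instance of the
    // struct, so registration runs during static initialization.
    DEFINE_FUNCTION_SCHEMA_OPERATOR(
        MyLegacyOp,
        {c10::Argument("input")},
        {c10::Argument("output")},
        MyLegacyOp)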

caffe2/core/operator_test.cc

-86 lines changed

@@ -595,90 +595,4 @@ TEST(IsTestArg, non_standard) {
       "JustTestWithNonStandardIsTestArg");
 }

-class TestOperatorWithFunctionSchema final : public Operator<CPUContext> {
- public:
-  TestOperatorWithFunctionSchema(const OperatorDef& def, Workspace* ws)
-      : Operator<CPUContext>(def, ws) {}
-
-  TestOperatorWithFunctionSchema(
-      const c10::FunctionSchema& f,
-      const std::vector<c10::IValue>& i,
-      const std::vector<c10::IValue*>& o)
-      : Operator<CPUContext>(f, i, o) {
-    if (HasArgument("test_arg")) {
-      test_arg_ =
-          static_cast<float>(this->GetSingleArgument<float>("test_arg", 0.01));
-    }
-  }
-
-  bool RunOnDevice() override {
-    auto out =
-        OutputTensor(0, {1, 1}, at::TensorOptions(TypeMeta::Make<float>()));
-    out->mutable_data<float>()[0] = test_arg_;
-    return true;
-  }
-
- private:
-  float test_arg_ = 0;
-};
-
-REGISTER_CPU_OPERATOR(
-    TestOperatorWithFunctionSchema,
-    TestOperatorWithFunctionSchema);
-OPERATOR_SCHEMA(TestOperatorWithFunctionSchema)
-    .NumInputs(0, 1)
-    .NumOutputs(0, 1)
-    .Arg("test_arg", "this arg is required", true);
-
-// The new way combines OPERATOR_SCHEMA and REGISTER_OPERATOR
-REGISTER_FUNCTION_SCHEMA_OPERATOR(
-    TestOperatorWithFunctionSchema,
-    {c10::Argument("test_arg")},
-    {c10::Argument("output")},
-    TestOperatorWithFunctionSchema)
-
-TEST(FunctionSchema, Creation) {
-  std::vector<c10::IValue> inputs;
-  float test_val = 1337.0f;
-  inputs.emplace_back(test_val);
-
-  caffe2::Tensor out = TensorCPUFromValues<float>({1, 1}, {123.0f});
-  std::vector<c10::IValue*> outputs;
-  auto t = at::Tensor(std::move(out.getIntrusivePtr()));
-  auto out_val = c10::IValue(t);
-  outputs.emplace_back(&out_val);
-
-  auto fn = FunctionSchemaRegistry()
-                ->Create("TestOperatorWithFunctionSchema")
-                ->getSchema();
-  auto op = FunctionSchemaOperatorRegistry()->Create(
-      "TestOperatorWithFunctionSchema", fn, inputs, outputs);
-
-  op->Run();
-  EXPECT_EQ(out.data<float>()[0], test_val);
-}
-
-TEST(FunctionSchema, OutputChange) {
-  std::vector<c10::IValue> inputs;
-  float test_val = 1337.0f;
-  inputs.emplace_back(test_val);
-
-  // Wrong type
-  caffe2::Tensor out = TensorCPUFromValues<int>({1, 1}, {123});
-  std::vector<c10::IValue*> outputs;
-  auto t = at::Tensor(std::move(out.getIntrusivePtr()));
-  auto out_val = c10::IValue(t);
-  outputs.emplace_back(&out_val);
-
-  auto fn = FunctionSchemaRegistry()
-                ->Create("TestOperatorWithFunctionSchema")
-                ->getSchema();
-  auto op = FunctionSchemaOperatorRegistry()->Create(
-      "TestOperatorWithFunctionSchema", fn, inputs, outputs);
-
-  op->Run();
-  out = caffe2::Tensor(out_val.toTensor());
-  EXPECT_EQ(out.data<float>()[0], test_val);
-}
-
 } // namespace caffe2

tools/build_variables.py

-2 lines changed

@@ -59,8 +59,6 @@
     "torch/csrc/jit/ir.cpp",
     "torch/csrc/jit/irparser.cpp",
     "torch/csrc/jit/netdef_converter.cpp",
-    "torch/csrc/jit/caffe2_operator.cpp",
-    "torch/csrc/jit/register_caffe2_ops.cpp",
     "torch/csrc/jit/register_c10_ops.cpp",
     "torch/csrc/jit/symbolic_script.cpp",
     "torch/csrc/jit/operator.cpp",

torch/CMakeLists.txt

-6 lines changed

@@ -136,7 +136,6 @@ set(TORCH_SRCS
   ${TORCH_SRC_DIR}/csrc/jit/irparser.cpp
   ${TORCH_SRC_DIR}/csrc/jit/netdef_converter.cpp
   ${TORCH_SRC_DIR}/csrc/jit/operator.cpp
-  ${TORCH_SRC_DIR}/csrc/jit/caffe2_operator.cpp
   ${TORCH_SRC_DIR}/csrc/jit/register_c10_ops.cpp
   ${TORCH_SRC_DIR}/csrc/jit/symbolic_script.cpp
   ${TORCH_SRC_DIR}/csrc/jit/passes/alias_analysis.cpp
@@ -195,11 +194,6 @@ set(TORCH_SRCS
   ${TORCH_ROOT}/test/cpp/jit/no-gtest.cpp
 )

-if (BUILD_CAFFE2_OPS)
-  list(APPEND TORCH_SRCS
-    ${TORCH_SRC_DIR}/csrc/jit/register_caffe2_ops.cpp)
-endif()
-
 if (WIN32)
   list(APPEND TORCH_SRCS
     ${TORCH_SRC_DIR}/csrc/jit/fuser/cpu/dynamic_library_win.cpp

torch/csrc/jit/caffe2_operator.cpp

-56 lines changed
This file was deleted.

torch/csrc/jit/caffe2_operator.h

-10 lines changed
This file was deleted.

torch/csrc/jit/custom_operator.h

-9 lines changed

@@ -1,6 +1,5 @@
 #pragma once

-#include <torch/csrc/jit/caffe2_operator.h>
 #include <torch/csrc/jit/operator.h>
 #include <ATen/core/stack.h>
 #include <torch/csrc/jit/tracer.h>
@@ -280,14 +279,6 @@ struct TORCH_API RegisterOperators {
     op(name, std::forward<Implementation>(implementation));
   }

-  /// Requires declaration of the FunctionSchema with
-  /// REGISTER_FUNCTION_SCHEMA_OPERATOR(name, ...)
-  static RegisterOperators Caffe2Operator(const std::string& name) {
-    auto r = RegisterOperators();
-    registerOperator(createOperatorFromCaffe2(name));
-    return r;
-  }
-
   /// Creates a new operator from a name and implementation function (function
   /// pointer or function object/lambda) using `torch::jit::createOperator`, and
   /// then registers the operator.
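
The static helper removed above was the Torch-side half of the bridge: given an operator name previously registered with REGISTER_FUNCTION_SCHEMA_OPERATOR, it built a JIT operator via createOperatorFromCaffe2 and registered it. A hedged usage sketch against the pre-commit API; the operator name is illustrative.

    // Sketch only: RegisterOperators::Caffe2Operator and
    // createOperatorFromCaffe2 are deleted by this commit. "MyLegacyOp" is a
    // hypothetical operator registered on the caffe2 side beforehand.
    #include <torch/csrc/jit/custom_operator.h>

    // Runs at static-initialization time: looks up the FunctionSchema for
    // "MyLegacyOp" and exposes the caffe2 operator to the PyTorch JIT.
    static auto registration =
        torch::jit::RegisterOperators::Caffe2Operator("MyLegacyOp");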

torch/csrc/jit/register_caffe2_ops.cpp

-5 lines changed
This file was deleted.
