#include <gtest/gtest.h>
#include <test/cpp/jit/test_utils.h>
#include <torch/csrc/jit/operator_upgraders/upgraders.h>
#include <torch/csrc/jit/operator_upgraders/version_map.h>
#include <torch/csrc/jit/passes/replacement_of_old_operators.h>
#include <memory>
#include <string>
#include <unordered_map>

namespace torch {
namespace jit {
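
// Test-only upgrader bodies, keyed by upgrader name. Each value is the
// TorchScript IR that ReplaceOldOperatorsWithUpgraders splices in when it
// finds a call to the corresponding operator in an old-versioned graph.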
std::unordered_map<std::string, std::string> test_upgraders(
    {{"_test_serialization_subcmul_0_2", R"IR(graph(%self.1 : Tensor,
          %other.1 : Tensor,
          %alpha.1 : Union(float, int)):
      %7 : int = prim::Constant[value=1]()
      %6 : Tensor = aten::mul(%self.1, %alpha.1) # torch/jit/operator_upgraders.py:18:20
      %8 : Tensor = aten::sub(%other.1, %6, %7) # torch/jit/operator_upgraders.py:18:11
      return (%8))IR"},
     {"div_Tensor_0_3", R"IR(graph(%self.1 : Tensor,
          %other.1 : Tensor):
      %32 : str = prim::Constant[value="trunc"]()
      %6 : bool = prim::Constant[value=1]()
      %4 : bool = aten::is_floating_point(%self.1)
      %11 : bool = prim::If(%4)
        block0():
          -> (%6)
        block1():
          %9 : bool = aten::is_floating_point(%other.1)
          -> (%9)
      %35 : Tensor = prim::If(%11)
        block0():
          %36 : Tensor = aten::div(%self.1, %other.1)
          -> (%36)
        block1():
          %37 : Tensor = aten::div(%self.1, %other.1, %32)
          -> (%37)
      return (%35))IR"}});
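
// An aten::div in a graph with op_version 2 should be rewritten to the
// div_Tensor_0_3 upgrader body: a prim::If that dispatches between the
// two-argument and three-argument (rounding-mode) overloads of aten::div.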
TEST(OpReplacementTest, ReplaceDivInSimpleFunction) {
  const auto graph_string = R"IR(
        graph(%0 : Tensor,
              %1 : Tensor):
            %2 : Tensor = aten::add(%0, %1)
            %3 : Tensor = aten::div(%2, %1)
            return (%3))IR";
  auto g = std::make_shared<Graph>();
  test_only_populate_upgraders(test_upgraders);
  torch::jit::parseIR(graph_string, g.get());
  g->set_op_version(2);
  ReplaceOldOperatorsWithUpgraders(g);
  testing::FileCheck()
      .check("prim::If")
      ->check_count("aten::div(%2, %1)", 1, /*exactly=*/true)
      ->check_count("aten::div(%2, %1, %4)", 1, /*exactly=*/true)
      ->run(*g);
  test_only_remove_upgraders(test_upgraders);
}
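
// Two upgradeable ops in one graph: aten::div uses the upgrader registered
// above, while aten::_test_serialization_subcmul gets a version_map entry
// added just for this test and removed again at the end.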
TEST(OpReplacementTest, ReplaceTwoOpsInSimpleFunction) {
  const auto graph_string = R"IR(
        graph(%0 : Tensor,
              %1 : Tensor):
            %2 : Tensor = aten::add(%0, %1)
            %3 : Tensor = aten::div(%2, %1)
            %4 : int = prim::Constant[value=1]()
            %5 : Tensor = aten::_test_serialization_subcmul(%0, %1, %4)
            return (%3, %5))IR";
  auto g = std::make_shared<Graph>();
  test_only_populate_upgraders(test_upgraders);
  UpgraderEntry test_entry{
      3,
      "_test_serialization_subcmul_0_2",
      "aten::_test_serialization_subcmul(Tensor self, Tensor other, Scalar alpha=2) -> Tensor"};
  test_only_add_entry("aten::_test_serialization_subcmul", test_entry);
  torch::jit::parseIR(graph_string, g.get());
  g->set_op_version(2);
  ReplaceOldOperatorsWithUpgraders(g);
  testing::FileCheck()
      .check("prim::If")
      ->check_count("aten::div", 2, /*exactly=*/true)
      ->run(*g);
  test_only_remove_entry("aten::_test_serialization_subcmul");
  test_only_remove_upgraders(test_upgraders);
}
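
// The pass must also find and upgrade an aten::div that sits inside a
// nested block (here, the second branch of a prim::If).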
TEST(OpReplacementTest, ReplaceDivInNestedFunction) {
  const auto graph_string = R"IR(
        graph(%0 : Tensor,
              %1 : Tensor,
              %8 : bool):
            %9 : bool = prim::Constant[value=1]()
            %7 : bool = prim::If(%8)
              block0():
                -> (%9)
              block1():
                %2 : Tensor = aten::add(%0, %1)
                %3 : Tensor = aten::div(%2, %1)
                %4 : Tensor = aten::add(%3, %0)
                %10 : bool = aten::is_floating_point(%4)
                -> (%10)
            return (%7))IR";
  auto g = std::make_shared<Graph>();
  test_only_populate_upgraders(test_upgraders);
  torch::jit::parseIR(graph_string, g.get());
  g->set_op_version(2);
  ReplaceOldOperatorsWithUpgraders(g);
  testing::FileCheck()
      .check("prim::If")
      ->check_count("aten::add", 2, /*exactly=*/false)
      ->run(*g);
  testing::FileCheck()
      .check("prim::If")
      ->check_count("aten::div", 2, /*exactly=*/false)
      ->run(*g);
  test_only_remove_upgraders(test_upgraders);
}
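
// aten::_test_serialization_subcmul should be replaced by its upgrader body,
// which computes other - self * alpha via aten::mul followed by aten::sub.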
TEST(OpReplacementTest, ReplaceTestSubcmulInSimpleFunction) {
  const auto graph_string = R"IR(
        graph(%0 : Tensor,
              %1 : Tensor):
            %3 : int = prim::Constant[value=1]()
            %2 : Tensor = aten::_test_serialization_subcmul(%0, %1, %3)
            return (%2))IR";
  auto g = std::make_shared<Graph>();
  test_only_populate_upgraders(test_upgraders);
  UpgraderEntry test_entry{
      3,
      "_test_serialization_subcmul_0_2",
      "aten::_test_serialization_subcmul(Tensor self, Tensor other, Scalar alpha=2) -> Tensor"};
  test_only_add_entry("aten::_test_serialization_subcmul", test_entry);
  torch::jit::parseIR(graph_string, g.get());
  g->set_op_version(2);
  ReplaceOldOperatorsWithUpgraders(g);
  testing::FileCheck().check_count("aten::mul", 1, /*exactly=*/false)->run(*g);
  testing::FileCheck().check_count("aten::sub", 1, /*exactly=*/false)->run(*g);
  test_only_remove_upgraders(test_upgraders);
  test_only_remove_entry("aten::_test_serialization_subcmul");
}

} // namespace jit
} // namespace torch