File: test_fx_to_onnx_decomp_skip.py

Package: pytorch-cuda 2.6.0+dfsg-7
# Owner(s): ["module: onnx"]
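"""Test that torch.onnx.dynamo_export skips decomposition for selected ops.

The ops exercised here (upsample bilinear/trilinear, instance_norm) have direct
ONNX equivalents (Resize, InstanceNormalization), so the exporter should emit
the single ONNX op rather than a fine-grained decomposed subgraph.
"""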
from __future__ import annotations

import onnx
import onnx.inliner

import pytorch_test_common

import torch
from torch.testing._internal import common_utils


def assert_op_in_onnx_model(model: onnx.ModelProto, op_type: str):
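    # The exporter may wrap ops inside ONNX local functions; inline them first
    # so the target op can be found by scanning the flat main graph.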
    inlined = onnx.inliner.inline_local_functions(model)
    for node in inlined.graph.node:
        if node.op_type == op_type:
            return
    raise AssertionError(f"Op {op_type} not found in model")


class TestDynamoExportDecompSkip(pytorch_test_common.ExportTestCase):
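    """Each test exports a model whose op should not be decomposed and asserts
    that the resulting ONNX graph contains the corresponding single ONNX op."""
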
    def test_upsample_bilinear2d(self):
        class TestModel(torch.nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.upsample = torch.nn.Upsample(scale_factor=2, mode="bilinear")

            def forward(self, x):
                return self.upsample(x)

        onnx_program = torch.onnx.dynamo_export(TestModel(), torch.randn(1, 1, 2, 2))
        # If decomposition is skipped, the model will contain a Resize op instead of a fine-grained subgraph.
        assert_op_in_onnx_model(onnx_program.model_proto, "Resize")

    def test_upsample_bilinear2d_output_size(self):
        def func(x: torch.Tensor):
            return torch.nn.functional.interpolate(x, size=(4, 4), mode="bilinear")

        onnx_program = torch.onnx.dynamo_export(func, torch.randn(1, 1, 2, 2))
        # If decomposition is skipped, the model will contain a Resize op instead of a fine-grained subgraph.
        assert_op_in_onnx_model(onnx_program.model_proto, "Resize")

    def test_upsample_trilinear3d(self):
        class TestModel(torch.nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.upsample = torch.nn.Upsample(scale_factor=2, mode="trilinear")

            def forward(self, x):
                return self.upsample(x)

        onnx_program = torch.onnx.dynamo_export(TestModel(), torch.randn(1, 1, 2, 2, 3))
        # If decomposition is skipped, the model will contain a Resize op instead of a fine-grained subgraph.
        assert_op_in_onnx_model(onnx_program.model_proto, "Resize")

    def test_upsample_trilinear3d_output_size(self):
        def func(x: torch.Tensor):
            return torch.nn.functional.interpolate(x, size=(4, 4, 4), mode="trilinear")

        onnx_program = torch.onnx.dynamo_export(func, torch.randn(1, 1, 2, 2, 3))
        # If decomposition is skipped, the model will contain a Resize op instead of a fine-grained subgraph.
        assert_op_in_onnx_model(onnx_program.model_proto, "Resize")

    def test_instance_norm(self):
        class TestModel(torch.nn.Module):
            def forward(self, x):
                return torch.nn.functional.instance_norm(x)

        onnx_program = torch.onnx.dynamo_export(TestModel(), torch.randn(1, 1, 2, 2))
        # If decomposition is skipped, the model will contain an InstanceNormalization op
        # instead of a BatchNormalization op with training=True.
        assert_op_in_onnx_model(onnx_program.model_proto, "InstanceNormalization")


if __name__ == "__main__":
    common_utils.run_tests()