# File: backend.py
# Source: pytorch 1.13.1+dfsg-4 (Debian bookworm), test/custom_backend/backend.py
# (Web-viewer metadata and line-number gutter from the original page removed so
# the file is valid Python.)
import argparse
import os.path
import sys
import torch


def get_custom_backend_library_path():
    """
    Get the path to the library containing the custom backend.

    Return:
        The path to the custom backend object, customized by platform.
    """
    # Shared-library naming differs per platform: Windows uses a bare .dll,
    # macOS a lib*.dylib, and everything else (Linux etc.) a lib*.so.
    if sys.platform.startswith("win32"):
        name = "custom_backend.dll"
    elif sys.platform.startswith("darwin"):
        name = "libcustom_backend.dylib"
    else:
        name = "libcustom_backend.so"
    # The library is expected to have been built into ./build relative to
    # the current working directory.
    path = os.path.abspath(os.path.join("build", name))
    assert os.path.exists(path), path
    return path


def to_custom_backend(module):
    """
    This is a helper that wraps torch._C._jit_to_test_backend and compiles
    only the forward method with an empty compile spec.

    Args:
        module: input ScriptModule.

    Returns:
        The module, lowered so that it can run on TestBackend.
    """
    # Compile spec selects only "forward" and passes an empty per-method
    # option map ({"": ""}) — no backend-specific compilation options.
    method_spec = {"forward": {"": ""}}
    return torch._C._jit_to_backend("custom_backend", module, method_spec)


class Model(torch.nn.Module):
    """
    Simple model used for testing that to_backend API supports saving, loading,
    and executing in C++.
    """

    def __init__(self):
        super().__init__()

    def forward(self, a, b):
        # Produce both the elementwise sum and the elementwise difference.
        total = a + b
        diff = a - b
        return (total, diff)


def main():
    """Lower an instance of Model to the custom backend and export it to disk."""
    arg_parser = argparse.ArgumentParser(
        description="Lower a Module to a custom backend"
    )
    arg_parser.add_argument("--export-module-to", required=True)
    args = arg_parser.parse_args()

    # Make the native custom-backend implementation visible to TorchScript
    # before attempting to lower anything to it.
    library_path = get_custom_backend_library_path()
    torch.ops.load_library(library_path)
    assert library_path in torch.ops.loaded_libraries

    # Script the model, lower it to the custom backend, and serialize the
    # lowered module to the requested location.
    scripted = torch.jit.script(Model())
    torch.jit.save(to_custom_backend(scripted), args.export_module_to)


if __name__ == "__main__":
    main()