File: test_resume.py

# Owner(s): ["module: dynamo"]

import torch
import torch._dynamo.test_case


def fn_creator():
    var1 = 1
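    # var1 is closed over by fn, so it ends up in fn's co_freevars; var2 below
    # is closed over by inner_fn, so it ends up in fn's co_cellvars.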

    def fn(x):
        x = x + 1
        var2 = 1
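        # The explicit graph break splits fn: Dynamo compiles the code after
        # this point as a separate "resume" function.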
        torch._dynamo.graph_break()
        x = x + var1

        def inner_fn():
            return var2

        return x

    return fn


class ResumeFunctionTests(torch._dynamo.test_case.TestCase):
    def test_freevars(self):
        fn = fn_creator()
        opt_fn = torch.compile(fn, backend="eager")
        opt_fn(torch.randn(10))
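        # Dynamo registers the generated resume-function code objects in this
        # module's globals under names starting with "__resume_at"; the single
        # graph break above should produce exactly one.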
        codes = [v for k, v in list(globals().items()) if k.startswith("__resume_at")]
        self.assertEqual(len(codes), 1)
        # co_freevars of a resume function is the sorted concatenation of the
        # original function's co_freevars and co_cellvars
        self.assertEqual(codes[0].co_freevars, ("var1", "var2"))


if __name__ == "__main__":
    from torch._dynamo.test_case import run_tests

    run_tests()