File: test_logging.py

# Owner(s): ["module: autograd"]

import logging

import torch
from torch.testing._internal.logging_utils import LoggingTestCase, make_logging_test


class TestAutogradLogging(LoggingTestCase):
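    # make_logging_test enables the requested log levels (here, DEBUG for
    # the autograd log) for the duration of the test and injects the
    # captured LogRecords as the `records` argument.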
    @make_logging_test(autograd=logging.DEBUG)
    def test_logging(self, records):
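        # Build a small graph (mul -> div -> sum), then clone the scalar
        # result so that backward runs from two roots.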
        a = torch.rand(10, requires_grad=True)
        b = a.mul(2).div(3).sum()
        c = b.clone()
        torch.autograd.backward((b, c))

        self.assertEqual(len(records), 5)
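        # One record is emitted per backward node the engine executes;
        # the names below are listed in execution order, ending with
        # AccumulateGrad on the leaf tensor `a`.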
        expected = [
            "CloneBackward0",
            "SumBackward0",
            "DivBackward0",
            "MulBackward0",
            "AccumulateGrad",
        ]

        for i, record in enumerate(records):
            self.assertIn(expected[i], record.getMessage())


if __name__ == "__main__":
    from torch._dynamo.test_case import run_tests

    run_tests()