File: test.py

package info (click to toggle)
halide 14.0.0-3
  • links: PTS, VCS
  • area: main
  • in suites: bookworm
  • size: 49,124 kB
  • sloc: cpp: 238,722; makefile: 4,303; python: 4,047; java: 1,575; sh: 1,384; pascal: 211; xml: 165; javascript: 43; ansic: 34
file content (78 lines) | stat: -rw-r--r-- 2,312 bytes parent folder | download | duplicates (2)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
"""Verifies the Halide operator functions properly."""


import os
import unittest
import warnings

import torch as th
import modules


class TestAdd(unittest.TestCase):
    """Checks the Halide-generated Add operator: forward result and gradients.

    Runs the same check in four configurations: {CPU, CUDA} x {float32,
    float64}, and for each of the two available backward implementations
    ("add_grad" and "add_halidegrad").
    """

    def setUp(self):
        # Fresh tensors for every test case; values chosen so a + b == gt.
        self.a = th.ones(1, 2, 8, 8)
        self.b = th.ones(1, 2, 8, 8) * 3
        self.gt = th.ones(1, 2, 8, 8) * 4

    def test_cpu_single(self):
        self._test_add(is_double=False)

    def test_cpu_double(self):
        self._test_add(is_double=True)

    def test_gpu_single(self):
        if not th.cuda.is_available():
            # Report a skip instead of a silent false "pass".
            self.skipTest("CUDA is not available")
        self._test_add(is_cuda=True, is_double=False)

    def test_gpu_double(self):
        if not th.cuda.is_available():
            # Report a skip instead of a silent false "pass".
            self.skipTest("CUDA is not available")
        self._test_add(is_cuda=True, is_double=True)

    def _test_add(self, is_cuda=False, is_double=False):
        """Exercise forward, repeated backward, and gradcheck on the operator.

        Args:
            is_cuda (bool): if True, move all tensors to the GPU first.
            is_double (bool): if True, run in float64 instead of float32.
        """
        if is_double:
            self.a = self.a.double()
            self.b = self.b.double()
            self.gt = self.gt.double()
        if is_cuda:
            print("Testing Halide PyTorch CUDA operator...")
            self.a = self.a.cuda()
            self.b = self.b.cuda()
            self.gt = self.gt.cuda()
        else:
            print("Testing Halide PyTorch CPU operator...")

        for backward_op in ["add_grad", "add_halidegrad"]:
            add = modules.Add(backward_op)
            output = add(self.a, self.b)

            mode = "Double" if is_double else "Single"
            print("  .%s-precision mode, backward_op:" % mode, backward_op)

            # Forward check: values are small integers, so the comparison is
            # exact even in float32.
            diff = (output - self.gt).sum().item()
            # self.assertEqual survives `python -O`; a bare assert does not.
            self.assertEqual(
                diff, 0.0, "Test failed: sum should be 4, got %f" % diff)

            self.a.requires_grad = True
            self.b.requires_grad = True

            # Stress the backward op: repeated backward passes through the
            # same module must not crash or leak.
            for _ in range(100):
                output = add(self.a, self.b).sum()
                output.backward()

            # Inputs may be float32; the gradient checker wants double inputs
            # and will issue a warning. (Regex fixed: was ".*gradcheck*".)
            warnings.filterwarnings("ignore", module=r".*gradcheck.*")

            # Test the gradient is correct. BUG FIX: the original stored the
            # gradcheck result in an unused variable, so a failing gradient
            # check could never fail the test.
            self.assertTrue(
                th.autograd.gradcheck(add, [self.a, self.b], eps=1e-2),
                "gradcheck failed for backward_op %s" % backward_op)

            print("     Test ran successfully: difference is", diff)


# Script entry point: discover and run all TestAdd cases via unittest.
if __name__ == "__main__":
    unittest.main()