File: normalizer.py

Package: pytorch 1.13.1+dfsg-4
# @package normalizer
# Module caffe2.python.normalizer



class Normalizer(object):
    """
    Adds normalization to train_net for the given parameter. The normalization
    factor applied ahead of regularization is specified at initialization.
    The param should be a BlobReference.
    """

    def __init__(self):
        pass

    def __call__(self, net, param):
        return self._run(net, param)

    def _run(self, net, param):
        raise NotImplementedError("Subclasses must implement _run()")
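

# Illustrative sketch (an editorial addition, not part of the upstream module):
# a minimal subclass showing the contract Normalizer expects, namely
# overriding _run(net, param). The name "NoOpNormalizer" and its behavior are
# assumptions for illustration only.
class NoOpNormalizer(Normalizer):
    def _run(self, net, param):
        # Apply no normalization; hand the parameter blob back unchanged.
        return param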


class BatchNormalizer(Normalizer):
    """
    Normalizes the given parameter with the layer model's BatchNormalization
    layer, configured with the supplied momentum and scale initialization
    value.
    """

    def __init__(self, momentum, scale_init_value=1.0):
        super(BatchNormalizer, self).__init__()
        self._momentum = float(momentum)
        self._scale_init_value = float(scale_init_value)

    def _run(self, layer_model, param):
        return layer_model.BatchNormalization(
            param, momentum=self._momentum, scale_init_value=self._scale_init_value
        )
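

# Usage sketch (an editorial addition, illustrative only): a normalizer is
# constructed once and later invoked as a callable with the layer model and
# the parameter blob to normalize; Normalizer.__call__ dispatches to _run().
# The names "layer_model" and "param" below are assumptions standing in for a
# caffe2 layer model helper and a BlobReference.
#
#     bn = BatchNormalizer(momentum=0.9)
#     bn(layer_model, param)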


class LayerNormalizer(Normalizer):
    """
    Normalizes the given parameter with the layer model's LayerNormalization
    layer. use_layer_norm_op controls whether the dedicated LayerNorm operator
    is used; epsilon and scale_init_value configure the layer.
    """

    def __init__(self, epsilon, use_layer_norm_op=True, scale_init_value=1.0):
        super(LayerNormalizer, self).__init__()
        self._epsilon = float(epsilon)
        self._use_layer_norm_op = use_layer_norm_op
        self._scale_init_value = float(scale_init_value)

    def _run(self, layer_model, param):
        return layer_model.LayerNormalization(
            param,
            epsilon=self._epsilon,
            use_layer_norm_op=self._use_layer_norm_op,
            scale_init_value=self._scale_init_value,
        )
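

# Usage sketch (an editorial addition, illustrative only): LayerNormalizer is
# applied the same way as BatchNormalizer above; the names "layer_model" and
# "param" are assumptions.
#
#     ln = LayerNormalizer(epsilon=1e-4, use_layer_norm_op=True)
#     ln(layer_model, param)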