File: elementwise_linear.py

## @package elementwise_linear
# Module caffe2.python.helpers.elementwise_linear


from caffe2.python import core
from caffe2.python.modeling.parameter_info import ParameterTags


def _elementwise_linear(
    model, op_call, blob_in, blob_out, dim,
    weight_init=None, bias_init=None, **kwargs
):
    """Elementwise_Linear"""
    weight_init = weight_init or ('ConstantFill', {'value': 1.0})
    bias_init = bias_init or ('ConstantFill', {'value': 0.0})
    blob_out = blob_out or model.net.NextName()
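    # When the model owns parameter initialization, create the per-dimension
    # weight and bias blobs in param_init_net; otherwise only reference blobs
    # that are expected to be supplied externally (e.g. loaded from a saved model).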
    if model.init_params:
        weight = model.param_init_net.__getattr__(weight_init[0])(
            [],
            blob_out + '_w',
            shape=[dim],
            **weight_init[1]
        )
        bias = model.param_init_net.__getattr__(bias_init[0])(
            [],
            blob_out + '_b',
            shape=[dim],
            **bias_init[1]
        )
    else:
        weight = core.ScopedBlobReference(
            blob_out + '_w', model.param_init_net)
        bias = core.ScopedBlobReference(
            blob_out + '_b', model.param_init_net)

    model.AddParameter(weight, ParameterTags.WEIGHT)
    model.AddParameter(bias, ParameterTags.BIAS)
    return op_call([blob_in, weight, bias], blob_out, **kwargs)


def elementwise_linear(model, *args, **kwargs):
    return _elementwise_linear(
        model, model.net.ElementwiseLinear, *args, **kwargs)
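
A minimal usage sketch (not part of the packaged file), assuming a working
Caffe2 build; the blob names 'data' and 'data_scaled' and dim=16 are chosen
here purely for illustration. The helper adds an ElementwiseLinear op that
computes y = w * x + b along the feature axis, with weight and bias of shape [dim].

import numpy as np
from caffe2.python import model_helper, workspace
from caffe2.python.helpers.elementwise_linear import elementwise_linear

model = model_helper.ModelHelper(name="ew_linear_example")
# Adds 'data_scaled_w' and 'data_scaled_b' (shape [16]) plus the op itself.
elementwise_linear(model, 'data', 'data_scaled', dim=16)

workspace.FeedBlob('data', np.random.rand(4, 16).astype(np.float32))
workspace.RunNetOnce(model.param_init_net)   # fills weight with 1.0, bias with 0.0
workspace.RunNetOnce(model.net)              # runs the ElementwiseLinear op
print(workspace.FetchBlob('data_scaled').shape)  # (4, 16)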