File: parameter_info.py

package info: pytorch 1.13.1+dfsg-4
  • area: main
  • in suites: bookworm
  • size: 139,252 kB
  • sloc: cpp: 1,100,274; python: 706,454; ansic: 83,052; asm: 7,618; java: 3,273; sh: 2,841; javascript: 612; makefile: 323; xml: 269; ruby: 185; yacc: 144; objc: 68; lex: 44

from caffe2.python import core

import numpy as np


class ParameterTags(object):
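    """Tag constants used to classify model parameters: trainable weights
    and biases versus blobs that are computed rather than trained (such as
    batch-norm running statistics)."""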
    BIAS = 'BIAS'
    WEIGHT = 'WEIGHT'
    COMPUTED_PARAM = 'COMPUTED_PARAM'


class ParameterInfo(object):
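    """Bookkeeping record for a single parameter blob: its id, name,
    shape and size, gradient blob, optional precision copies, and the
    optimizer assigned to it."""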

    def __init__(
            self, param_id, param, key=None, shape=None, length=None,
            grad=None, blob_copy=None):
        assert isinstance(param, core.BlobReference)
        self.param_id = param_id
        self.name = str(param)
        self.blob = param
        self.key = key
        self.shape = shape
        self.size = None if shape is None else np.prod(shape)
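        # Default the length to 1 and clamp it so it never drops below 1.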
        self.length = max(1, length if length is not None else 1)
        self.grad = grad
        self._cloned_init_net = None
        # Optionally store equivalent copies of the blob in different
        # precisions (e.g. half and float copies), kept as a dict of
        # TensorProto.DataType -> BlobReference.
        self.blob_copy = blob_copy
        # Each param_info can have its own optimizer, which can be set
        # within an OptimizerContext (see caffe2/python/optimizer.py).
        self._optimizer = None

    @property
    def parameter(self):
        return self.blob

    @property
    def optimizer(self):
        return self._optimizer

    @optimizer.setter
    def optimizer(self, value):
        assert self._optimizer is None, "optimizer has already been set"
        self._optimizer = value

    def __str__(self):
        return self.name
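
For reference, here is a minimal usage sketch. The blob names, the shape, and the placeholder optimizer value are hypothetical, and the import path assumes the file lives under caffe2/python/modeling/ as in the pytorch source tree:

from caffe2.python import core
from caffe2.python.modeling.parameter_info import ParameterInfo

# Hypothetical blob names, for illustration only.
weight = core.BlobReference('fc/w')
weight_grad = core.BlobReference('fc/w_grad')

info = ParameterInfo(param_id=0, param=weight, shape=(256, 128),
                     grad=weight_grad)
print(str(info))       # prints 'fc/w'; __str__ returns the blob name
print(info.size)       # 32768, i.e. np.prod((256, 128))
print(info.parameter)  # the same BlobReference that was passed as `param`

# The optimizer can be assigned exactly once; a second assignment would
# trip the assertion in the setter.
info.optimizer = object()  # placeholder; normally set via an OptimizerContext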