from typing import Dict, List, Optional, Tuple

import torch
from torch import Tensor

from torch_geometric.data import FeatureStore, TensorAttr
from torch_geometric.typing import FeatureTensorType

KeyType = Tuple[Optional[str], Optional[str]]

class MyFeatureStore(FeatureStore):
    def __init__(self) -> None:
        super().__init__()
        self.store: Dict[KeyType, Tuple[Tensor, Tensor]] = {}

    @staticmethod
    def key(attr: TensorAttr) -> KeyType:
        return (attr.group_name, attr.attr_name)

    def _put_tensor(self, tensor: FeatureTensorType, attr: TensorAttr) -> bool:
        index = attr.index

        # None indices define the obvious index:
        if index is None:
            index = torch.arange(0, tensor.shape[0])

        # Store the index:
        assert isinstance(index, Tensor)
        assert isinstance(tensor, Tensor)
        self.store[self.key(attr)] = (index, tensor)
        return True
    def _get_tensor(self, attr: TensorAttr) -> Optional[Tensor]:
        index, tensor = self.store.get(self.key(attr), (None, None))

        if tensor is None:
            raise KeyError(f"Could not find tensor for '{attr}'")
        assert isinstance(tensor, Tensor)

        # None indices return the whole tensor:
        if attr.index is None:
            return tensor

        # Empty slices return the whole tensor:
        if (isinstance(attr.index, slice)
                and attr.index == slice(None, None, None)):
            return tensor

        # Empty tensor indices return an empty selection:
        assert isinstance(attr.index, Tensor)
        if attr.index.numel() == 0:
            return tensor[attr.index]

        # Map the requested (global) indices to positions in the stored
        # index, and gather the corresponding rows:
        idx = torch.cat([(index == v).nonzero() for v in attr.index]).view(-1)
        return tensor[idx]
    def _remove_tensor(self, attr: TensorAttr) -> bool:
        return self.store.pop(self.key(attr), None) is not None

    def _get_tensor_size(self, attr: TensorAttr) -> Optional[Tuple[int, ...]]:
        tensor = self._get_tensor(attr)
        return tensor.size() if tensor is not None else None

    def get_all_tensor_attrs(self) -> List[TensorAttr]:
        return [self._tensor_attr_cls.cast(*key) for key in self.store.keys()]
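
# A minimal usage sketch (illustrative, not part of the store itself). It
# assumes the public put_tensor()/get_tensor()/remove_tensor() front ends of
# torch_geometric.data.FeatureStore, which cast their arguments to a
# TensorAttr and dispatch to the private _put_tensor()/_get_tensor()/
# _remove_tensor() methods above. Group name, attribute name, and shapes
# here are made up.
if __name__ == '__main__':
    store = MyFeatureStore()
    x = torch.randn(10, 3)  # Hypothetical node feature matrix.

    # Store the full matrix under ('paper', 'x'); index=None lets
    # _put_tensor() fall back to the default index arange(0, 10):
    store.put_tensor(x, group_name='paper', attr_name='x', index=None)

    # Fetch a subset of rows by their (global) indices:
    out = store.get_tensor('paper', 'x', index=torch.tensor([0, 2, 4]))
    assert torch.equal(out, x[torch.tensor([0, 2, 4])])

    # Enumerate all stored attributes, then delete the tensor again:
    print(store.get_all_tensor_attrs())
    store.remove_tensor(group_name='paper', attr_name='x', index=None)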