types.py (forked from pytorch/pytorch)
import torch
from typing import Any, List, Sequence, Tuple, Union
import builtins

# Convenience aliases for common composite types that we need
# to talk about in PyTorch
_TensorOrTensors = Union[torch.Tensor, Sequence[torch.Tensor]]

# In some cases, these basic types are shadowed by corresponding
# top-level values. The underscore variants let us refer to these
# types. See https://github.com/python/mypy/issues/4146 for why these
# workarounds are necessary
_int = builtins.int
_float = builtins.float
_bool = builtins.bool
_dtype = torch.dtype
_device = torch.device
_qscheme = torch.qscheme
_size = Union[torch.Size, List[_int], Tuple[_int, ...]]
_layout = torch.layout

# Meta-type for "numeric" things; matches our docs
Number = Union[builtins.int, builtins.float, builtins.bool]

# Meta-type for "device-like" things. Not to be confused with 'device' (a
# literal device object). This nomenclature is consistent with PythonArgParser.
# None means use the default device (typically CPU)
Device = Union[_device, str, None]
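
# A minimal usage sketch with hypothetical helper names, showing how the
# aliases above typically appear in annotations.
def _example_scale(tensors: _TensorOrTensors, factor: Number) -> List[torch.Tensor]:
    # Accepts a single Tensor or any sequence of Tensors, plus a plain
    # Python int/float/bool as the scale factor.
    seq = [tensors] if isinstance(tensors, torch.Tensor) else list(tensors)
    return [t * factor for t in seq]


def _example_move(t: torch.Tensor, device: Device = None) -> torch.Tensor:
    # 'Device' covers a torch.device, a string such as "cuda:0", or None;
    # None means "use the default device" (typically CPU).
    return t if device is None else t.to(device)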

# Storage protocol implemented by ${Type}StorageBase classes
class Storage(object):
    _cdata: int

    def __deepcopy__(self, memo) -> 'Storage':
        ...

    def _new_shared(self, size: int) -> 'Storage':
        ...

    def _write_file(self, f: Any, is_real_file: _bool, save_size: _bool) -> None:
        ...

    def element_size(self) -> int:
        ...

    def is_shared(self) -> bool:
        ...

    def share_memory_(self) -> 'Storage':
        ...

    def size(self) -> int:
        ...

    ...
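

# A minimal sketch of consuming the protocol above (hypothetical helper, not
# a torch API): code that only needs the Storage interface can annotate
# against it and accept any conforming ${Type}StorageBase instance.
def _example_nbytes(storage: Storage) -> int:
    # Total size in bytes = number of elements * bytes per element.
    return storage.size() * storage.element_size()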