|
5 | 5 | =========================================== |
6 | 6 | """ |
7 | 7 |
|
| 8 | +import warnings |
| 9 | + |
| 10 | +import numpy as np |
| 11 | + |
8 | 12 | ###################################################################### |
9 | | -# |
| 13 | +# |
10 | 14 | # Before working on this tutorial, please make sure to review our |
11 | 15 | # `MaskedTensor Overview tutorial <https://pytorch.org/tutorials/prototype/maskedtensor_overview.html>`__.
12 | 16 | # |
13 | 17 | # The purpose of this tutorial is to help users understand how some of the advanced semantics work |
14 | 18 | # and how they came to be. We will focus on two particular ones: |
15 | 19 | # |
16 | | -# *. Differences between MaskedTensor and `NumPy's MaskedArray <https://numpy.org/doc/stable/reference/maskedarray.html>`__ |
| 20 | +# *. Differences between MaskedTensor and `NumPy's MaskedArray <https://numpy.org/doc/stable/reference/maskedarray.html>`__ |
17 | 21 | # *. Reduction semantics |
18 | 22 | # |
19 | 23 | # Preparation |
|
22 | 26 |
|
23 | 27 | import torch |
24 | 28 | from torch.masked import masked_tensor |
25 | | -import numpy as np |
26 | | -import warnings |
27 | 29 |
|
28 | 30 | # Disable prototype warnings and such |
29 | | -warnings.filterwarnings(action='ignore', category=UserWarning) |
| 31 | +warnings.filterwarnings(action="ignore", category=UserWarning) |
30 | 32 |
|
31 | 33 | ###################################################################### |
32 | 34 | # MaskedTensor vs NumPy's MaskedArray |
|
43 | 45 | # `apply the logical_or operator <https://github.com/numpy/numpy/blob/68299575d8595d904aff6f28e12d21bf6428a4ba/numpy/ma/core.py#L1016-L1024>`__. |
44 | 46 | # |
45 | 47 |
|
46 | | -data = torch.arange(5.) |
| 48 | +data = torch.arange(5.0) |
47 | 49 | mask = torch.tensor([True, True, False, True, False]) |
48 | 50 | npm0 = np.ma.masked_array(data.numpy(), (~mask).numpy()) |
49 | 51 | npm1 = np.ma.masked_array(data.numpy(), (mask).numpy()) |
|
65 | 67 | try: |
66 | 68 | mt0 + mt1 |
67 | 69 | except ValueError as e: |
68 | | - print ("mt0 + mt1 failed. Error: ", e) |
| 70 | + print("mt0 + mt1 failed. Error: ", e) |
69 | 71 |
|
70 | 72 | ###################################################################### |
71 | 73 | # However, if this behavior is desired, MaskedTensor does support these semantics by giving access to the data and masks |
|
125 | 127 | # In other words, why don't we use the same semantics as ``np.ma.masked_array``? Consider the following example: |
126 | 128 | # |
127 | 129 |
|
128 | | -data0 = torch.arange(10.).reshape(2, 5) |
129 | | -data1 = torch.arange(10.).reshape(2, 5) + 10 |
130 | | -mask0 = torch.tensor([[True, True, False, False, False], [False, False, False, True, True]]) |
131 | | -mask1 = torch.tensor([[False, False, False, True, True], [True, True, False, False, False]]) |
| 130 | +data0 = torch.arange(10.0).reshape(2, 5) |
| 131 | +data1 = torch.arange(10.0).reshape(2, 5) + 10 |
| 132 | +mask0 = torch.tensor( |
| 133 | + [[True, True, False, False, False], [False, False, False, True, True]] |
| 134 | +) |
| 135 | +mask1 = torch.tensor( |
| 136 | + [[False, False, False, True, True], [True, True, False, False, False]] |
| 137 | +) |
132 | 138 | npm0 = np.ma.masked_array(data0.numpy(), (mask0).numpy()) |
133 | 139 | npm1 = np.ma.masked_array(data1.numpy(), (mask1).numpy()) |
134 | 140 |
|
|
155 | 161 | mt0 = masked_tensor(data0, ~mask0) |
156 | 162 | mt1 = masked_tensor(data1, ~mask1) |
157 | 163 |
|
158 | | -(mt0.to_tensor(0) + mt1.to_tensor(0)).sum(0) |
| 164 | +print((mt0.to_tensor(0) + mt1.to_tensor(0)).sum(0)) |
159 | 165 |
|
160 | 166 | ###################################################################### |
161 | 167 | # Conclusion |
|
167 | 173 | # the associative property amongst binary operations), which in turn can necessitate the user |
168 | 174 | # to be more intentional with their code at times, but we believe this to be the better move. |
169 | 175 | # If you have any thoughts on this, please `let us know <https://github.com/pytorch/pytorch/issues>`__! |
170 | | -# |
|
0 commit comments