|
23 | 23 | softmax_helper = np.exp(softmax_helper) / np.sum(np.exp(softmax_helper))
|
24 | 24 | softmax_helper2 = np.array((softmax_helper, softmax_helper)).reshape(2, -1)
|
25 | 25 |
|
| 26 | +# Here, we use RAND1 * .5 as the threshold so that the expected fraction of masked inputs is 50%. |
| 27 | +softmax_threshold_helper = RAND1 * test_var |
| 28 | +softmax_threshold_helper = np.where(np.abs(softmax_threshold_helper) > RAND1 * .5, softmax_threshold_helper, -np.inf) |
| 29 | +if np.any(softmax_threshold_helper != -np.inf): |
| 30 | + softmax_threshold_helper = softmax_threshold_helper - np.max(softmax_threshold_helper) |
| 31 | +softmax_threshold_helper = np.exp(softmax_threshold_helper) |
| 32 | +if np.any(softmax_threshold_helper): |
| 33 | + softmax_threshold_helper = softmax_threshold_helper / np.sum(softmax_threshold_helper) |
| 34 | +softmax_threshold_helper2 = np.array((softmax_threshold_helper, softmax_threshold_helper)).reshape(2, -1) |
| 35 | + |
| 36 | + |
26 | 37 | tanh_helper = (RAND1 * (test_var + RAND2 - RAND3) + RAND4)
|
27 | 38 | tanh_helper = np.tanh(tanh_helper)
|
28 | 39 |
|
@@ -106,6 +117,77 @@ def binomial_distort_helper(seed):
|
106 | 117 | pytest.param(pnl.SoftMax, [test_var, test_var], {kw.GAIN:RAND1, kw.OUTPUT_TYPE:kw.MAX_INDICATOR, kw.PER_ITEM: True},
|
107 | 118 | np.where(softmax_helper2 == np.max(softmax_helper2), 1, 0), id="SOFT_MAX MAX_INDICATOR PER_ITEM"),
|
108 | 119 |
|
| 120 | + # SoftMax with mask_threshold 1D input |
| 121 | + pytest.param(pnl.SoftMax, test_var, |
| 122 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.PER_ITEM:False}, |
| 123 | + softmax_threshold_helper, id="SOFT_MAX MASK_THRESHOLD ALL", |
| 124 | + marks=pytest.mark.llvm_not_implemented), |
| 125 | + pytest.param(pnl.SoftMax, test_var, |
| 126 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:pnl.ARG_MAX, kw.PER_ITEM:False}, |
| 127 | + np.where(softmax_threshold_helper == np.max(softmax_threshold_helper), softmax_threshold_helper, 0), |
| 128 | + id="SOFT_MAX MASK_THRESHOLD ARG_MAX", marks=pytest.mark.llvm_not_implemented), |
| 129 | + pytest.param(pnl.SoftMax, test_var, |
| 130 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:pnl.ARG_MAX_INDICATOR, kw.PER_ITEM:False}, |
| 131 | + np.where(softmax_threshold_helper == np.max(softmax_threshold_helper), 1, 0), |
| 132 | + id="SOFT_MAX MASK_THRESHOLD ARG_MAX_INDICATOR", marks=pytest.mark.llvm_not_implemented), |
| 133 | + pytest.param(pnl.SoftMax, test_var, |
| 134 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:kw.MAX_VAL, kw.PER_ITEM:False}, |
| 135 | + np.where(softmax_threshold_helper == np.max(softmax_threshold_helper), softmax_threshold_helper, 0), |
| 136 | + id="SOFT_MAX MASK_THRESHOLD MAX_VAL", marks=pytest.mark.llvm_not_implemented), |
| 137 | + pytest.param(pnl.SoftMax, test_var, |
| 138 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:kw.MAX_INDICATOR, kw.PER_ITEM:False}, |
| 139 | + np.where(softmax_threshold_helper == np.max(softmax_threshold_helper), 1, 0), |
| 140 | + id="SOFT_MAX MASK_THRESHOLD MAX_INDICATOR", marks=pytest.mark.llvm_not_implemented), |
| 141 | + pytest.param(pnl.SoftMax, test_var, |
| 142 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:kw.PROB, kw.PER_ITEM:False}, |
| 143 | + [0.0, 0.0, 0.0, test_var[3], test_var[4], 0.0, 0.0, 0.0, 0.0, 0.0], |
| 144 | + id="SOFT_MAX MASK_THRESHOLD PROB", marks=pytest.mark.llvm_not_implemented), |
| 145 | + |
| 146 | + # SoftMax 2D threshold testing per-item |
| 147 | + pytest.param(pnl.SoftMax, [test_var], |
| 148 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.PER_ITEM:True}, [softmax_threshold_helper], |
| 149 | + id="SOFT_MAX MASK_THRESHOLD ALL 2D", marks=pytest.mark.llvm_not_implemented), |
| 150 | + pytest.param(pnl.SoftMax, [test_var], |
| 151 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:pnl.ARG_MAX, kw.PER_ITEM:True}, |
| 152 | + [np.where(softmax_threshold_helper == np.max(softmax_threshold_helper), softmax_threshold_helper, 0)], |
| 153 | + id="SOFT_MAX MASK_THRESHOLD ARG_MAX 2D", marks=pytest.mark.llvm_not_implemented), |
| 154 | + pytest.param(pnl.SoftMax, [test_var], |
| 155 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:pnl.ARG_MAX_INDICATOR, kw.PER_ITEM:True}, |
| 156 | + [np.where(softmax_threshold_helper == np.max(softmax_threshold_helper), 1, 0)], |
| 157 | + id="SOFT_MAX MASK_THRESHOLD ARG_MAX_INDICATOR 2D", marks=pytest.mark.llvm_not_implemented), |
| 158 | + pytest.param(pnl.SoftMax, [test_var], |
| 159 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:kw.MAX_VAL, kw.PER_ITEM:True}, |
| 160 | + [np.where(softmax_threshold_helper == np.max(softmax_threshold_helper), softmax_threshold_helper, 0)], |
| 161 | + id="SOFT_MAX MASK_THRESHOLD MAX_VAL 2D", marks=pytest.mark.llvm_not_implemented), |
| 162 | + pytest.param(pnl.SoftMax, [test_var], |
| 163 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:kw.MAX_INDICATOR, kw.PER_ITEM:True}, |
| 164 | + [np.where(softmax_threshold_helper == np.max(softmax_threshold_helper), 1, 0)], |
| 165 | + id="SOFT_MAX MASK_THRESHOLD MAX_INDICATOR 2D", marks=pytest.mark.llvm_not_implemented), |
| 166 | + pytest.param(pnl.SoftMax, [test_var], |
| 167 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:kw.PROB, kw.PER_ITEM:True}, |
| 168 | + [[0.0, 0.0, 0.0, test_var[3], test_var[4], 0.0, 0.0, 0.0, 0.0, 0.0]], |
| 169 | + id="SOFT_MAX MASK_THRESHOLD PROB 2D", marks=pytest.mark.llvm_not_implemented), |
| 170 | + |
| 171 | + # SoftMax threshold per-item with 2 elements in input |
| 172 | + pytest.param(pnl.SoftMax, [test_var, test_var], |
| 173 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.PER_ITEM:True}, softmax_threshold_helper2, |
| 174 | + id="SOFT_MAX MASK_THRESHOLD ALL 2 ITEMS", marks=pytest.mark.llvm_not_implemented), |
| 175 | + pytest.param(pnl.SoftMax, [test_var, test_var], {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:pnl.ARG_MAX, kw.PER_ITEM:True}, |
| 176 | + np.where(softmax_threshold_helper2 == np.max(softmax_threshold_helper2), softmax_threshold_helper2, 0), |
| 177 | + id="SOFT_MAX MASK_THRESHOLD ARG_MAX 2 ITEMS", marks=pytest.mark.llvm_not_implemented), |
| 178 | + pytest.param(pnl.SoftMax, [test_var, test_var], |
| 179 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:pnl.ARG_MAX_INDICATOR, kw.PER_ITEM:True}, |
| 180 | + np.where(softmax_threshold_helper2 == np.max(softmax_threshold_helper2), 1, 0), |
| 181 | + id="SOFT_MAX MASK_THRESHOLD ARG_MAX_INDICATOR 2 ITEMS", marks=pytest.mark.llvm_not_implemented), |
| 182 | + pytest.param(pnl.SoftMax, [test_var, test_var], |
| 183 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:kw.MAX_VAL, kw.PER_ITEM:True}, |
| 184 | + np.where(softmax_threshold_helper2 == np.max(softmax_threshold_helper2), softmax_threshold_helper2, 0), |
| 185 | + id="SOFT_MAX MASK_THRESHOLD MAX_VAL 2 ITEMS", marks=pytest.mark.llvm_not_implemented), |
| 186 | + pytest.param(pnl.SoftMax, [test_var, test_var], |
| 187 | + {kw.GAIN:RAND1, 'mask_threshold': RAND1 * .5, kw.OUTPUT_TYPE:kw.MAX_INDICATOR, kw.PER_ITEM:True}, |
| 188 | + np.where(softmax_threshold_helper2 == np.max(softmax_threshold_helper2), 1, 0), |
| 189 | + id="SOFT_MAX MASK_THRESHOLD MAX_INDICATOR 2 ITEMS", marks=pytest.mark.llvm_not_implemented), |
| 190 | + |
109 | 191 | # Linear Matrix
|
110 | 192 | pytest.param(pnl.MatrixTransform, test_var, {kw.MATRIX:test_matrix}, np.dot(test_var, test_matrix), id="LINEAR_MATRIX SQUARE"),
|
111 | 193 | pytest.param(pnl.MatrixTransform, test_var, {kw.MATRIX:test_matrix_l}, np.dot(test_var, test_matrix_l), id="LINEAR_MATRIX WIDE"),
|
|
0 commit comments