Skip to content

Commit

Permalink
change back to old abs_deriv
Browse files Browse the repository at this point in the history
  • Loading branch information
braingram committed Jul 9, 2024
1 parent 1ac2b23 commit ad63436
Show file tree
Hide file tree
Showing 2 changed files with 38 additions and 6 deletions.
28 changes: 26 additions & 2 deletions src/stcal/outlier_detection/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,9 +73,33 @@ def compute_weight_threshold(weight, maskpt):
return weight_threshold


def _abs_deriv(array):
def _valid_abs_deriv(array):
"""Take the absolute derivate of a numpy array."""
# TODO is there a more efficient way to do this?
out = np.zeros_like(array) # use same dtype as input

Check warning on line 78 in src/stcal/outlier_detection/utils.py

View check run for this annotation

Codecov / codecov/patch

src/stcal/outlier_detection/utils.py#L78

Added line #L78 was not covered by tests

# compute row-wise absolute diffference
d = np.abs(np.diff(array, axis=0))
out[1:] = d # no need to do max yet

Check warning on line 82 in src/stcal/outlier_detection/utils.py

View check run for this annotation

Codecov / codecov/patch

src/stcal/outlier_detection/utils.py#L81-L82

Added lines #L81 - L82 were not covered by tests
# since these are absolute differences |r0-r1| = |r1-r0|
# make a view of the target portion of the array
v = out[:-1]

Check warning on line 85 in src/stcal/outlier_detection/utils.py

View check run for this annotation

Codecov / codecov/patch

src/stcal/outlier_detection/utils.py#L85

Added line #L85 was not covered by tests
# compute an in-place maximum
np.putmask(v, d > v, d)

Check warning on line 87 in src/stcal/outlier_detection/utils.py

View check run for this annotation

Codecov / codecov/patch

src/stcal/outlier_detection/utils.py#L87

Added line #L87 was not covered by tests

# compute col-wise absolute difference
d = np.abs(np.diff(array, axis=1))
v = out[:, 1:]
np.putmask(v, d > v, d)
v = out[:, :-1]
np.putmask(v, d > v, d)
return out

Check warning on line 95 in src/stcal/outlier_detection/utils.py

View check run for this annotation

Codecov / codecov/patch

src/stcal/outlier_detection/utils.py#L90-L95

Added lines #L90 - L95 were not covered by tests


def _abs_deriv(array):
# FIXME this assumes off-edge pixels are 0
# FIXME this upcasts to float64
# FIXME _valid_abs_deriv fixes the above issues and is more efficient
# but fixing the bugs will likely change the output
tmp = np.zeros(array.shape, dtype=np.float64)
out = np.zeros(array.shape, dtype=np.float64)

Expand Down
16 changes: 12 additions & 4 deletions tests/outlier_detection/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,20 +12,28 @@


@pytest.mark.parametrize("shape,diff", [
    ([5, 7], 100),
    ([17, 13], -200),
])
def test_abs_deriv_single_value(shape, diff):
    """A single non-zero pixel surrounded by zeros.

    In that case abs_deriv is equivalent to convolving the absolute
    image with a 3x3 cross kernel, so use the convolution as the
    reference result.
    """
    arr = np.zeros(shape)
    center = arr.size // 2
    np.put(arr, center, diff)
    cross = [[0, 1, 0], [1, 1, 1], [0, 1, 0]]
    expected = scipy.signal.convolve2d(np.abs(arr), cross, mode='same')
    np.testing.assert_allclose(_abs_deriv(arr), expected)


@pytest.mark.skip(reason="_abs_deriv has edge effects due to treating off-edge pixels as 0")
@pytest.mark.parametrize("nrows,ncols", [(5, 5), (7, 11), (17, 13)])
def test_abs_deriv_range(nrows, ncols):
    """A row-major ramp: every pixel's largest neighbor difference is
    the vertical step, i.e. the number of columns."""
    ramp = np.arange(nrows * ncols).reshape(nrows, ncols)
    np.testing.assert_allclose(_abs_deriv(ramp), ncols)

Check warning on line 34 in tests/outlier_detection/test_utils.py

View check run for this annotation

Codecov / codecov/patch

tests/outlier_detection/test_utils.py#L32-L34

Added lines #L32 - L34 were not covered by tests


@pytest.mark.parametrize("shape,mean,maskpt,expected", [
([5, 5], 11, 0.5, 5.5),
([5, 5], 11, 0.25, 2.75),
Expand Down

0 comments on commit ad63436

Please sign in to comment.