Skip to content

Commit

Permalink
Use vector instead of stack
Browse files Browse the repository at this point in the history
  • Loading branch information
WilliamJamieson committed Nov 1, 2023
1 parent 5770243 commit bbd7000
Show file tree
Hide file tree
Showing 5 changed files with 20 additions and 49 deletions.
3 changes: 1 addition & 2 deletions src/stcal/ramp_fitting/ols_cas22/_core.pxd
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
from libcpp.vector cimport vector
from libcpp.stack cimport stack


cdef struct RampIndex:
Expand Down Expand Up @@ -53,5 +52,5 @@ cpdef enum RampJumpDQ:

cpdef float threshold(Thresh thresh, float slope)
cdef float get_power(float s)
cdef stack[RampIndex] init_ramps(int[:, :] dq, int n_resultants, int index_pixel)
cpdef vector[RampIndex] init_ramps(int[:, :] dq, int n_resultants, int index_pixel)
cpdef ReadPatternMetadata metadata_from_read_pattern(list[list[int]] read_pattern, float read_time)
41 changes: 7 additions & 34 deletions src/stcal/ramp_fitting/ols_cas22/_core.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -74,8 +74,7 @@ Functions
- cpdef gives a python wrapper, but the python version of this method
is considered private, only to be used for testing
"""
from libcpp.stack cimport stack
from libcpp.deque cimport deque
from libcpp.vector cimport vector
from libc.math cimport log10

import numpy as np
Expand Down Expand Up @@ -134,7 +133,7 @@ cpdef inline float threshold(Thresh thresh, float slope):

@cython.boundscheck(False)
@cython.wraparound(False)
cdef inline stack[RampIndex] init_ramps(int[:, :] dq, int n_resultants, int index_pixel):
cpdef inline vector[RampIndex] init_ramps(int[:, :] dq, int n_resultants, int index_pixel):
"""
    Create the initial ramp vector for each pixel
if dq[index_resultant, index_pixel] == 0, then the resultant is in a ramp
Expand All @@ -151,11 +150,11 @@ cdef inline stack[RampIndex] init_ramps(int[:, :] dq, int n_resultants, int inde
Returns
-------
stack of RampIndex objects
- stack with entry for each ramp found (top of stack is last ramp found)
vector of RampIndex objects
- vector with entry for each ramp found (last entry is last ramp found)
- RampIndex with start and end indices of the ramp in the resultants
"""
cdef stack[RampIndex] ramps = stack[RampIndex]()
cdef vector[RampIndex] ramps = vector[RampIndex]()

# Note: if start/end are -1, then no value has been assigned
# ramp.start == -1 means we have not started a ramp
Expand All @@ -181,44 +180,18 @@ cdef inline stack[RampIndex] init_ramps(int[:, :] dq, int n_resultants, int inde
ramp.end = index_resultant - 1

            # Add completed ramp to the vector and reset ramp
ramps.push(ramp)
ramps.push_back(ramp)
ramp = RampIndex(-1, -1)

# Handle case where last resultant is in ramp (so no end has been set)
if ramp.start != -1 and ramp.end == -1:
        # Last resultant is end of the ramp => set end, then append to the vector
ramp.end = n_resultants - 1
ramps.push(ramp)
ramps.push_back(ramp)

return ramps


def _init_ramps_list(np.ndarray[int, ndim=2] dq, int n_resultants, int index_pixel):
"""
This is a wrapper for init_ramps so that it can be fully inspected from pure
    python. A cpdef cannot be used in that case because a stack has no direct python
analog. Instead this function turns that stack into a list ordered in the same
order as the stack; meaning that, the first element of the list is the top of
the stack.
Note this function is for testing purposes only and so is marked as private
within this private module
"""
cdef stack[RampIndex] ramp = init_ramps(dq, n_resultants, index_pixel)

# Have to turn deque and stack into python compatible objects
cdef RampIndex index
cdef list out = []

out = []
while not ramp.empty():
index = ramp.top()
ramp.pop()
# So top of stack is first item of list
out = [index] + out

return out


@cython.boundscheck(False)
@cython.wraparound(False)
cpdef ReadPatternMetadata metadata_from_read_pattern(list[list[int]] read_pattern, float read_time):
Expand Down
4 changes: 2 additions & 2 deletions src/stcal/ramp_fitting/ols_cas22/_pixel.pxd
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from libcpp cimport bool
from libcpp.stack cimport stack
from libcpp.vector cimport vector

from stcal.ramp_fitting.ols_cas22._core cimport RampFit, RampFits, RampIndex
from stcal.ramp_fitting.ols_cas22._fixed cimport FixedValues
Expand All @@ -18,7 +18,7 @@ cdef class Pixel:
cdef float correction(Pixel self, RampIndex ramp, float slope)
cdef float stat(Pixel self, float slope, RampIndex ramp, int index, int diff)
cdef float[:] stats(Pixel self, float slope, RampIndex ramp)
cdef RampFits fit_ramps(Pixel self, stack[RampIndex] ramps, bool include_diagnostic)
cdef RampFits fit_ramps(Pixel self, vector[RampIndex] ramps, bool include_diagnostic)


cpdef Pixel make_pixel(FixedValues fixed, float read_noise, float [:] resultants)
15 changes: 7 additions & 8 deletions src/stcal/ramp_fitting/ols_cas22/_pixel.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ Functions
from libcpp cimport bool
from libc.math cimport sqrt, fabs
from libcpp.vector cimport vector
from libcpp.stack cimport stack

import numpy as np
cimport numpy as np
Expand Down Expand Up @@ -331,15 +330,15 @@ cdef class Pixel:
@cython.boundscheck(False)
@cython.wraparound(False)
@cython.cdivision(True)
cdef inline RampFits fit_ramps(Pixel self, stack[RampIndex] ramps, bool include_diagnostic):
cdef inline RampFits fit_ramps(Pixel self, vector[RampIndex] ramps, bool include_diagnostic):
"""
Compute all the ramps for a single pixel using the Casertano+22 algorithm
with jump detection.
Parameters
----------
ramps : stack[RampIndex]
Stack of initial ramps to fit for a single pixel
ramps : vector[RampIndex]
Vector of initial ramps to fit for a single pixel
multiple ramps are possible due to dq flags
Returns
Expand All @@ -362,8 +361,8 @@ cdef class Pixel:
        # Run while the vector of pending ramps is non-empty
while not ramps.empty():
            # Remove the last ramp from the vector to use (LIFO, matching the old stack behavior)
ramp = ramps.top()
ramps.pop()
ramp = ramps.back()
ramps.pop_back()

# Compute fit
ramp_fit = self.fit_ramp(ramp)
Expand Down Expand Up @@ -426,7 +425,7 @@ cdef class Pixel:
# important that we exclude it.
# Note that jump0 < ramp.start is not possible because
# the argmax is always >= 0
ramps.push(RampIndex(ramp.start, jump0 - 1))
ramps.push_back(RampIndex(ramp.start, jump0 - 1))

if jump1 < ramp.end:
# Note that if jump1 == ramp.end, we have detected a
Expand All @@ -440,7 +439,7 @@ cdef class Pixel:
# resultants which are not considered part of the ramp
                        # under consideration. Therefore, we have to exclude all
# of those values.
ramps.push(RampIndex(jump1 + 1, ramp.end))
ramps.push_back(RampIndex(jump1 + 1, ramp.end))

continue

Expand Down
6 changes: 3 additions & 3 deletions tests/test_jump_cas22.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import pytest
from numpy.testing import assert_allclose

from stcal.ramp_fitting.ols_cas22._core import metadata_from_read_pattern, threshold
from stcal.ramp_fitting.ols_cas22._core import init_ramps, metadata_from_read_pattern, threshold
from stcal.ramp_fitting.ols_cas22._fixed import fixed_values_from_metadata
from stcal.ramp_fitting.ols_cas22._pixel import make_pixel

Expand Down Expand Up @@ -82,15 +82,15 @@ def test_init_ramps():
    a direct python equivalent, `init_ramps` is declared `cpdef`; Cython
    automatically converts the returned vector into a python list, ordered
    with the first ramp found first.
"""
from stcal.ramp_fitting.ols_cas22._core import _init_ramps_list
# from stcal.ramp_fitting.ols_cas22._core import _init_ramps_list

dq = np.array([[0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1],
[0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1],
[0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1],
[0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1]], dtype=np.int32)

n_resultants, n_pixels = dq.shape
ramps = [_init_ramps_list(dq, n_resultants, index_pixel) for index_pixel in range(n_pixels)]
ramps = [init_ramps(dq, n_resultants, index_pixel) for index_pixel in range(n_pixels)]

assert len(ramps) == dq.shape[1] == 16

Expand Down

0 comments on commit bbd7000

Please sign in to comment.