Skip to content

Commit

Permalink
Merge pull request #122 from usnistgov/121-bug-sst1rsoxsdb-scan-hinti…
Browse files Browse the repository at this point in the history
…ng-breaks-if-a-scanned-axis-has-negative-value

`SST1RSoXSDB`: fix bug described in #121
  • Loading branch information
pdudenas authored Apr 4, 2024
2 parents 1b43ab4 + dfa928f commit 6603dd4
Showing 1 changed file with 28 additions and 19 deletions.
47 changes: 28 additions & 19 deletions src/PyHyperScattering/SST1RSoXSDB.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ def __init__(
catalog_kwargs={},
use_precise_positions=False,
use_chunked_loading=False,
suppress_time_dimension=True,
):
"""
Args:
Expand All @@ -77,6 +78,7 @@ def __init__(
catalog_kwargs (dict): kwargs to be passed to a from_profile catalog generation script. For example, you can ask for Dask arrays here.
use_precise_positions (bool): if False, rounds sam_x and sam_y to 1 digit. If True, keeps default rounding (4 digits). Needed for spiral scans to work with readback positions.
use_chunked_loading (bool): if True, returns Dask backed arrays for further Dask processing. if false, behaves in conventional Numpy-backed way
suppress_time_dimension (bool): if True, time is never a dimension that you want in your data and will be dropped (default). if False, time will be a dimension in almost every scan.
"""

if corr_mode == None:
Expand Down Expand Up @@ -114,6 +116,7 @@ def __init__(
self.dark_pedestal = dark_pedestal
self.exposure_offset = exposure_offset
self.use_precise_positions = use_precise_positions
self.suppress_time_dimension = suppress_time_dimension

# def loadFileSeries(self,basepath):
# try:
Expand Down Expand Up @@ -287,7 +290,7 @@ def searchCatalog(
# Skip arguments with value None, and quits if the catalog was reduced to 0 elements
if (searchSeries[1] is not None) and (len(reducedCatalog) > 0):
# For numeric entries, do Key equality
if "numeric" in str(searchSeries[2]):
if "numeric" in str(searchSeries.iloc[2]):
reducedCatalog = reducedCatalog.search(
Key(searchSeries.iloc[0]) == float(searchSeries.iloc[1])
)
Expand Down Expand Up @@ -568,11 +571,11 @@ def loadRun(
raw (xarray): raw xarray containing your scan in PyHyper-compliant format
"""
if type(run) is int:
if isinstance(run,int):
run = self.c[run]
elif type(run) is pd.DataFrame:
elif isinstance(run,pd.DataFrame):
run = list(run.scan_id)
if type(run) is list:
if isinstance(run,list):
return self.loadSeries(
run,
"sample_name",
Expand Down Expand Up @@ -606,30 +609,39 @@ def loadRun(
else:
axes_to_include = []
rsd_cutoff = 0.005

# begin with a list of the things that are primary streams
axis_list = list(run["primary"]["data"].keys())

# next, knock out anything that has 'image', 'fullframe' in it - these aren't axes
axis_list = [x for x in axis_list if "image" not in x]
axis_list = [x for x in axis_list if "fullframe" not in x]
axis_list = [x for x in axis_list if "stats" not in x]
axis_list = [x for x in axis_list if "saturated" not in x]
axis_list = [x for x in axis_list if "under_exposed" not in x]

# knock out any known names of scalar counters
axis_list = [x for x in axis_list if "Beamstop" not in x]
axis_list = [x for x in axis_list if "Current" not in x]

if self.suppress_time_dimension:
axis_list = [x for x in axis_list if x != "time"]

# now, clean up duplicates.
axis_list = [x for x in axis_list if "setpoint" not in x]
# now, figure out what's actually moving. we use a relative standard deviation to do this.
# arbitrary cutoff of 0.5% motion = it moved intentionally.
for axis in axis_list:
std = np.std(run["primary"]["data"][axis])
mean = np.mean(run["primary"]["data"][axis])
rsd = std / mean

motion = np.abs(np.max(run["primary"]["data"][axis])-np.min(run["primary"]["data"][axis]))
if motion == 0:
rsd = 0
else:
rsd = std / motion
#print(f'Evaluating {axis} for inclusion as a dimension with rsd {rsd}...')
if rsd > rsd_cutoff:
axes_to_include.append(axis)
#print(f' --> it was included')

# next, construct the reverse lookup table - best mapping we can make of key to pyhyper word
# we start with the lookup table used by loadMd()
Expand Down Expand Up @@ -681,15 +693,11 @@ def loadRun(
"""

data = run["primary"]["data"][md["detector"] + "_image"]
if (
type(data) == tiled.client.array.ArrayClient
or type(data) == tiled.client.array.DaskArrayClient
):
if isinstance(data,tiled.client.array.ArrayClient):
data = run["primary"]["data"].read()[md["detector"] + "_image"]
elif isinstance(data,tiled.client.array.DaskArrayClient):
data = run["primary"]["data"].read()[md["detector"] + "_image"]
elif type(data) == tiled.client.array.DaskArrayClient:
data = xr.DataArray(
data.read(), dims=data.dims
) # xxx hack! Really should use tiled structure_clients xarraydaskclient here.

data = data.astype(int) # convert from uint to handle dark subtraction

if self.dark_subtract:
Expand All @@ -707,7 +715,7 @@ def loadRun(
def subtract_dark(img, pedestal=100, darks=None):
return img + pedestal - darks[int(img.dark_id.values)]

data = data.groupby("time").map(subtract_dark, darks=dark, pedestal=self.dark_pedestal)
data = data.groupby("time",squeeze=False).map(subtract_dark, darks=dark, pedestal=self.dark_pedestal)

dims_to_join = []
dim_names_to_join = []
Expand All @@ -733,11 +741,12 @@ def subtract_dark(img, pedestal=100, darks=None):
if len(index) != len(data["time"]):
index = index[: len(data["time"])]
actual_exposure = md["exposure"] * len(data.dim_0)
mindex_coords = xr.Coordinates.from_pandas_multiindex(index, 'system')
retxr = (
data.sum("dim_0")
.rename({"dim_1": "pix_y", "dim_2": "pix_x"})
.rename({"time": "system"})
.assign_coords(system=index)
.assign_coords(mindex_coords)
) # ,md['detector']+'_image':'intensity'})

# this is needed for holoviews compatibility, hopefully does not break other features.
Expand Down Expand Up @@ -776,7 +785,7 @@ def subtract_dark(img, pedestal=100, darks=None):
retxr = retxr / monitors["RSoXS Au Mesh Current"]
elif self.corr_mode != "none":
warnings.warn(
"corrections other than none are not supported at the moment",
"corrections other than none or i0 are not supported at the moment",
stacklevel=2,
)

Expand Down

0 comments on commit 6603dd4

Please sign in to comment.