From cb0b6df821108a163bf7fbb02c0de878c9f8eb88 Mon Sep 17 00:00:00 2001 From: Ryuichi Arafune Date: Wed, 7 Feb 2024 18:21:40 +0900 Subject: [PATCH] =?UTF-8?q?=F0=9F=94=A5=20=20Remove=20trace=20arg=20and=20?= =?UTF-8?q?related=20file.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- arpes/endstations/__init__.py | 31 +++----- arpes/endstations/fits_utils.py | 56 +++++--------- arpes/endstations/plugin/fallback.py | 8 +- arpes/fits/utilities.py | 1 - arpes/io.py | 3 +- arpes/trace.py | 98 ------------------------- arpes/utilities/conversion/forward.py | 2 - arpes/utilities/conversion/trapezoid.py | 14 +--- 8 files changed, 37 insertions(+), 176 deletions(-) delete mode 100644 arpes/trace.py diff --git a/arpes/endstations/__init__.py b/arpes/endstations/__init__.py index 24d00ebc..da05ebd8 100644 --- a/arpes/endstations/__init__.py +++ b/arpes/endstations/__init__.py @@ -21,7 +21,6 @@ from arpes.load_pxt import find_ses_files_associated, read_single_pxt from arpes.provenance import PROVENANCE, provenance_from_file from arpes.repair import negate_energy -from arpes.trace import Trace, traceable from arpes.utilities.dict import rename_dataarray_attrs from .fits_utils import find_clean_coords @@ -153,11 +152,8 @@ class EndstationBase: RENAME_KEYS: ClassVar[dict[str, str]] = {} - trace: Trace - def __init__(self) -> None: """Initialize.""" - self.trace = Trace(silent=True) @classmethod def is_file_accepted( @@ -455,13 +451,13 @@ def load(self, scan_desc: SCANDESC | None = None, **kwargs: Incomplete) -> xr.Da """ if scan_desc is None: scan_desc = {} - self.trace("Resolving frame locations") + logger.debug("Resolving frame locations") resolved_frame_locations = self.resolve_frame_locations(scan_desc) - self.trace(f"resolved_frame_locations: {resolved_frame_locations}") + logger.debug(f"resolved_frame_locations: {resolved_frame_locations}") if not resolved_frame_locations: msg = "File not found" raise RuntimeError(msg) - 
self.trace(f"Found frames: {resolved_frame_locations}") + logger.debug(f"Found frames: {resolved_frame_locations}") frames = [ self.load_single_frame(fpath, scan_desc, **kwargs) for fpath in resolved_frame_locations ] @@ -794,7 +790,7 @@ def load_single_frame( for k, v in kwargs.items(): logger.debug(f" key {k}: value{v}") # Use dimension labels instead of - self.trace("Opening FITS HDU list.") + logger.debug("Opening FITS HDU list.") hdulist = fits.open(frame_path, ignore_missing_end=True) primary_dataset_name = None @@ -805,12 +801,12 @@ def load_single_frame( del hdulist[i].header["UN_0_0"] hdulist[i].header["UN_0_0"] = "" if "TTYPE2" in hdulist[i].header and hdulist[i].header["TTYPE2"] == "Delay": - self.trace("Using ps delay units. This looks like an ALG main chamber scan.") + logger.debug("Using ps delay units. This looks like an ALG main chamber scan.") hdulist[i].header["TUNIT2"] = "" del hdulist[i].header["TUNIT2"] hdulist[i].header["TUNIT2"] = "ps" - self.trace(f"HDU {i}: Attempting to fix FITS errors.") + logger.debug(f"HDU {i}: Attempting to fix FITS errors.") with warnings.catch_warnings(): warnings.simplefilter("ignore") hdulist[i].verify("fix+warn") @@ -836,9 +832,8 @@ def load_single_frame( hdu, attrs, mode="MC", - trace=self.trace, ) - self.trace("Recovered coordinates from FITS file.") + logger.debug("Recovered coordinates from FITS file.") attrs = rename_keys(attrs, self.RENAME_KEYS) scan_desc = rename_keys(scan_desc, self.RENAME_KEYS) @@ -987,7 +982,7 @@ def prep_spectrum(data: xr.DataArray) -> xr.DataArray: k: np.deg2rad(c) if k in deg_to_rad_coords else c for k, c in built_coords.items() } - self.trace("Stitching together xr.Dataset.") + logger.debug("Stitching together xr.Dataset.") return xr.Dataset( { f"safe-{name}" if name in data_var.coords else name: data_var @@ -1086,12 +1081,10 @@ def resolve_endstation(*, retry: bool = True, **kwargs: Incomplete) -> type[Ends raise ValueError(msg) from key_error -@traceable def load_scan( scan_desc: 
SCANDESC, *, retry: bool = True, - trace: Trace | None = None, **kwargs: Incomplete, ) -> xr.Dataset: """Resolves a plugin and delegates loading a scan. @@ -1106,7 +1099,6 @@ def load_scan( Args: scan_desc: Information identifying the scan, typically a scan number or full path. retry: Used to attempt a reload of plugins and subsequent data load attempt. - trace: Trace instance for debugging, pass True or False (default) to control this parameter kwargs: pass to the endstation.load(scan_dec, **kwargs) Returns: @@ -1118,7 +1110,7 @@ def load_scan( full_note.update(note) endstation_cls = resolve_endstation(retry=retry, **full_note) - trace(f"Using plugin class {endstation_cls}") if trace else None + logger.debug(f"Using plugin class {endstation_cls}") key: Literal["file", "path"] = "file" if "file" in scan_desc else "path" @@ -1130,7 +1122,6 @@ def load_scan( except ValueError: pass - trace(f"Loading {scan_desc}") if trace else None + logger.debug(f"Loading {scan_desc}") endstation = endstation_cls() - endstation.trace = trace - return endstation.load(scan_desc, trace=trace, **kwargs) + return endstation.load(scan_desc, **kwargs) diff --git a/arpes/endstations/fits_utils.py b/arpes/endstations/fits_utils.py index a677cb79..c9c31d82 100644 --- a/arpes/endstations/fits_utils.py +++ b/arpes/endstations/fits_utils.py @@ -5,7 +5,7 @@ import functools import warnings from ast import literal_eval -from collections.abc import Callable, Iterable +from collections.abc import Iterable from logging import DEBUG, INFO, Formatter, StreamHandler, getLogger from typing import TYPE_CHECKING, Any, TypeAlias @@ -13,7 +13,6 @@ from numpy import ndarray from numpy._typing import NDArray -from arpes.trace import Trace, traceable from arpes.utilities.funcutils import collect_leaves, iter_leaves if TYPE_CHECKING: @@ -53,19 +52,15 @@ Dimension = str -@traceable def extract_coords( attrs: dict[str, Any], dimension_renamings: dict[str, str] | None = None, - trace: Trace | None = None, ) -> 
tuple[CoordsDict, list[Dimension], list[int]]: """Does the hard work of extracting coordinates from the scan description. Args: attrs: dimension_renamings: - trace: A Trace instance used for debugging. You can pass True or False (including to the - originating load_data call) to enable execution tracing. Returns: A tuple consisting of the coordinate arrays, the dimension names, and their shapes @@ -75,7 +70,7 @@ def extract_coords( try: n_loops = attrs["LWLVLPN"] - trace(f"Found n_loops={n_loops}") if trace else None + logger.debug(f"Found n_loops={n_loops}") except KeyError: # Looks like no scan, this happens for instance in the SToF when you take a single # EDC @@ -89,9 +84,9 @@ def extract_coords( scan_coords = {} for loop in range(n_loops): n_scan_dimensions = attrs[f"NMSBDV{loop}"] - trace(f"Considering loop {loop}, n_scan_dimensions={n_scan_dimensions}") if trace else None + logger.debug(f"Considering loop {loop}, n_scan_dimensions={n_scan_dimensions}") if attrs[f"SCNTYP{loop}"] == 0: - trace("Loop is computed") if trace else None + logger.debug("Loop is computed") for i in range(n_scan_dimensions): name, start, end, n = ( attrs[f"NM_{loop}_{i}"], @@ -118,13 +113,13 @@ def extract_coords( # # As of 2021, that is the perspective we are taking on the issue. elif n_scan_dimensions > 1: - trace("Loop is tabulated and is not region based") if trace else None + logger.debug("Loop is tabulated and is not region based") for i in range(n_scan_dimensions): name = attrs[f"NM_{loop}_{i}"] if f"ST_{loop}_{i}" not in attrs and f"PV_{loop}_{i}_0" in attrs: msg = f"Determined that coordinate {name} " msg += "is tabulated based on scan coordinate. Skipping!" 
-                    trace(msg) if trace else None
+                    logger.debug(msg)
                     continue
                 start, end, n = (
                     float(attrs[f"ST_{loop}_{i}"]),
@@ -134,14 +129,14 @@ def extract_coords(
 
                 old_name = name
                 name = dimension_renamings.get(name, name)
-                trace(f"Renaming: {old_name} -> {name}") if trace else None
+                logger.debug(f"Renaming: {old_name} -> {name}")
 
                 scan_dimension.append(name)
                 scan_shape.append(n)
                 scan_coords[name] = np.linspace(start, end, n, endpoint=True)
 
         else:
-            trace("Loop is tabulated and is region based") if trace else None
+            logger.debug("Loop is tabulated and is region based")
             name, n = (
                 attrs[f"NM_{loop}_0"],
                 attrs[f"NMPOS_{loop}"],
@@ -159,7 +154,7 @@ def extract_coords(
                 n_regions = 1
 
             name = dimension_renamings.get(name, name)
-            trace(f"Loop (name, n_regions, size) = {(name, n_regions, n)}") if trace else None
+            logger.debug(f"Loop (name, n_regions, size) = {(name, n_regions, n)}")
 
             coord: NDArray[np.float_] = np.array(())
             for region in range(n_regions):
@@ -171,7 +166,7 @@ def extract_coords(
 
                 msg = f"Reading coordinate {region} from loop. (start, end, n)"
                 msg += f"{(start, end, n)}"
-                trace(msg) if trace else None
+                logger.debug(msg)
 
                 coord = np.concatenate((coord, np.linspace(start, end, n, endpoint=True)))
 
@@ -181,14 +176,12 @@ def extract_coords(
     return scan_coords, scan_dimension, scan_shape
 
 
-@traceable
 def find_clean_coords(
     hdu: BinTableHDU,
     attrs: dict[str, Any],
     spectra: Any = None,
     mode: str = "ToF",
     dimension_renamings: Any = None,
-    trace: Callable | None = None,
 ) -> tuple[CoordsDict, dict[str, list[Dimension]], dict[str, Any]]:
     """Determines the scan degrees of freedom, and reads coordinates.
@@ -224,18 +217,13 @@ def find_clean_coords( scan_coords, scan_dimension, scan_shape = extract_coords( attrs, dimension_renamings=dimension_renamings, - trace=trace, ) - trace(f"Found scan shape {scan_shape} and dimensions {scan_dimension}.") if trace else None + logger.debug(f"Found scan shape {scan_shape} and dimensions {scan_dimension}.") # bit of a hack to deal with the internal motor used for the swept spectra being considered as # a cycle if "cycle" in scan_coords and len(scan_coords["cycle"]) > 200: - ( - trace("Renaming swept scan coordinate to cycle and extracting. This is hack.") - if trace - else None - ) + logger.debug("Renaming swept scan coordinate to cycle and extracting. This is hack.") idx = scan_dimension.index("cycle") real_data_for_cycle = hdu.data.columns["null"].array @@ -258,14 +246,14 @@ def find_clean_coords( spectra = [spectra] for spectrum_key in spectra: - trace(f"Considering potential spectrum {spectrum_key}") if trace else None + logger.debug(f"Considering potential spectrum {spectrum_key}") skip_names = { lambda name: bool("beamview" in name or "IMAQdx" in name), } if spectrum_key is None: spectrum_key = hdu.columns.names[-1] - trace(f"Column name was None, using {spectrum_key}") if trace else None + logger.debug(f"Column name was None, using {spectrum_key}") if isinstance(spectrum_key, str): spectrum_key = hdu.columns.names.index(spectrum_key) + 1 @@ -279,7 +267,7 @@ def find_clean_coords( if (callable(skipped) and skipped(spectrum_name)) or skipped == spectrum_name: should_skip = True if should_skip: - trace("Skipping column.") if trace else None + logger.debug("Skipping column.") continue try: @@ -287,23 +275,17 @@ def find_clean_coords( delta = hdu.header[f"TDELT{spectrum_key}"] offset = literal_eval(offset) if isinstance(offset, str) else offset delta = literal_eval(delta) if isinstance(delta, str) else delta - trace(f"Determined (offset, delta): {(offset, delta)}.") if trace else None + logger.debug(f"Determined (offset, delta): 
{(offset, delta)}.") try: shape = hdu.header[f"TDIM{spectrum_key}"] shape = literal_eval(shape) if isinstance(shape, str) else shape loaded_shape_from_header = True - ( - trace(f"Successfully loaded coordinate shape from header: {shape}") - if trace - else None - ) + logger.debug(f"Successfully loaded coordinate shape from header: {shape}") except KeyError: shape = hdu.data.field(spectrum_key - 1).shape - ( - trace(f"Could not use header to determine coordinate shape, using: {shape}") - if trace - else None + logger.debug( + f"Could not use header to determine coordinate shape, using: {shape}", ) try: diff --git a/arpes/endstations/plugin/fallback.py b/arpes/endstations/plugin/fallback.py index fedea8e0..d09d54cc 100644 --- a/arpes/endstations/plugin/fallback.py +++ b/arpes/endstations/plugin/fallback.py @@ -1,4 +1,5 @@ """Implements dynamic plugin selection when users do not specify the location for their data.""" + from __future__ import annotations import warnings @@ -6,7 +7,6 @@ from typing import TYPE_CHECKING, ClassVar from arpes.endstations import EndstationBase, resolve_endstation -from arpes.trace import Trace, traceable if TYPE_CHECKING: from pathlib import Path @@ -59,12 +59,9 @@ class FallbackEndstation(EndstationBase): ] @classmethod - @traceable def determine_associated_loader( cls: type[FallbackEndstation], file: str | Path, - *, - trace: Trace | None = None, ) -> type[EndstationBase]: """Determines which loading plugin to use for a given piece of data. 
@@ -76,7 +73,7 @@ def determine_associated_loader( arpes.config.load_plugins() for location in cls.ATTEMPT_ORDER: - trace(f"{cls.__name__} is trying {location}") + logger.debug(f"{cls.__name__} is trying {location}") try: endstation_cls = resolve_endstation(retry=False, location=location) @@ -104,7 +101,6 @@ def load( associated_loader = FallbackEndstation.determine_associated_loader( file, scan_desc, - trace=self.trace, ) try: file_number = int(file) diff --git a/arpes/fits/utilities.py b/arpes/fits/utilities.py index 65b51bc9..04742b1d 100644 --- a/arpes/fits/utilities.py +++ b/arpes/fits/utilities.py @@ -150,7 +150,6 @@ def broadcast_model( than 20 fits were requested progress: Whether to show a progress bar safe: Whether to mask out nan values - trace: Controls whether execution tracing/timestamping is used for performance investigation Returns: An `xr.Dataset` containing the curve fitting results. These are data vars: diff --git a/arpes/io.py b/arpes/io.py index 9a35d053..8c6ec123 100644 --- a/arpes/io.py +++ b/arpes/io.py @@ -98,8 +98,7 @@ def load_data( ), stacklevel=2, ) - if kwargs.get("trace"): - logger.debug(f"contents of desc: {desc}") + logger.debug(f"contents of desc: {desc}") return load_scan(desc, **kwargs) diff --git a/arpes/trace.py b/arpes/trace.py deleted file mode 100644 index 9cd776c7..00000000 --- a/arpes/trace.py +++ /dev/null @@ -1,98 +0,0 @@ -"""Provides lightweight perf and tracing tools which also provide light logging functionality.""" - -from __future__ import annotations - -import functools -import time -from dataclasses import dataclass, field -from logging import DEBUG, INFO, Formatter, StreamHandler, getLogger -from typing import TYPE_CHECKING, ParamSpec, TypeVar - -if TYPE_CHECKING: - from collections.abc import Callable - - -__all__ = [ - "traceable", -] -LOGLEVELS = (DEBUG, INFO) -LOGLEVEL = LOGLEVELS[1] -logger = getLogger(__name__) -fmt = "%(asctime)s %(levelname)s %(name)s :%(message)s" -formatter = Formatter(fmt) -handler = 
StreamHandler() -handler.setLevel(LOGLEVEL) -logger.setLevel(LOGLEVEL) -handler.setFormatter(formatter) -logger.addHandler(handler) -logger.propagate = False - - -@dataclass -class Trace: - silent: bool = False - start_time: float = field(default_factory=time.time_ns) - - def __call__(self, message: str) -> None: - """[TODO:summary]. - - [TODO:description] - - Args: - message: [TODO:description] - - Returns: - [TODO:description] - """ - if self.silent: - return - - now = time.time_ns() - elapsed = (now - self.start_time) // 1000000 # to ms - message = f"{elapsed} ms: {message}" - logger.info(message) - - -P = ParamSpec("P") -R = TypeVar("R") - - -def traceable(original: Callable[P, R]) -> Callable[P, R]: - """A decorator which takes a function and feeds a trace instance through its parameters. - - The call API of the returned function is that there is a `trace=` parameter which expects - a bool (feature gate). - - Internally, this decorator turns that into a `Trace` instance and silences it if tracing is - to be disabled (the user passed trace=False or did not pass trace= by keyword). - - Args: - original: The function to decorate - - Returns: - The decorated function which accepts a trace= keyword argument. - """ - - @functools.wraps(original) - def _inner(*args: P.args, **kwargs: P.kwargs) -> R: - """[TODO:summary]. 
- - [TODO:description] - - Args: - args: [TODO:description] - kwargs: [TODO:description] - - Returns: - [TODO:description] - """ - trace = kwargs.get("trace", False) - - # this allows us to pass Trace instances into function calls - if not isinstance(trace, Trace): - trace = Trace(silent=not trace) - - kwargs["trace"] = trace - return original(*args, **kwargs) - - return _inner diff --git a/arpes/utilities/conversion/forward.py b/arpes/utilities/conversion/forward.py index 329f3ad7..c84749f3 100644 --- a/arpes/utilities/conversion/forward.py +++ b/arpes/utilities/conversion/forward.py @@ -179,7 +179,6 @@ def convert_through_angular_pair( # noqa: PLR0913 axis on the momentum converted data. relative_coords: Whether to give `cut_specification` relative to the momentum converted location specified in `coords` - trace: Flag controlling execution tracing k_coords: Passed as hints through to `convert_coordinate_forward`. Returns: @@ -269,7 +268,6 @@ def convert_through_angular_point( axis on the momentum converted data. relative_coords: Whether to give `cut_specification` relative to the momentum converted location specified in `coords` - trace: Flag controlling execution tracing k_coords: Passed as hints through to `convert_coordinate_forward`. Returns: diff --git a/arpes/utilities/conversion/trapezoid.py b/arpes/utilities/conversion/trapezoid.py index 395c026b..b0973235 100644 --- a/arpes/utilities/conversion/trapezoid.py +++ b/arpes/utilities/conversion/trapezoid.py @@ -10,7 +10,6 @@ import numpy as np import xarray as xr -from arpes.trace import traceable from arpes.utilities import normalize_to_spectrum from .base import CoordinateConverter @@ -180,11 +179,9 @@ def phi_to_phi_forward( return phi_out -@traceable def apply_trapezoidal_correction( data: xr.DataArray, corners: list[dict[str, float]], - trace: Callable | None = None, ) -> xr.DataArray: """Applies the trapezoidal correction to data in angular units by linearly interpolating slices. 
@@ -197,8 +194,6 @@ def apply_trapezoidal_correction( corners: These don't actually have to be corners, but are waypoints of the conversion. Use points near the Fermi level and near the bottom of the spectrum just at the edge of recorded angular region. - trace: A trace instance which can be used to enable execution tracing and debugging. - Pass ``True`` to enable. Returns: @@ -228,11 +223,11 @@ def apply_trapezoidal_correction( original_coords = data.coords - trace("Determining dimensions.") if trace else None + logger.debug("Determining dimensions.") if "phi" not in data.dims: msg = "The data must have a phi coordinate." raise ValueError(msg) - trace("Replacing dummy coordinates with index-like ones.") if trace else None + logger.debug("Replacing dummy coordinates with index-like ones.") removed = [d for d in data.dims if d not in ["eV", "phi"]] data = data.transpose(*(["eV", "phi", *removed])) converted_dims = data.dims @@ -244,7 +239,7 @@ def apply_trapezoidal_correction( converter = ConvertTrapezoidalCorrection(data, converted_dims, corners=corners) converted_coordinates = converter.get_coordinates() - trace("Calling convert_coordinates") if trace else None + logger.debug("Calling convert_coordinates") result = convert_coordinates( data, converted_coordinates, @@ -254,10 +249,9 @@ def apply_trapezoidal_correction( zip(data.dims, [converter.conversion_for(d) for d in data.dims], strict=True), ), }, - trace=trace, ) assert isinstance(result, xr.DataArray) - trace("Reassigning index-like coordinates.") if trace else None + logger.debug("Reassigning index-like coordinates.") result = result.assign_coords(restore_index_like_coordinates) result = result.assign_coords( {c: v for c, v in original_coords.items() if c not in result.coords},