From 9288d5bc749d2bb5046598e64fd834d381e4d2c2 Mon Sep 17 00:00:00 2001
From: Seth Lawler
Date: Thu, 27 Jun 2024 17:20:51 -0400
Subject: [PATCH] run ruff fix

---
 ripple/conflate/rasfim.py        |  3 +-
 ripple/errors.py                 | 46 +++++++++++-----------
 ripple/ops/create_ras_terrain.py |  1 -
 ripple/ops/subset_gpkg.py        |  1 -
 ripple/process.py                |  4 +-
 ripple/ras.py                    | 30 +++++++--------
 ripple/stacio/fim_collection.py  |  1 -
 ripple/stacio/gpkg_utils.py      | 65 +++++++++++++++++++-------------
 ripple/stacio/utils/dg_utils.py  | 24 ++++++++----
 ripple/stacio/utils/s3_utils.py  | 22 +++++++----
 ripple/utils.py                  | 12 ++----
 11 files changed, 113 insertions(+), 96 deletions(-)

diff --git a/ripple/conflate/rasfim.py b/ripple/conflate/rasfim.py
index 7ea3d577..0055a31f 100644
--- a/ripple/conflate/rasfim.py
+++ b/ripple/conflate/rasfim.py
@@ -55,7 +55,8 @@ class RasFimConflater:
         load_data (bool, optional): Load the data on initialization. Defaults to True.
         bucket (str, optional): S3 bucket to read data from. Defaults to "fim".

-    Raises:
+    Raises
+    ------
         ValueError: Required layer not found in the GeoPackage
         DriverError: Unable to read the GeoPackage
     """
diff --git a/ripple/errors.py b/ripple/errors.py
index 3850ef25..5c8b4507 100644
--- a/ripple/errors.py
+++ b/ripple/errors.py
@@ -1,69 +1,69 @@
-class ProjectionNotFoundError(Exception):
-    pass
+class ProjectionNotFoundError(Exception):  # noqa: D100
+    """Raised when a projection cannot be found."""


 class NoDefaultEPSGError(Exception):
-    pass
+    """Raised when no default EPSG code is available."""


 class ModelNotFoundError(Exception):
-    pass
+    """Raised when a model cannot be found."""


 class NotGeoreferencedError(Exception):
-    pass
+    """Raised when a model is not georeferenced."""


 class CouldNotIdentifyPrimaryPlanError(Exception):
-    pass
+    """Raised when the primary plan cannot be identified."""


 class NoFlowFileSpecifiedError(Exception):
-    pass
+    """Raised when no flow file is specified."""


 class NoGeometryFileSpecifiedError(Exception):
-    pass
+    """Raised when no geometry file is specified."""


 class NotAPrjFile(Exception):
-    pass
+    """Raised when a file is not a .prj file."""


 class NoCRSInferredError(Exception):
-    pass
+    """Raised when no CRS can be inferred."""


 class UnkownCRSUnitsError(Exception):
-    pass
+    """Raised when the CRS units are not recognized."""


 class HECRASVersionNotInstalledError(Exception):
-    pass
+    """Raised when the requested HEC-RAS version is not installed."""


 class NoRiverLayerError(Exception):
-    pass
+    """Raised when no river layer is found."""


 class NoCrossSectionLayerError(Exception):
-    pass
+    """Raised when no cross section layer is found."""


 class FlowTitleAlreadyExistsError(Exception):
-    pass
+    """Raised when the flow title already exists."""


 class PlanTitleAlreadyExistsError(Exception):
-    pass
+    """Raised when the plan title already exists."""


 class CouldNotFindAnyPlansError(Exception):
-    pass
+    """Raised when no plans can be found."""


 class ToManyPlansError(Exception):
-    pass
+    """Raised when too many plans are found."""


 class RASComputeTimeoutError(Exception):
@@ -71,20 +71,20 @@ class RASComputeTimeoutError(Exception):


 class RASComputeError(Exception):
-    """Raised when *.pNN.computeMsgs.txt indicates error"""
+    """Raised when *.pNN.computeMsgs.txt indicates error."""


 class RASComputeMeshError(Exception):
-    """Raised when *.pNN.computeMsgs.txt indicates mesh-specific error"""
+    """Raised when *.pNN.computeMsgs.txt indicates mesh-specific error."""


 class RASGeometryError(Exception):
-    """Raised when *.pNN.computeMsgs.txt indicates geometry-specific error"""
+    """Raised when *.pNN.computeMsgs.txt indicates geometry-specific error."""


 class RASStoreAllMapsError(Exception):
-    """Raised when *.pNN.computeMsgs.txt indicates StoreAllMaps error (related to RAS Mapper postprocessing)"""
+    """Raised when *.pNN.computeMsgs.txt indicates StoreAllMaps error (related to RAS Mapper postprocessing)."""


 class DepthGridNotFoundError(Exception):
-    """Raised when a depth grid is not found when clipping raw RAS output"""
+    """Raised when a depth grid is not found when clipping raw RAS output."""
diff --git a/ripple/ops/create_ras_terrain.py b/ripple/ops/create_ras_terrain.py
index 7d02b881..2cb7dd5e 100644
--- a/ripple/ops/create_ras_terrain.py
+++ b/ripple/ops/create_ras_terrain.py
@@ -44,7 +44,6 @@ def write_projection_file(crs: CRS, terrain_directory: str):

 def main(output_terrain_hdf_filepath: str, gpkg_path: str, conflation_parameters: dict):
     """Requires Windows with geospatial libs, so typically run using OSGeo4W shell."""
-
     if conflation_parameters["us_xs"]["xs_id"] == "-9999":
         print(f"skipping {nwm_id}; no cross sections conflated.")
     else:
diff --git a/ripple/ops/subset_gpkg.py b/ripple/ops/subset_gpkg.py
index 1ee1fada..4b3f5710 100644
--- a/ripple/ops/subset_gpkg.py
+++ b/ripple/ops/subset_gpkg.py
@@ -14,7 +14,6 @@ def new_gpkg(
     """
     Using ripple conflation data, creates a new GPKG from an existing ras geopackage
     """
-
     if ripple_parameters["us_xs"]["xs_id"] == "-9999":
         ripple_parameters["messages"] = f"skipping {nwm_id}; no cross sections conflated."
         print(ripple_parameters["messages"])
diff --git a/ripple/process.py b/ripple/process.py
index c4cd4e05..e772d424 100644
--- a/ripple/process.py
+++ b/ripple/process.py
@@ -246,10 +246,10 @@ def create_flow_depth_combinations(
             of the downstream cross section of the reach
         min_depths (pd.Series): minimum depth to be included. (typically derived from a previous
             normal depth run)

-    Returns:
+    Returns
+    -------
         tuple: tuple of depths, flows, and wses
     """
-
     depths, flows, wses = [], [], []

     for wse, depth in zip(ds_wses, ds_depths):
diff --git a/ripple/ras.py b/ripple/ras.py
index e061a61d..f85e3c72 100644
--- a/ripple/ras.py
+++ b/ripple/ras.py
@@ -323,7 +323,6 @@ def run_sim(
             close_ras (bool, optional): boolean to close RAS or not after computing. Defaults to True.
             show_ras (bool, optional): boolean to show RAS or not when computing. Defaults to True.
""" - compute_message_file = self.ras_project._ras_root_path + f"{self.plan.file_extension}.computeMsgs.txt" RC = win32com.client.Dispatch(f"RAS{self.version}.HECRASCONTROLLER") @@ -429,7 +428,6 @@ def update_rasmapper_for_mapping(self): """ Write a rasmapper file to output depth grids for the current plan """ - # manage rasmapper map_file = f"{self.ras_project._ras_root_path}.rasmap" @@ -694,10 +692,10 @@ def new_plan_contents(self, title: str, short_id: str, flow, geom, run_rasmapper """ populate the content of the plan with basic attributes (title, short_id, flow, and geom) - Raises: + Raises + ------ RuntimeError: raise run time error if the plan already has content associated with it """ - if self.contents: raise RuntimeError(f"content already exists for this plan: {self._ras_text_file_path}") @@ -729,13 +727,14 @@ def read_rating_curves(self) -> dict: """ Read the flow and water surface elevations resulting from the computed plan - Raises: + Raises + ------ FileNotFoundError: _description_ - Returns: + Returns + ------- dict: A dictionary containing "wse" and "flow" keys whose values are pandas dataframes """ - # check if the hdf file exists; raise error if it does not if not self.hdf_file: self.hdf_file = self.text_file + ".hdf" @@ -976,7 +975,8 @@ def write_headers(self, title: str, profile_names: list[str]): title (str): title of the flow profile_names (list[str]): profile names for the flow - Returns: + Returns + ------- list (list[str]): lines of the flow content """ lines = [ @@ -996,7 +996,6 @@ def write_discharges(self, flows: list, river: str, reach: str, river_station: f reach (str): Ras reach river_station (float): Ras river station """ - lines = [] lines.append(f"River Rch & RM={river},{reach.ljust(16,' ')},{str(river_station).ljust(8,' ')}") line = "" @@ -1113,7 +1112,8 @@ def read_contents(self): """ Read contents of the file. Searches for the file locally and on s3. - Raises: + Raises + ------ FileNotFoundError: """ if os.path.exists(self.text_file): @@ -1140,10 +1140,10 @@ def update_crs(self, projection_file: str): Args: projection_file (str): path to projeciton file containing the coordinate system (.prj) - Raises: + Raises + ------ FileNotFoundError: """ - directory = os.path.dirname(self.text_file) crs_base = os.path.basename(projection_file) @@ -1167,7 +1167,6 @@ def add_result_layers(self, plan_short_id: str, profiles: list[str], variable: s profiles (list[str]): Profiles for the output raster(s) variable (str): Variable to create rasters for. Currently "Depth" is the only supported variable. """ - if variable not in ["Depth"]: raise NotImplementedError( f"Variable {variable} not currently implemented. Currently only Depth is supported." @@ -1218,7 +1217,6 @@ def add_terrain(self, terrain_name: str, terrain_path: str): """ Add Terrain to RasMap content """ - lines = [] for line in self.contents.splitlines(): @@ -1235,7 +1233,6 @@ def write(self): """ write Ras Map contents to file """ - logging.info(f"writing: {self.text_file}") with open(self.text_file, "w") as f: @@ -1277,7 +1274,8 @@ def get_new_extension_number(dict_of_ras_subclasses: dict) -> str: dict_of_ras_subclasses (dict): A dictionary containing plan/geom/flow titles as keys and objects plan/geom/flow as values. - Returns: + Returns + ------- new file extension (str): The new file exension. 
""" extension_number = [] diff --git a/ripple/stacio/fim_collection.py b/ripple/stacio/fim_collection.py index d80fb97e..c04c4e0c 100644 --- a/ripple/stacio/fim_collection.py +++ b/ripple/stacio/fim_collection.py @@ -240,7 +240,6 @@ def add_ripple_params( TODO: Placeholder function for adding ripple-params to FIM collection items. This assumes the conflation output is in the same directory as the `project-file` asset """ - if asset_role != "project-file": raise NotImplementedError("Only project-file is supported at this time.") diff --git a/ripple/stacio/gpkg_utils.py b/ripple/stacio/gpkg_utils.py index d79a6c85..359d9fcd 100644 --- a/ripple/stacio/gpkg_utils.py +++ b/ripple/stacio/gpkg_utils.py @@ -41,14 +41,15 @@ def gpkg_to_geodataframe(gpkg_s3_uri: str) -> dict: """ Converts a local geopackage file to a GeoDataFrame. - Parameters: + Parameters + ---------- gpkg_key (str): Path of locally saved geopackage. - Returns: + Returns + ------- gpkg_gdf (dict): dictionary of GeoDataFrame. """ - layers = fiona.listlayers(gpkg_s3_uri) gdfs = {} @@ -69,13 +70,13 @@ def create_thumbnail_from_gpkg(gdfs: dict, png_s3_key: str, bucket: str, s3_clie """ Generates a PNG thumbnail for a geopandas dataframe and uploads it to AWS S3. - Parameters: + Parameters + ---------- - gdf (dict): A dictionary of geopandas dataframes containing the geometries to plot. - png_s3_key (str): The S3 path where the generated PNG thumbnail is to be stored. - bucket (str): The S3 bucket - s3_client: The AWS S3 client instance used for uploading the PNG. """ - # Define colors for each layer type # Plotting @@ -115,15 +116,16 @@ def create_geom_item( """ This function creates a PySTAC Item for a gpkg file stored in an AWS S3 bucket. - Parameters: + Parameters + ---------- gpkg_key (str): gpkg file key for item_id naming. bbox: Item bounding box. footprint: Item Footprint. - Returns: + Returns + ------- pystac.Item: The PySTAC Item representing the gpkg file. """ - gpkg_name = gpkg_key.split("/")[-1].replace(".gpkg", "") item_id = gpkg_name @@ -157,14 +159,15 @@ def parse_featuresproperties(json_data, metadata_to_remove): """ Parses and cleans FeaturesProperties data from json. Removes unwanted fields. - Parameters: + Parameters + ---------- json_data (dict): Input JSON data containing the metadata to be parsed. metadata_to_remove (list): List of metadata fields to be removed from the output. - Returns: + Returns + ------- dict: Organized FeaturesProperties data. """ - # Navigate to FeaturesProperties within the JSON data geometry_files = json_data["Files"]["InputFiles"]["GeometryFiles"]["FeaturesProperties"] @@ -195,11 +198,13 @@ def parse_control_files(json_data, metadata_to_remove): """ Parses and cleans ControlFiles data from json. Removes unwanted fields. - Parameters: + Parameters + ---------- json_data (dict): Input JSON data containing the metadata to be parsed. metadata_to_remove (list): List of metadata fields to be removed from the output. - Returns: + Returns + ------- dict: Organized ControlFiles data. """ # Navigate to ControlFiles within the JSON data @@ -229,11 +234,13 @@ def parse_forcingfiles(json_data, metadata_to_remove): """ Parses and cleans ForcingFiles data from json. Removes unwanted fields. - Parameters: + Parameters + ---------- json_data (dict): Input JSON data containing the metadata to be parsed. metadata_to_remove (list): List of metadata fields to be removed from the output. - Returns: + Returns + ------- dict: Organized ForcingFiles data. 
""" # Navigate to ForcingFiles within the JSON data @@ -265,14 +272,15 @@ def parse_metadata(json_data, metadata_to_remove): """ Parses and cleans metadata from a JSON object. Combines metadata from GeometryFiles, ControlFiles, and ForcingFiles. - Parameters: + Parameters + ---------- json_data (dict): Input JSON data containing the metadata to be parsed. metadata_to_remove (list): List of metadata fields to be removed from the output. - Returns: + Returns + ------- dict: Processed metadata with specified fields removed. """ - # Initialize a single dictionary to hold all reformatted data entries comprehensive_data = {} @@ -308,13 +316,14 @@ def parse_metadata(json_data, metadata_to_remove): def get_asset_info(asset_key: str, bucket: str): """This function generates information for an asset based on its file extension. - Parameters: + Parameters + ---------- asset_key (str): The S3 key of the asset. - Returns: + Returns + ------- dict: A dictionary with the roles, the description, and the title of the asset. """ - file_extension = Path(asset_key).suffix.lstrip(".") title = Path(asset_key).name.replace(" ", "_") description = "" @@ -369,15 +378,16 @@ def find_hash(item_metadata: Dict, asset_file: str): This function searches through a metadata dictionary for an asset file based on the file's extension. It then extracts and returns the hash value associated with that file extension from the metadata. - Parameters: + Parameters + ---------- item_metadata (dict): A dictionary containing metadata items with hash info. asset_file (str): The path to the asset file. - Returns: + Returns + ------- hash_dict (Dict): A dictionary with a single key-value pair where the value is the hash for the asset file. Returns an empty dictionary if no hash is found for the file extension. """ - hash_dict = {} file_extension = Path(asset_file).suffix.lstrip(".") @@ -393,13 +403,14 @@ def remove_hash_from_metadata(item_metadata: Dict): """ Removes "Hash" from each key (if it exists) in metedata dictionary. - Parameters: + Parameters + ---------- item_metadata (Dict): Dictionary of item metadata with hashs. - Returns: + Returns + ------- no_hash_metadata (Dict): New metadata dictionary without hash info. """ - # Copy the dictionary to avoid modifying the original no_hash_metadata = item_metadata.copy() for key in no_hash_metadata: diff --git a/ripple/stacio/utils/dg_utils.py b/ripple/stacio/utils/dg_utils.py index 57a2a87c..89ac4729 100644 --- a/ripple/stacio/utils/dg_utils.py +++ b/ripple/stacio/utils/dg_utils.py @@ -28,11 +28,13 @@ def get_raster_bounds( """ This function retrieves the geographic bounds of a raster file stored in an AWS S3 bucket and returns them in the WGS 84 (EPSG:4326) coordinate reference system. - Parameters: + Parameters + ---------- s3_key (str): The key of the raster file in the S3 bucket. aws_session (AWSSession): The AWS session to use to access the S3 bucket. - Returns: + Returns + ------- Tuple[float, float, float, float]: The geographic bounds of the raster file in the WGS 84 (EPSG:4326) coordinate reference system. The bounds are returned as a tuple of four floats: (west, south, east, north). """ if dev_mode: @@ -55,11 +57,13 @@ def get_raster_metadata(s3_key: str, aws_session: AWSSession, dev_mode: bool = F """ This function retrieves the metadata of a raster file stored in an AWS S3 bucket. - Parameters: + Parameters + ---------- s3_key (str): The key of the raster file in the S3 bucket. aws_session (AWSSession): The AWS session to use to access the S3 bucket. 
-    Returns:
+    Returns
+    -------
        dict: The metadata of the raster file. The metadata is returned as a dictionary where the keys are the
        names of the metadata items and the values are the values of the metadata items.
    """
@@ -76,10 +80,12 @@ def bbox_to_polygon(bbox) -> shapely.Polygon:
    """
    This function converts a bounding box to a Shapely Polygon.

-    Parameters:
+    Parameters
+    ----------
        bbox: The bounding box to convert. It should be a sequence of four numbers: (min_x, min_y, max_x, max_y).

-    Returns:
+    Returns
+    -------
        shapely.Polygon: The Shapely Polygon representing the bounding box. The Polygon is a rectangle with the
        lower left corner at (min_x, min_y) and the upper right corner at (max_x, max_y).
    """
@@ -100,12 +106,14 @@ def create_depth_grid_item(
    """
    This function creates a PySTAC Item for a depth grid raster file stored in an AWS S3 bucket.

-    Parameters:
+    Parameters
+    ----------
        s3_obj (Object): The s3 object of the raster file in the S3 bucket.
        item_id (str): The ID to assign to the PySTAC Item.
        aws_session (AWSSession): The AWS session to use to access the S3 bucket.

-    Returns:
+    Returns
+    -------
        pystac.Item: The PySTAC Item representing the raster file. The Item has an Asset with the href set to
        the S3 key of the raster file, the title set to the name of the raster file, the media type set to COG,
        and the role set to "ras-depth-grid". The Asset's extra fields are updated with the basic object
        metadata of the raster file
diff --git a/ripple/stacio/utils/s3_utils.py b/ripple/stacio/utils/s3_utils.py
index 256c354a..7f2e03dd 100644
--- a/ripple/stacio/utils/s3_utils.py
+++ b/ripple/stacio/utils/s3_utils.py
@@ -14,10 +14,12 @@ def get_basic_object_metadata(obj: ObjectSummary) -> dict:
    """
    This function retrieves basic metadata of an AWS S3 object.

-    Parameters:
+    Parameters
+    ----------
        obj (ObjectSummary): The AWS S3 object.

-    Returns:
+    Returns
+    -------
        dict: A dictionary with the size, ETag, last modified date, storage platform, region, and storage tier
        of the object.
    """
    try:
@@ -38,7 +40,8 @@ def copy_item_to_s3(item, s3_key, s3client):
    """
    This function copies an item to an AWS S3 bucket.

-    Parameters:
+    Parameters
+    ----------
        item: The item to copy. It must have a `to_dict` method that returns a dictionary representation of it.
        s3_key (str): The file path in the S3 bucket to copy the item to.
@@ -56,10 +59,12 @@ def split_s3_key(s3_key: str) -> tuple[str, str]:
    """
    This function splits an S3 key into the bucket name and the key.

-    Parameters:
+    Parameters
+    ----------
        s3_key (str): The S3 key to split. It should be in the format 's3://bucket/key'.

-    Returns:
+    Returns
+    -------
        tuple: A tuple containing the bucket name and the key. If the S3 key does not contain a key, the second
        element of the tuple will be None.
@@ -79,10 +84,12 @@ def s3_key_public_url_converter(url: str, dev_mode: bool = False) -> str:
    """
    This function converts an S3 URL to an HTTPS URL and vice versa.

-    Parameters:
+    Parameters
+    ----------
        url (str): The URL to convert. It should be in the format 's3://bucket/' or 'https://bucket.s3.amazonaws.com/'.

-    Returns:
+    Returns
+    -------
        str: The converted URL. If the input URL is an S3 URL, the function returns an HTTPS URL.
        If the input URL is an HTTPS URL, the function returns an S3 URL.

        2. If the input URL is an S3 URL, it converts it to an HTTPS URL.
        3. If the input URL is an HTTPS URL, it converts it to an S3 URL.
""" - if url.startswith("s3"): bucket = url.replace("s3://", "").split("/")[0] key = url.replace(f"s3://{bucket}", "")[1:] diff --git a/ripple/utils.py b/ripple/utils.py index 51fc05fb..f91ab081 100644 --- a/ripple/utils.py +++ b/ripple/utils.py @@ -125,8 +125,8 @@ def s3_upload_status_file( output location on s3. If e is None, then upload a 'succeed' json file. Either file will have key "time" indicating the time that the file was uploaded. A 'fail' file will also have keys "err" and "traceback" containing the exception as a string and the Python - traceback of the exception, respectively.""" - + traceback of the exception, respectively. + """ s3_output_key_succeed, s3_output_key_fail = s3_get_ripple_status_file_key_names(stac_href, s3_bucket, s3_client) time_now_str = datetime.now(tz=timezone.utc).isoformat() @@ -176,7 +176,8 @@ def s3_get_ripple_status_file_key_names( stac_href: str, s3_bucket: str, s3_client: botocore.client.BaseClient ) -> tuple[str, str]: """Return two S3 key paths, the first to a succeed sentinel file, the second t oa failure sentinel file. - This function does not check if the keys exist.""" + This function does not check if the keys exist. + """ _, s3_output_dir_key = extract_bucketname_and_keyname(s3_get_output_s3path(s3_bucket, stac_href)) s3_output_key_succeed = posixpath.join(s3_output_dir_key, "ripple-succeed.json") s3_output_key_fail = posixpath.join(s3_output_dir_key, "ripple-fail.json") @@ -238,7 +239,6 @@ def get_terrain_exe_path(ras_ver: str) -> str: def xs_concave_hull(xs: gpd.GeoDataFrame) -> gpd.GeoDataFrame: """Compute and return the concave hull (polygon) for a set of cross sections (lines all facing the same direction).""" - points = xs.boundary.explode(index_parts=True).unstack() points_last_xs = [Point(coord) for coord in xs["geometry"].iloc[-1].coords] points_first_xs = [Point(coord) for coord in xs["geometry"].iloc[0].coords[::-1]] @@ -296,7 +296,6 @@ def download_model( """ Download HEC-RAS model from stac href """ - # make RAS directory if it does not exists if not os.path.exists(ras_directory): os.makedirs(ras_directory) @@ -366,7 +365,6 @@ def text_block_from_start_end_str(start_str: str, end_str: str, lines: list, inc Search for an exact match to the start_str and return all lines from there to a line that contains the end_str. """ - results = [] in_block = False for line in lines: @@ -393,7 +391,6 @@ def text_block_from_start_str_to_empty_line(start_str: str, lines: list): Search for an exact match to the start_str and return all lines from there to the next empty line. """ - results = [] in_block = False for line in lines: @@ -419,7 +416,6 @@ def text_block_from_start_str_length(start_str: str, number_of_lines: int, lines start_token: """ - results = [] in_block = False for line in lines: