Skip to content

Commit

Permalink
complete /api/query-soil/analyze-ecoclass endpoint - geojson to WSS /…
Browse files Browse the repository at this point in the history
… EDIT covertype
  • Loading branch information
GondekNP committed Jan 16, 2024
1 parent 4a9f785 commit 52a0cfd
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 8 deletions.
34 changes: 29 additions & 5 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,7 @@ def analyze_burn(body: AnaylzeBurnPOSTBody, sftp_client: SFTPClient = Depends(ge
class QuerySoilPOSTBody(BaseModel):
    """Request payload shared by the /api/query-soil/* endpoints.

    Carries the area of interest plus the identifiers used to build the
    remote upload path (``{affiliation}/{fire_event_name}/...``).
    """

    # GeoJSON (as a dict) describing the area of interest; forwarded to the
    # Soil Data Mart map-unit polygon query.
    geojson: dict
    # Fire event identifier; second segment of the SFTP upload path.
    fire_event_name: str
    # Requesting organization; first segment of the SFTP upload path.
    affiliation: str

@app.post('/api/query-soil/get-esa-mapunitid-poly')
def get_esa_mapunitid_poly(body: QuerySoilPOSTBody):
Expand Down Expand Up @@ -200,15 +201,18 @@ def get_ecoclass_info(ecoclassid: str = Query(...)):

# TODO: refactor out the low level endpoints and rename others (this isn't really an `analysis` but it does compose a lot of logic like `analyze-burn`)
@app.post("/api/query-soil/analyze-ecoclass")
def analyze_ecoclass(body: QuerySoilPOSTBody):
def analyze_ecoclass(body: QuerySoilPOSTBody, sftp_client: SFTPClient = Depends(get_sftp_client)):
fire_event_name = body.fire_event_name
geojson = body.geojson
affiliation = body.affiliation

try:
mapunit_gdf = sdm_get_esa_mapunitid_poly(geojson)

mu_pair_tuples = [(musynm, nationalmusym) for musynm, nationalmusym, __mukey in mapunit_gdf.index.to_list()]

# TODO: gross type conversion - preserving the pydantic model for now in case lower level calls are useful, but will want to decide later
mu_pair_tuples = [MUPair(mu_pair=(str(row['nationalmusym']), str(row['musym']))) for _, row in mapunit_gdf.iterrows()]
# mu_pair_tuples = [MUPair(mu_pair=(str(row['nationalmusym']), str(row['musym']))) for _, row in mapunit_gdf.iterrows()]

mrla_df = sdm_get_ecoclassid_from_mu_info(mu_pair_tuples)

Expand All @@ -222,10 +226,30 @@ def analyze_ecoclass(body: QuerySoilPOSTBody):
edit_ecoclass_df_row_dicts.append(edit_ecoclass_df_row_dict)
else:
logger.log_text(f"Missing: {edit_ecoclass_json} doesn't exist within EDIT backend")
edit_ecoclass_df = pd.DataFrame(edit_ecoclass_df_row_dicts)
edit_ecoclass_df = pd.DataFrame(edit_ecoclass_df_row_dicts).set_index('ecoclassid')

# join mapunitids with link table for ecoclassids
mapunit_with_ecoclassid_df = mapunit_gdf.join(mrla_df).set_index('ecoclassid')
mapunit_with_ecoclassid_df.drop(['spatialversion', 'AoiPartName', 'MLRA', 'MLRA_Name'], axis = 'columns', inplace = True)

# join ecoclassids with edit ecoclass info, to get spatial ecoclass info
edit_ecoclass_geojson = mapunit_with_ecoclassid_df.join(edit_ecoclass_df, how='left').to_json()

# save the ecoclass_geojson to the FTP server
with tempfile.NamedTemporaryFile(suffix=".geojson", delete=False) as tmp:
tmp_geojson_path = tmp.name
with open(tmp_geojson_path, "w") as f:
f.write(edit_ecoclass_geojson)
sftp_client.connect()
sftp_client.upload(
source_local_path=tmp_geojson_path,
remote_path=f"{affiliation}/{fire_event_name}/ecoclass_dominant_cover.geojson"
)
sftp_client.disconnect()

logger.log_text(f"Ecoclass GeoJSON uploaded for {fire_event_name}")
return f"Ecoclass GeoJSON uploaded for {fire_event_name}", 200

return 200

except Exception as e:
logger.log_text(f"Error: {e}")
return f"Error: {e}", 400
Expand Down
11 changes: 8 additions & 3 deletions src/lib/query_soil.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,10 @@ def sdm_get_esa_mapunitid_poly(geojson):
# Swap x and y coordinates, as GML2 is lon, lat and everything else is lat, lon
mapunit_gdf.geometry = mapunit_gdf.geometry.map(lambda polygon: transform(lambda x, y: (y, x), polygon))

# Set composite key
mapunit_gdf['musym'] = mapunit_gdf['musym'].astype(str)
mapunit_gdf.set_index(['musym', 'nationalmusym', 'mukey'], inplace=True)

return mapunit_gdf

elif response.status_code == 400:
Expand Down Expand Up @@ -136,10 +140,10 @@ def sdm_get_ecoclassid_from_mu_info(mu_info_list):
GROUP BY lao.areasymbol, lao.areaname, ecoclassid, ecoclassname, muname, mu.mukey, musym, nationalmusym, legend.areasymbol, legend.areaname
ORDER BY lao.areasymbol ASC, ecoclassid
"""
mu_pairs = [mu_info.mu_pair for mu_info in mu_info_list]
# mu_pairs = [mu_info.mu_pair for mu_info in mu_info_list]

# TODO: Hacky SQL 98 solution to lack of tuples (should revisit)
conditions = ' OR '.join("(nationalmusym = '{}' AND musym = '{}')".format(nationalmusym, musym) for nationalmusym, musym in mu_pairs)
conditions = ' OR '.join("(musym = '{}' AND nationalmusym = '{}')".format(nationalmusym, musym) for nationalmusym, musym in mu_info_list)
query = SQL_QUERY.format(conditions)
query = ' '.join(query.split()) # remove newlines and extra spaces
query = urllib.parse.quote_plus(query)
Expand All @@ -154,7 +158,8 @@ def sdm_get_ecoclassid_from_mu_info(mu_info_list):
mu_info_df.columns = mu_info_df.iloc[0]
mu_info_df = mu_info_df[1:]
mu_info_df = mu_info_df.reset_index(drop=True)

mu_info_df.set_index(['musym', 'nationalmusym', 'mukey'], inplace=True)

return mu_info_df
else:
raise Exception(f"Error in SDM: {response.status_code}, {response.content}")
Expand Down

0 comments on commit 52a0cfd

Please sign in to comment.