Skip to content

Commit

Permalink
go thru regular workflow to test boundaries in ui for derived bounds
Browse files Browse the repository at this point in the history
  • Loading branch information
GondekNP committed Jan 25, 2024
2 parents 36db13b + 85b4839 commit 94388b9
Show file tree
Hide file tree
Showing 4 changed files with 96 additions and 61 deletions.
2 changes: 1 addition & 1 deletion app.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,7 +191,7 @@ def analyze_burn(
# )
# sftp_client.disconnect()

# TODO: Excessive SFTP connections, refactor to use a context manager
# TODO [#15]: Excessive SFTP connections, refactor to use a context manager
# Overly conservative, connections and disconnects - likely avoided entirely by smart-open
# but if not, should be refactored to use a context manager

Expand Down
8 changes: 4 additions & 4 deletions src/lib/query_sentinel.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,11 +48,11 @@ def __init__(
print("Initialized Sentinel2Client with bounds: {}".format(self.bbox))

def set_boundary(self, geojson_bounds):
gpd.GeoDataFrame.from_features(geojson_bounds)
boundary_gpd = gpd.GeoDataFrame.from_features(geojson_bounds)
# TODO [#7]: Generalize Sentinel2Client to accept any CRS
# This is hard-coded to assume 4326 - when we draw an AOI, we will change this logic depending on what makes frontend sense
if not geojson_bounds.crs:
geojson_bounds = geojson_bounds.set_crs("EPSG:4326")
if not boundary_gpd.crs:
geojson_bounds = boundary_gpd.set_crs("EPSG:4326")
self.geojson_bounds = geojson_bounds.to_crs(self.crs)

geojson_bbox = geojson_bounds.bounds.to_numpy()[0]
Expand Down Expand Up @@ -190,7 +190,7 @@ def classify(self, thresholds, threshold_source, burn_metric="dnbr"):

def derive_boundary(self, metric_name="rbr", threshold=0.15):
metric_layer = self.metrics_stack.sel(burn_metric=metric_name)
boundary = metric_layer.where(metric_layer < threshold, 0)
boundary = metric_layer > threshold

# convert to geojson
boundary = boundary.rio.clip(
Expand Down
50 changes: 28 additions & 22 deletions src/static/upload/upload.html
Original file line number Diff line number Diff line change
Expand Up @@ -236,6 +236,7 @@
// Get the drawn shape
var drawnGeojson = editableLayers.toGeoJSON();
formData.append("geojson", JSON.stringify(drawnGeojson));
var deriveBoundary = true;

// Make a request to upload the drawn shape
var upload = $.ajax({
Expand All @@ -256,7 +257,8 @@
} else if ($("#shp_zip")[0].files.length > 0) {
// Add the shapefile to the form data
formData.append("shapefile", $("#shp_zip")[0].files[0]);

formData.append("derive_boundary", false);
var deriveBoundary = false;
// Make a request to upload the shapefile
var upload = $.ajax({
url: `/api/upload-shapefile-zip`,
Expand All @@ -281,7 +283,7 @@
console.log("upload success", uploadResponse);

// Make a request to analyze the burn
$.ajax({
var analyzeBurn = $.ajax({
url: "/api/query-satellite/analyze-burn",
type: "post",
dataType: "json",
Expand All @@ -290,6 +292,7 @@
geojson: uploadResponse.geojson,
fire_event_name: $("#fire_event_name").val(),
affiliation: $("#affiliation").val(),
derive_boundary: deriveBoundary,
date_ranges: {
prefire: [$("#prefire-start").val(), $("#prefire-end").val()],
postfire: [
Expand All @@ -309,26 +312,29 @@
},
});

// Make a request to analyze the ecoclassification
$.ajax({
url: "/api/query-soil/analyze-ecoclass",
type: "post",
dataType: "json",
contentType: "application/json",
data: JSON.stringify({
geojson: uploadResponse.geojson,
fire_event_name: $("#fire_event_name").val(),
affiliation: $("#affiliation").val(),
}),
success: function () {
$("#ecoclass-analysis-loading").hide();
$("#ecoclass-analysis-success").show();
},
error: function () {
console.error(error);
$("#ecoclass-analysis-loading").hide();
alert("Ecoclass analysis failed");
},
// Make a request to analyze the ecoclassification, if analysis is successful
$.when(analyzeBurn).done(function (analysisResponse) {
console.log("burn analysis success", analysisResponse);
$.ajax({
url: "/api/query-soil/analyze-ecoclass",
type: "post",
dataType: "json",
contentType: "application/json",
data: JSON.stringify({
geojson: uploadResponse.geojson,
fire_event_name: $("#fire_event_name").val(),
affiliation: $("#affiliation").val(),
}),
success: function () {
$("#ecoclass-analysis-loading").hide();
$("#ecoclass-analysis-success").show();
},
error: function () {
console.error(error);
$("#ecoclass-analysis-loading").hide();
alert("Ecoclass analysis failed");
},
});
});
})
.fail(function (error) {
Expand Down
97 changes: 63 additions & 34 deletions src/util/sftp.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from google.cloud import logging as cloud_logging


# TODO [#9]: Convert to agnostic Boto client
# TODO [#9]: Convert to agnostic Boto client
# Use the slick smart-open library to handle S3 connections. This maintains the agnostic nature
# of sftp, not tied to any specific cloud provider, but is way more efficient than paramiko/sftp in terms of $$
class SFTPClient:
Expand All @@ -29,16 +29,20 @@ def __init__(self, hostname, username, private_key, port=22):
self.available_cogs = None

# Set up logging
logging_client = cloud_logging.Client(project='dse-nps')
logging_client = cloud_logging.Client(project="dse-nps")
log_name = "burn-backend"
self.logger = logging_client.logger(log_name)

# Route Paramiko logs to Google Cloud Logging
paramiko_logger = logging.getLogger("paramiko")
paramiko_logger.setLevel(logging.DEBUG)
paramiko_logger.addHandler(cloud_logging.handlers.CloudLoggingHandler(logging_client, name=log_name))
paramiko_logger.addHandler(
cloud_logging.handlers.CloudLoggingHandler(logging_client, name=log_name)
)

self.logger.log_text(f"Initialized SFTPClient for {self.hostname} as {self.username}")
self.logger.log_text(
f"Initialized SFTPClient for {self.hostname} as {self.username}"
)

def connect(self):
"""Connects to the sftp server and returns the sftp connection object"""
Expand Down Expand Up @@ -85,7 +89,6 @@ def download(self, remote_path, target_local_path):
"""

try:

# Create the target directory if it does not exist
path, _ = os.path.split(target_local_path)
if not os.path.isdir(path):
Expand Down Expand Up @@ -132,89 +135,115 @@ def update_available_cogs(self):
self.available_cogs = self.get_available_cogs()
self.disconnect()

def upload_cogs(
    self,
    metrics_stack,
    fire_event_name,
    prefire_date_range,
    postfire_date_range,
    affiliation,
):
    """Export each burn-metric band of `metrics_stack` as a GeoTIFF and upload it.

    Also computes and uploads the percent change between the RBR and dNBR
    metrics. All files go to `{affiliation}/{fire_event_name}/` on the remote.

    Args:
        metrics_stack: Raster stack with a `burn_metric` dimension and a
            rioxarray `.rio` accessor — assumed xarray DataArray; TODO confirm.
        fire_event_name: Fire event name, used in the remote path.
        prefire_date_range: Unused here; kept for signature compatibility.
        postfire_date_range: Unused here; kept for signature compatibility.
        affiliation: Requesting organization; used as the remote path prefix.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        for band_name in metrics_stack.burn_metric.to_index():
            # Save the band as a local COG
            local_cog_path = os.path.join(tmpdir, f"{band_name}.tif")
            band_cog = metrics_stack.sel(burn_metric=band_name).rio
            band_cog.to_raster(local_cog_path, driver="GTiff")

            # Update the COG with overviews, for faster loading at lower zoom levels
            self.logger.log_text(f"Updating {band_name} with overviews")
            with rasterio.open(local_cog_path, "r+") as ds:
                ds.build_overviews([2, 4, 8, 16, 32], Resampling.nearest)
                ds.update_tags(ns="rio_overview", resampling="nearest")

            self.upload(
                source_local_path=local_cog_path,
                remote_path=f"{affiliation}/{fire_event_name}/{band_name}.tif",
            )

        # Upload the percent difference between dNBR and RBR
        local_cog_path = os.path.join(tmpdir, "pct_change_dnbr_rbr.tif")
        pct_change = (
            (
                metrics_stack.sel(burn_metric="rbr")
                - metrics_stack.sel(burn_metric="dnbr")
            )
            / metrics_stack.sel(burn_metric="dnbr")
            * 100
        )
        pct_change.rio.to_raster(local_cog_path, driver="GTiff")
        self.upload(
            source_local_path=local_cog_path,
            remote_path=f"{affiliation}/{fire_event_name}/pct_change_dnbr_rbr.tif",
        )


def update_manifest(
    self,
    fire_event_name,
    bounds,
    prefire_date_range,
    postfire_date_range,
    affiliation,
    derive_boundary=None,
):
    """Insert or overwrite this fire event's entry in the remote manifest.

    Downloads the current `manifest.json`, replaces the entry keyed by
    `fire_event_name`, and uploads the result back to the SFTP server.

    Args:
        fire_event_name: Key of the event in the manifest.
        bounds: Bounding box of the event's raster data.
        prefire_date_range: Pre-fire imagery date range to record.
        postfire_date_range: Post-fire imagery date range to record.
        affiliation: Requesting organization, recorded in the entry.
        derive_boundary: Whether the boundary was derived rather than
            user-supplied. Defaults to None for backward compatibility;
            `upload_fire_event` passes this keyword, so the signature must
            accept it (it previously did not, which raised a TypeError).
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        manifest = self.get_manifest()

        if fire_event_name in manifest:
            self.logger.log_text(
                f"Fire event {fire_event_name} already exists in manifest. Overwriting."
            )

        # Plain assignment replaces any existing entry outright, so no
        # explicit delete is needed first.
        manifest[fire_event_name] = {
            "bounds": bounds,
            "prefire_date_range": prefire_date_range,
            "postfire_date_range": postfire_date_range,
            "last_updated": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            "requester_affiliation": affiliation,
            "derive_boundary": derive_boundary,
        }

        # Upload the updated manifest to our SFTP server
        tmp_manifest_path = os.path.join(tmpdir, "manifest_updated.json")
        with open(tmp_manifest_path, "w") as f:
            json.dump(manifest, f)
        self.upload(source_local_path=tmp_manifest_path, remote_path="manifest.json")
        self.logger.log_text("Uploaded/updated manifest.json")

def upload_fire_event(
    self,
    metrics_stack,
    fire_event_name,
    prefire_date_range,
    postfire_date_range,
    affiliation,
    derive_boundary,
):
    """Upload all burn-metric COGs for a fire event and record it in the manifest.

    Args:
        metrics_stack: Raster stack with a `burn_metric` dimension and a
            rioxarray `.rio` accessor — assumed xarray DataArray; TODO confirm.
        fire_event_name: Fire event name, used for remote paths and manifest key.
        prefire_date_range: Pre-fire imagery date range, recorded in the manifest.
        postfire_date_range: Post-fire imagery date range, recorded in the manifest.
        affiliation: Requesting organization; remote path prefix and manifest field.
        derive_boundary: Whether the boundary was derived from imagery rather
            than user-supplied; forwarded to `update_manifest`, which must
            accept this keyword.
    """
    self.logger.log_text(f"Uploading fire event {fire_event_name}")

    self.upload_cogs(
        metrics_stack=metrics_stack,
        fire_event_name=fire_event_name,
        prefire_date_range=prefire_date_range,
        postfire_date_range=postfire_date_range,
        affiliation=affiliation,
    )

    # Round the bounds to keep the manifest compact
    bounds = [round(pos, 4) for pos in metrics_stack.rio.bounds()]

    self.update_manifest(
        fire_event_name=fire_event_name,
        bounds=bounds,
        prefire_date_range=prefire_date_range,
        postfire_date_range=postfire_date_range,
        affiliation=affiliation,
        derive_boundary=derive_boundary,
    )

def get_manifest(self):
    """Download and parse `manifest.json` from the SFTP server.

    Returns:
        dict: Mapping of fire event name to its manifest metadata.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        # os.path.join so the file lands INSIDE the temp dir and is removed
        # with it; the previous `tmpdir + "tmp_manifest.json"` concatenation
        # produced a sibling path that was never cleaned up.
        tmp_manifest_path = os.path.join(tmpdir, "tmp_manifest.json")
        self.download("manifest.json", tmp_manifest_path)
        self.logger.log_text("Got manifest.json")
        # `with` closes the handle; the old bare open() leaked it.
        with open(tmp_manifest_path, "r") as f:
            return json.load(f)

0 comments on commit 94388b9

Please sign in to comment.