-
Notifications
You must be signed in to change notification settings - Fork 6
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #230 from raphaelrpl/b-0.8
Add support to publish HDF files (modis) (close #221)
- Loading branch information
Showing
4 changed files
with
124 additions
and
2 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,99 @@ | ||
# | ||
# This file is part of Brazil Data Cube Collection Builder. | ||
# Copyright (C) 2019-2020 INPE. | ||
# | ||
# Brazil Data Cube Collection Builder is free software; you can redistribute it and/or modify it | ||
# under the terms of the MIT License; see LICENSE file for more details. | ||
# | ||
|
||
"""Module to deal with Hierarchical Data Format (HDF4/HDF5).""" | ||
|
||
from pathlib import Path | ||
from typing import NamedTuple | ||
|
||
from osgeo import gdal | ||
|
||
# Map NumPy dtype names onto the matching GDAL raster data types.
DTYPES = {
    'uint8': gdal.GDT_Byte,
    'int16': gdal.GDT_Int16,
    'uint16': gdal.GDT_UInt16,
    'int32': gdal.GDT_Int32,
    'uint32': gdal.GDT_UInt32,
}


class ItemResult(NamedTuple):
    """Type to represent the extracted scenes from a Hierarchical Data Format (HDF4/HDF5)."""

    # Mapping of band name -> path of the extracted GeoTIFF file.
    files: dict
    # Scene cloud cover percentage (0 when not reported).
    cloud_cover: float
|
||
|
||
def to_geotiff(hdf_path: str, destination: str) -> ItemResult:
    """Convert a Hierarchical Data Format (HDF4/HDF5) file to a set of GeoTIFF files.

    Args:
        hdf_path (str): Path to the HDF file to be extracted.
        destination (str): The destination folder.

    Note:
        The output GeoTIFF files are not Cloud Optimized GeoTIFF (COG).

    Tip:
        You may use the utility :meth:`bdc_collection_builder.collections.utils.generate_cogs`
        to generate Cloud Optimized GeoTIFF files.

    Raises:
        IOError: When the input file is not a valid data set.

    Returns:
        ItemResult: A struct containing the extracted files and the scene cloud cover.
    """
    data_set = gdal.Open(hdf_path)

    if data_set is None:
        raise IOError(f'Could not open {str(hdf_path)}')

    base_name = Path(hdf_path).stem
    metadata = data_set.GetMetadata()
    # 'QAPERCENTCLOUDCOVER.1' may be absent or an empty string; default to 0.
    cloud_cover = float(metadata.get('QAPERCENTCLOUDCOVER.1') or 0)
    output_path = Path(destination)

    geotiff_driver = gdal.GetDriverByName('GTiff')
    files = dict()

    # The metadata keys ('PARAMETERNAME.<n>') are 1-based, hence start=1.
    for band_idx, (sub_data_set_name, _) in enumerate(data_set.GetSubDatasets(), start=1):
        formal_name = metadata[f'PARAMETERNAME.{band_idx}']
        # Keep everything from the 4th word on as the band identifier
        # (e.g. '... Surface Reflectance Band 1' -> 'Band_1') — TODO confirm
        # against the MODIS PARAMETERNAME layout.
        band_name = '_'.join(formal_name.split(' ')[3:])

        # Open the sub data set under its own name: the previous revision
        # re-bound ``data_set`` here, silently discarding the handle to the
        # parent HDF data set inside the loop.
        sub_data_set = gdal.Open(sub_data_set_name)
        band = sub_data_set.GetRasterBand(1)
        array = band.ReadAsArray()

        tiff_file = output_path / f'{base_name}_{band_name}.tif'

        output_data_set = geotiff_driver.Create(
            str(tiff_file),
            sub_data_set.RasterXSize,
            sub_data_set.RasterYSize,
            1,
            DTYPES[array.dtype.name]
        )
        output_data_set_band = output_data_set.GetRasterBand(1)
        output_data_set.SetGeoTransform(sub_data_set.GetGeoTransform())
        output_data_set.SetProjection(sub_data_set.GetProjection())
        output_data_set.SetMetadata(metadata)
        output_data_set_band.WriteArray(array)
        output_data_set_band.SetNoDataValue(0)

        files[band_name] = str(tiff_file)

        # GDAL flushes and closes data sets when the Python references are
        # dropped — dereference explicitly so each GeoTIFF is written out.
        output_data_set_band = None
        output_data_set = None
        sub_data_set = None

    return ItemResult(files=files, cloud_cover=cloud_cover)
|
||
|
||
def is_valid(file_path: str) -> bool:
    """Check the HDF file integrity with GDAL library.

    Returns ``True`` when GDAL can open ``file_path``, ``False`` otherwise.
    """
    return gdal.Open(file_path) is not None
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -39,7 +39,11 @@ | |
# Optional dependency groups; 'docs_require', 'tests_require' and
# 'harmonization_require' are defined earlier in this file.
extras_require = {
    'docs': docs_require,
    'tests': tests_require,
    'harmonization': harmonization_require,
    'gdal': [
        'GDAL>=2.3',
        'bdc-collectors @ git+git://github.com/brazil-data-cube/[email protected]#egg=bdc-collectors[modis]',
    ],
}

# Aggregate every extra into a single 'all' target. Iterate .values()
# directly since the group names are not used.
extras_require['all'] = [req for reqs in extras_require.values() for req in reqs]
|