diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index f95d8a5..05cf1c0 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -11,9 +11,17 @@ updates:
       interval: weekly
     labels:
       - bumpless
+    groups:
+      pip-deps:
+        patterns:
+          - "*"
   - package-ecosystem: github-actions
     directory: /
     schedule:
       interval: weekly
     labels:
       - bumpless
+    groups:
+      github-actions-deps:
+        patterns:
+          - "*"
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9ad38bb..62b535d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,18 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
 and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.1.4]
+
+### Added
+- Added functionality to `prep_rtc.py` and `upload_rtc.py` to accept SLCs or co-pol bursts.
+- Added a `--num-workers` CLI parameter to `prep_rtc`.
+
+### Changed
+- Updated the DEM fetching/tiling strategy to match OPERA's.
+- Updated the DEM bounds buffer from 0.025 degrees to 100 km.
+- Updated the burst database to the OPERA-provided `burst_db_0.2.0_230831-bbox-only.sqlite` file.
+- Updated our [PGE RunConfig template](./src/hyp3_opera_rtc/templates/pge.yml.j2) to more closely align with the [upstream version](https://github.com/nasa/opera-sds-pcm/blob/9bd74458957197b0c6680540c8d09c26ffab81df/conf/RunConfig.yaml.L2_RTC_S1.jinja2.tmpl).
+
 ## [0.1.3]
 
 ### Changed
diff --git a/Dockerfile b/Dockerfile
index a235dbd..287560b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -15,7 +15,7 @@ USER root
 RUN chown rtc_user:rtc_user /home/rtc_user/scratch
 USER rtc_user
 
-RUN curl https://asf-dem-west.s3.amazonaws.com/AUX/opera-burst-bbox-only.sqlite3 -o /home/rtc_user/opera-burst-bbox-only.sqlite3
+RUN curl https://asf-dem-west.s3.amazonaws.com/AUX/burst_db_0.2.0_230831-bbox-only.sqlite -o /home/rtc_user/burst_db_0.2.0_230831-bbox-only.sqlite
 
 COPY --chown=rtc_user:rtc_user . /home/rtc_user/hyp3-opera-rtc/
 RUN conda env create -f /home/rtc_user/hyp3-opera-rtc/environment.yml && \
diff --git a/README.md b/README.md
index 713c386..7dbd7c3 100644
--- a/README.md
+++ b/README.md
@@ -38,7 +38,7 @@ docker run -it --rm \
     [CO_POL_GRANULE]
 ```
 
-Where you replace `[CO_POL_GRANULE]` with the name of the Sentinel-1 co-pol burst SLC scene
+Where you replace `[CO_POL_GRANULE]` with the name of the Sentinel-1 co-pol burst or SLC scene
 for which to generate the OPERA RTC product.
 
 Here are some useful examples:
@@ -49,7 +49,7 @@ Here are some useful examples:
 
 ## Architecture
 
-The plugin is composed of three nested docker environments that depend on eachother. They are laid out as below:
+The plugin is composed of three nested docker environments that depend on each other.
They are laid out as below: ``` +-------------------------+ diff --git a/requirements-static.txt b/requirements-static.txt index 989a2ca..32b7581 100644 --- a/requirements-static.txt +++ b/requirements-static.txt @@ -1,5 +1,5 @@ -ruff==0.11.11 -mypy==1.15.0 +ruff==0.12.2 +mypy==1.16.1 lxml-stubs types-shapely types-requests diff --git a/src/hyp3_opera_rtc/dem.py b/src/hyp3_opera_rtc/dem.py index 0a6ba05..cff0887 100644 --- a/src/hyp3_opera_rtc/dem.py +++ b/src/hyp3_opera_rtc/dem.py @@ -1,25 +1,59 @@ -from concurrent.futures import ThreadPoolExecutor -from itertools import product +from collections.abc import Callable from pathlib import Path -from tempfile import TemporaryDirectory +from tempfile import NamedTemporaryFile import numpy as np import shapely import shapely.ops import shapely.wkt -from hyp3lib.fetch import download_file -from osgeo import gdal -from shapely.geometry import LinearRing, Polygon +from hyp3lib.util import GDALConfigManager +from osgeo import gdal, osr +from shapely.geometry import LinearRing, Polygon, box gdal.UseExceptions() -URL = 'https://nisar.asf.earthdatacloud.nasa.gov/STATIC/DEM/v1.1/EPSG4326' +EARTH_APPROX_CIRCUMFERENCE_KM = 40075017.0 +EARTH_RADIUS_KM = EARTH_APPROX_CIRCUMFERENCE_KM / (2 * np.pi) +DEM_MARGIN_KM = 200 -def check_antimeridean(poly: Polygon) -> list[Polygon]: + +def margin_km_to_deg(margin_in_km: float) -> float: + """Converts a margin value from kilometers to degrees.""" + km_to_deg_at_equator = 1000.0 / (EARTH_APPROX_CIRCUMFERENCE_KM / 360.0) + margin_in_deg = margin_in_km * km_to_deg_at_equator + return margin_in_deg + + +def margin_km_to_longitude_deg(margin_in_km: float, lat: float) -> float: + """Converts a margin value from kilometers to degrees as a function of latitude.""" + delta_lon = 180 * 1000 * margin_in_km / (np.pi * EARTH_RADIUS_KM * np.cos(np.pi * lat / 180)) + return delta_lon + + +def polygon_from_bounds(bounds: tuple[float, float, float, float]) -> Polygon: + """Create a polygon (EPSG:4326) from the lat/lon coordinates corresponding to a provided bounding box.""" + lon_min, lat_min, lon_max, lat_max = bounds + # note we can also use the center lat here + lat_worst_case = max([lat_min, lat_max]) + lat_margin = margin_km_to_deg(DEM_MARGIN_KM) + lon_margin = margin_km_to_longitude_deg(DEM_MARGIN_KM, lat=lat_worst_case) + # Check if the bbox crosses the antimeridian and apply the margin accordingly + # so that any resultant DEM is split properly by check_dateline + if lon_max - lon_min > 180: + lon_min, lon_max = lon_max, lon_min + + poly = box( + lon_min - lon_margin, max([lat_min - lat_margin, -90]), lon_max + lon_margin, min([lat_max + lat_margin, 90]) + ) + return poly + + +def split_antimeridian(poly: Polygon) -> list[Polygon]: + """Check if the provided polygon crosses the antimeridian and split it if it does.""" x_min, _, x_max, _ = poly.bounds - # Check anitmeridean crossing + # Check anitmeridian crossing if (x_max - x_min > 180.0) or (x_min <= 180.0 <= x_max): dateline = shapely.wkt.loads('LINESTRING( 180.0 -90.0, 180.0 90.0)') @@ -54,38 +88,93 @@ def check_antimeridean(poly: Polygon) -> list[Polygon]: return polys -def get_dem_granule_url(lat: int, lon: int) -> str: - lat_tens = np.floor_divide(lat, 10) * 10 - lat_cardinal = 'S' if lat_tens < 0 else 'N' - - lon_tens = np.floor_divide(lon, 20) * 20 - lon_cardinal = 'W' if lon_tens < 0 else 'E' - - prefix = f'{lat_cardinal}{np.abs(lat_tens):02d}_{lon_cardinal}{np.abs(lon_tens):03d}' - filename = 
f'DEM_{lat_cardinal}{np.abs(lat):02d}_00_{lon_cardinal}{np.abs(lon):03d}_00.tif' - file_url = f'{URL}/{prefix}/{filename}' - return file_url - - -def get_latlon_pairs(polygon: Polygon) -> list: - minx, miny, maxx, maxy = polygon.bounds - lats = np.arange(np.floor(miny), np.floor(maxy) + 1).astype(int) - lons = np.arange(np.floor(minx), np.floor(maxx) + 1).astype(int) - return list(product(lats, lons)) - - -def download_opera_dem_for_footprint(output_path: Path, footprint: Polygon) -> None: - footprints = check_antimeridean(footprint) - latlon_pairs = [] - for footprint in footprints: - latlon_pairs += get_latlon_pairs(footprint) - urls = [get_dem_granule_url(lat, lon) for lat, lon in latlon_pairs] - - with TemporaryDirectory() as tmpdir_str: - tmpdir = Path(tmpdir_str) - with ThreadPoolExecutor(max_workers=4) as executor: - executor.map(lambda url: download_file(url, str(tmpdir)), urls) - vrt_filepath = str(tmpdir / 'dem.vrt') - input_files = [str(file) for file in tmpdir.glob('*.tif')] - gdal.BuildVRT(vrt_filepath, input_files) - gdal.Translate(str(output_path), vrt_filepath, format='GTiff') +def snap_coord(val: float, snap: float, offset: float, round_func: Callable) -> float: + return round_func(float(val - offset) / snap) * snap + offset + + +def translate_dem(vrt_filename: str, output_path: str, bounds: tuple[float, float, float, float]) -> None: + """Write a local subset of the OPERA DEM for a region matching the provided bounds. + + Params: + vrt_filename: Path to the input VRT file + output_path: Path to the translated output GTiff file + bounds: Bounding box in the form of (lon_min, lat_min, lon_max, lat_max) + """ + ds = gdal.Open(vrt_filename, gdal.GA_ReadOnly) + + # update cropping coordinates to not exceed the input DEM bounding box + input_x_min, xres, _, input_y_max, _, yres = ds.GetGeoTransform() + length = ds.GetRasterBand(1).YSize + width = ds.GetRasterBand(1).XSize + + # Snap edge coordinates using the DEM pixel spacing + # (xres and yres) and starting coordinates (input_x_min and + # input_x_max). Maximum values are rounded using np.ceil + # and minimum values are rounded using np.floor + x_min, y_min, x_max, y_max = bounds + snapped_x_min = snap_coord(x_min, xres, input_x_min, np.floor) + snapped_x_max = snap_coord(x_max, xres, input_x_min, np.ceil) + snapped_y_min = snap_coord(y_min, yres, input_y_max, np.floor) + snapped_y_max = snap_coord(y_max, yres, input_y_max, np.ceil) + + input_y_min = input_y_max + length * yres + input_x_max = input_x_min + width * xres + + adjusted_x_min = max(snapped_x_min, input_x_min) + adjusted_x_max = min(snapped_x_max, input_x_max) + adjusted_y_min = max(snapped_y_min, input_y_min) + adjusted_y_max = min(snapped_y_max, input_y_max) + + try: + gdal.Translate( + output_path, ds, format='GTiff', projWin=[adjusted_x_min, adjusted_y_max, adjusted_x_max, adjusted_y_min] + ) + except RuntimeError as err: + if 'negative width and/or height' in str(err): + gdal.Translate(output_path, ds, format='GTiff', projWin=[x_min, y_max, x_max, y_min]) + else: + raise + + # stage_dem.py takes a bbox as an input. The longitude coordinates + # of this bbox are unwrapped i.e., range in [0, 360] deg. If the + # bbox crosses the anti-meridian, the script divides it in two + # bboxes neighboring the anti-meridian. Here, x_min and x_max + # represent the min and max longitude coordinates of one of these + # bboxes. We Add 360 deg if the min longitude of the downloaded DEM + # tile is < 180 deg i.e., there is a dateline crossing. 
+ # This ensures that the mosaicked DEM VRT will span a min + # range of longitudes rather than the full [-180, 180] deg + sr = osr.SpatialReference(ds.GetProjection()) + epsg_str = sr.GetAttrValue('AUTHORITY', 1) + + if x_min <= -180.0 and epsg_str == '4326': + ds = gdal.Open(output_path, gdal.GA_Update) + geotransform = list(ds.GetGeoTransform()) + geotransform[0] += 360.0 + ds.SetGeoTransform(tuple(geotransform)) + + +def download_opera_dem_for_footprint(outfile: Path, bounds: tuple[float, float, float, float]) -> None: + """Download a DEM from the specified S3 bucket. + + Params: + outfile: Path to the where the output DEM file is to be staged. + bounds: Bounding box in the form of (lon_min, lat_min, lon_max, lat_max). + """ + poly = polygon_from_bounds(bounds) + polys = split_antimeridian(poly) + dem_list = [] + + with NamedTemporaryFile(suffix='.txt') as cookie_file: + with GDALConfigManager( + GDAL_HTTP_COOKIEJAR=cookie_file.name, + GDAL_HTTP_COOKIEFILE=cookie_file.name, + GDAL_DISABLE_READDIR_ON_OPEN='EMPTY_DIR', + ): + vrt_filename = '/vsicurl/https://nisar.asf.earthdatacloud.nasa.gov/STATIC/DEM/v1.1/EPSG4326/EPSG4326.vrt' + for idx, poly in enumerate(polys): + output_path = str(outfile.parent / f'{outfile.stem}_{idx}.tif') + dem_list.append(output_path) + translate_dem(vrt_filename, output_path, poly.bounds) + + gdal.BuildVRT(str(outfile), dem_list) diff --git a/src/hyp3_opera_rtc/prep_rtc.py b/src/hyp3_opera_rtc/prep_rtc.py index eb72f05..ccfcdd8 100644 --- a/src/hyp3_opera_rtc/prep_rtc.py +++ b/src/hyp3_opera_rtc/prep_rtc.py @@ -11,7 +11,6 @@ from hyp3lib.fetch import download_file from hyp3lib.scene import get_download_url from jinja2 import Template -from shapely.geometry import Polygon, box from hyp3_opera_rtc import dem, orbit @@ -20,46 +19,69 @@ def prep_burst_db(save_dir: Path) -> Path: - db_filename = 'opera-burst-bbox-only.sqlite3' + db_filename = 'burst_db_0.2.0_230831-bbox-only.sqlite' db_path = save_dir / db_filename - shutil.copy(Path.home() / db_filename, db_path) - return db_path -def get_s1_granule_bbox(granule_path: Path, buffer: float = 0.025) -> Polygon: - with ZipFile(granule_path, 'r') as z: - manifest_path = [x for x in z.namelist() if x.endswith('manifest.safe')][0] - with z.open(manifest_path) as m: - manifest = ET.parse(m).getroot() +def bounding_box_from_slc_granule(safe_file_path: Path) -> tuple[float, float, float, float]: + """Extracts the bounding box footprint from the given SLC SAFE archive.""" + safe_file_name = safe_file_path.stem - frame_element = next(x for x in manifest.findall('.//metadataObject') if x.get('ID') == 'measurementFrameSet') - coords_element = frame_element.find('.//{http://www.opengis.net/gml}coordinates') - assert coords_element is not None + with ZipFile(safe_file_path) as myzip: + with myzip.open(f'{safe_file_name}.SAFE/manifest.safe', 'r') as infile: + manifest_tree = ET.parse(infile) - frame_string = coords_element.text - assert frame_string is not None + coordinates_elem = manifest_tree.xpath('.//*[local-name()="coordinates"]') + if coordinates_elem is None: + raise RuntimeError( + 'Could not find gml:coordinates element within the manifest.safe ' + 'of the provided SAFE archive, cannot determine DEM bounding box.' 
+ ) - coord_strings = [pair.split(',') for pair in frame_string.split(' ')] - coords = [(float(lon), float(lat)) for lat, lon in coord_strings] - footprint = Polygon(coords).buffer(buffer) - return box(*footprint.bounds) + assert isinstance(coordinates_elem, list) + assert isinstance(coordinates_elem[0], ET._Element) + coordinates_str = coordinates_elem[0].text + assert isinstance(coordinates_str, str) + coordinates = coordinates_str.split() + lats = [float(coordinate.split(',')[0]) for coordinate in coordinates] + lons = [float(coordinate.split(',')[-1]) for coordinate in coordinates] + lat_min = min(lats) + lat_max = max(lats) + lon_min = min(lons) + lon_max = max(lons) -def get_granule_cmr(granule: str) -> dict: - params = (('short_name', 'SENTINEL-1_BURSTS'), ('granule_ur', granule)) - response = requests.get(CMR_URL, params=params) - response.raise_for_status() - return response.json() + # Check if the bbox crosses the antimeridian and "unwrap" the coordinates + # so that any resultant DEM is split properly by check_dateline + if lon_max - lon_min > 180: + lons = [lon + 360 if lon < 0 else lon for lon in lons] + lon_min = min(lons) + lon_max = max(lons) + + return (lon_min, lat_min, lon_max, lat_max) # WSEN order + + +def get_burst_params(granule: str) -> tuple[str, str]: + response = get_burst_from_cmr(granule) + return parse_response_for_burst_params(response) + + +def get_burst_from_cmr(granule: str) -> dict: + pol = granule.split('_')[4] + if pol in {'VH', 'HV'}: + raise ValueError(f'{granule} has polarization {pol}, must be VV or HH') + response = query_cmr((('short_name', 'SENTINEL-1_BURSTS'), ('granule_ur', granule))) + granule_exists = bool(response['items']) + if not granule_exists: + raise ValueError(f'Granule does not exist: {granule}') -def granule_exists(granule: str) -> bool: - response = get_granule_cmr(granule) - return bool(response['items']) + return response -def parse_response_for_slc_params(response: dict) -> tuple[str, str]: +def parse_response_for_burst_params(response: dict) -> tuple[str, str]: assert len(response['items']) == 1 item = response['items'][0] @@ -74,18 +96,26 @@ def parse_response_for_slc_params(response: dict) -> tuple[str, str]: return source_slc, f't{opera_burst_id.lower()}' -def get_granule_slc_params(granule: str) -> tuple[str, str]: - response = get_granule_cmr(granule) - return parse_response_for_slc_params(response) - - -def validate_co_pol_granule(granule: str) -> None: - pol = granule.split('_')[4] - if pol not in {'VV', 'HH'}: +def validate_slc(granule: str) -> str: + pol = granule.split('_')[4][2:4] + if pol in {'VH', 'HV'}: raise ValueError(f'{granule} has polarization {pol}, must be VV or HH') - if not granule_exists(granule): + + response = query_cmr( + (('short_name', 'SENTINEL-1*'), ('options[short_name][pattern]', 'true'), ('granule_ur', f'{granule}-SLC')) + ) + granule_exists = bool(response['items']) + if not granule_exists: raise ValueError(f'Granule does not exist: {granule}') + return granule + + +def query_cmr(params: tuple) -> dict: + response = requests.get(CMR_URL, params=params) + response.raise_for_status() + return response.json() + def get_cross_pol_name(granule: str) -> str: parts = granule.split('_') @@ -108,13 +138,15 @@ def prep_rtc( co_pol_granule: str, work_dir: Path, resolution: int = 30, + num_workers: int = 0, ) -> None: - """Prepare data for OPERA RTC processing. + """Prepare co_pol data for OPERA RTC processing. 
Args: - co_pol_granule: Sentinel-1 level-1 co-pol burst granule + co_pol_granule: Sentinel-1 level-1 co-pol granule (either burst or SLC) work_dir: Working directory for processing resolution: Resolution of the output RTC (m) + num_workers: Sets number of bursts to run in parallel. 0 will base it on OMP_NUM_THREADS """ scratch_dir = work_dir / 'scratch_dir' input_dir = work_dir / 'input_dir' @@ -122,9 +154,12 @@ def prep_rtc( for d in [scratch_dir, input_dir, output_dir]: d.mkdir(parents=True, exist_ok=True) - validate_co_pol_granule(co_pol_granule) + if co_pol_granule.endswith('BURST'): + source_slc, opera_burst_id = get_burst_params(co_pol_granule) + else: + validate_slc(co_pol_granule) + source_slc, opera_burst_id = co_pol_granule, None - source_slc, opera_burst_id = get_granule_slc_params(co_pol_granule) safe_path = download_file(get_download_url(source_slc), directory=str(input_dir), chunk_size=10485760) safe_path = Path(safe_path) dual_pol = safe_path.name[14] == 'D' @@ -137,7 +172,7 @@ def prep_rtc( print(f'Burst database: {db_path}') dem_path = input_dir / 'dem.tif' - granule_bbox = get_s1_granule_bbox(safe_path) + granule_bbox = bounding_box_from_slc_granule(safe_path) dem.download_opera_dem_for_footprint(dem_path, granule_bbox) print(f'Downloaded DEM: {dem_path}') @@ -146,13 +181,17 @@ def prep_rtc( 'orbit_path': str(orbit_path), 'db_path': str(db_path), 'dem_path': str(dem_path), - 'opera_burst_id': opera_burst_id, 'scratch_dir': str(scratch_dir), 'output_dir': str(output_dir), 'dual_pol': dual_pol, - 'resolution': int(resolution), + 'resolution': resolution, + 'num_workers': num_workers, + 'data_validity_start_date': '20140403', } + if opera_burst_id is not None: + runconfig_dict['opera_burst_id'] = opera_burst_id + render_template(runconfig_dict, work_dir) @@ -164,10 +203,12 @@ def main() -> None: S1_245714_IW1_20240809T141633_VV_6B31-BURST """ parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) - parser.add_argument('co_pol_granule', help='Sentinel-1 co-pol burst granule') + parser.add_argument('co_pol_granule', help='Sentinel-1 co-pol burst granule or SLC') parser.add_argument('--work-dir', type=Path, required=True, help='Working directory for processing') parser.add_argument('--resolution', default=30, type=int, help='Resolution of the output RTC (m)') + parser.add_argument('--num-workers', default=0, type=int, help='The number of bursts to run in parallel.') + args, _ = parser.parse_known_args() username = os.getenv('EARTHDATA_USERNAME') @@ -181,7 +222,7 @@ def main() -> None: UserWarning, ) - prep_rtc(args.co_pol_granule, args.work_dir, args.resolution) + prep_rtc(args.co_pol_granule, args.work_dir, args.resolution, args.num_workers) if __name__ == '__main__': diff --git a/src/hyp3_opera_rtc/templates/pge.yml.j2 b/src/hyp3_opera_rtc/templates/pge.yml.j2 index 13bc048..b26ea48 100644 --- a/src/hyp3_opera_rtc/templates/pge.yml.j2 +++ b/src/hyp3_opera_rtc/templates/pge.yml.j2 @@ -1,251 +1,108 @@ -# Sample RunConfig for use with the RTC-S1 PGE v2.1.1 -# This RunConfig should require minimal changes in order to be used with the -# OPERA PCM. 
- +# RunConfig for use with the RTC-S1 PGE v2.1.1 +# +# Adapted from +# https://github.com/nasa/opera-sds-pcm/blob/9bd74458957197b0c6680540c8d09c26ffab81df/conf/RunConfig.yaml.L2_RTC_S1.jinja2.tmpl RunConfig: - # Name for the RunConfig, may be any string - Name: OPERA-RTC-S1-PGE - - Groups: - # PGE-specific RunConfig section - # This section is only used by the PGE, however, paths to inputs/outputs - # should align with the similar sections of the SAS RunConfig - PGE: - PGENameGroup: - # Name of the PGE for use with this RunConfig, should always be - # RTC_S1_PGE when using with the RTC-S1 PGE - PGEName: RTC_S1_PGE - - InputFilesGroup: - # List of input files - # Must be a list containing the path to the input SAFE zip file(s), - # as well as the path to the Orbit Ephemerides file(s) - # Paths must correspond to the file system within the Docker container - InputFilePaths: - - {{ granule_path }} - - {{ orbit_path }} - - DynamicAncillaryFilesGroup: - # Map of ancillary file types to paths to the file - # Paths must correspond to the file system within the Docker container - AncillaryFileMap: - # Path to the Digital Elevation Model - # Must be either a single .tif, or a .vrt that points to one - # or more .tif files - dem_file: {{ dem_path }} - - # Burst database, must be an .sqlite3 file - burst_database_file: {{ db_path }} - - ProductPathGroup: - # Path to where output products should be stored - # Must correspond to the file system within the Docker container, - # and must have write permissions for the User/Group used with - # the "Docker run" command - OutputProductPath: {{ output_dir }} - - # Path to a scratch directory for the PGE and SAS to store - # intermediate files that will not be needed after PGE execution - # completes - # Must correspond to the file system within the Docker container, - # and must have write permissions for the User/Group used with - # the "Docker run" command - ScratchPath: {{ scratch_dir }} - - PrimaryExecutable: - # Identifier for the PGE executable, should always be RTC_S1 for - # this PGE - ProductIdentifier: RTC_S1 - - # Product version specific to output products - ProductVersion: "1.0" - - # Path to the executable to run, path must be reachable from - # within the Docker container (i.e. 
findable with a "which" command) - ProgramPath: conda - - # List of command-line options to use with ProgramPath - ProgramOptions: - - run - - --no-capture-output - - -n - - RTC - - rtc_s1.py - - # The Error Code base value added to the offset values to make - # error codes unique per-PGE - ErrorCodeBase: 300000 - - # Path to the Yamale schema used to validate the SAS portion - # of the RunConfig - # Path should correspond to the file system within the Docker - # container, and typically should reference a schema file bundled - # with the opera_pge installation directory within the container - # Consult the Docker image build scripts for more info - SchemaPath: /home/rtc_user/opera/pge/rtc_s1/schema/rtc_s1_sas_schema.yaml - - # Path to the Jinja2 template used to generate the ISO xml - # metadata file - # Path should correspond to the file system within the Docker - # container, and typically should reference a template file bundled - # with the opera_pge installation directory within the container - # Consult the Docker image build scripts for more info - IsoTemplatePath: /home/rtc_user/opera/pge/rtc_s1/templates/OPERA_ISO_metadata_L2_RTC_S1_template.xml.jinja2 - - # Date field which designates the point after which the - # RTC static layer product(s) should be considered valid. - # This field must be provided for RTC-S1 jobs when static layer - # generation is enabled (see below), and must be of the form YYYYMMDD - DataValidityStartDate: 20140403 - - QAExecutable: - # Set to True to enable execution of an additional "Quality Assurance" - # application after SAS execution has completed - Enabled: False - - # Path to the executable to run, path must be reachable from - # within the Docker container (i.e. findable with a "which" command) - ProgramPath: - - # List of command-line options to use with ProgramPath - ProgramOptions: [] - - DebugLevelGroup: - # Set to True to enable Debug mode (TODO this is currently a no-op) - DebugSwitch: False - - # Set to True to have the PGE invoke the SAS/QA executables via - # a shell, rather than a Python subprocess - # This allows shell-style syntax to be used in ProgramPath and - # ProgramOptions, which can be useful for testing - ExecuteViaShell: False - - # SAS-specific RunConfig section - # Prior to SAS execution by the PGE, the section below starting at "runconfig" - # is isolated into its own YAML file for use with the SAS - SAS: - runconfig: - name: rtc_s1_workflow_default - - groups: - pge_name_group: - pge_name: RTC_S1_PGE - - input_file_group: - # Required. List of SAFE files (min=1) - safe_file_path: - - {{ granule_path }} - # Optional. Burst ID to process (empty for all bursts) - burst_id: - - {{ opera_burst_id }} - # Required. 
List of orbit (EOF) files (min=1) - orbit_file_path: - - {{ orbit_path }} - # Location from where the source data can be retrieved (URL or DOI) - source_data_access: "https://search.asf.alaska.edu/#/?dataset=SENTINEL-1&productTypes=SLC" - - # This section should match the DynamicAncillaryFilesGroup of the PGE RunConfig - dynamic_ancillary_file_group: - # Digital elevation model - dem_file: {{ dem_path }} - dem_file_description: "Digital Elevation Model (DEM) for the NASA OPERA project version 1.1 (v1.1) based on the Copernicus DEM 30-m and Copernicus 90-m referenced to the WGS84 ellipsoid" - - static_ancillary_file_group: - # burst database sqlite file - burst_database_file: {{ db_path }} - - product_group: - product_version: "1.0" - - # This should match the path used for OutputProductPath - product_path: {{ output_dir }} - - # This should match the path used for ScratchPath - scratch_path: {{ scratch_dir }} - - # This should match the path used for OutputProductPath - output_dir: {{ output_dir }} - - # Validity start date for RTC-S1-STATIC products in the format YYYYMMDD, - # This field must be provided for RTC-S1 jobs when static layer - # generation is enabled, and should match the value assigned to - # DataValidityStartDate above. - rtc_s1_static_validity_start_date: 20140403 - - # Location from where the output product can be retrieved (URL or DOI) - product_data_access: "https://search.asf.alaska.edu/#/?dataset=OPERA-S1&productTypes=RTC" - - # Location from where the static layers product can be retrieved - # The {burst_id} and {end_date} are template placeholders that will be - # filled in by the SAS if they are present in the string defined in this config - static_layers_data_access: "https://search.asf.alaska.edu/#/?dataset=OPERA-S1&productTypes=RTC-STATIC&operaBurstID={burst_id}&end={end_date}" - - # RTC-S1 imagery - save_bursts: True - - # Save mosaic of RTC-S1 bursts - save_mosaics: False - - # Save browse image(s) - save_browse: True - - output_imagery_format: COG - - # Save RTC-S1 metadata in the HDF5 format - # Optional for `output_imagery_format` equal to 'ENVI', 'GTiff', or - # 'COG', and enabled by default for `output_imagery_format` equal - # to 'HDF5' or 'NETCDF' or `save_secondary_layers_as_hdf5` is True - save_metadata: True - - primary_executable: - # Must be one of RTC_S1 or RTC_S1_STATIC, this determines the type of - # output product created - product_type: RTC_S1 - - # SLC processing options - # Consult the RTC-S1 SAS User's Guide for more info on each field - processing: - - # Check if ancillary input covers entirely output products - check_ancillary_inputs_coverage: True - - # Polarization channels to process. 
- polarization: {{ 'dual-pol' if dual_pol else 'co-pol' }} - - rtc: - output_type: gamma0 - - # OPTIONAL - to provide the number of processes when processing the bursts in parallel - # "0" means that the number will be automatically decided based on - # the number of cores, `OMP_NUM_THREADS` in environment setting, - # and the number of burst to process in runconfig - num_workers: 0 - - # OPTIONAL - Mechanism to specify output posting and DEM - geocoding: - # OPTIONAL - Choices: "single_block", "geogrid", "geogrid_and_radargrid", - # and "auto" (default) - memory_mode: auto - - bursts_geogrid: - x_posting: {{ resolution }} - y_posting: {{ resolution }} - x_snap: {{ resolution }} - y_snap: {{ resolution }} - top_left: - x: - y: - bottom_right: - x: - y: - - # Fields to populate the products' metadata required by - # CEOS Analysis Ready Data specifications - estimated_geometric_accuracy_bias_x: -0.72 - estimated_geometric_accuracy_bias_y: -0.67 - estimated_geometric_accuracy_stddev_x: 0.7 - estimated_geometric_accuracy_stddev_y: 0.62 - - mosaicking: - # Mosaic mode - Choices: "average", "first", "bursts_center" (default) - mosaic_mode: first + Name: OPERA-RTC-S1-PGE-CONFIG + Groups: + PGE: + PGENameGroup: + PGEName: RTC_S1_PGE + InputFilesGroup: + InputFilePaths: + - {{ granule_path }} + - {{ orbit_path }} + DynamicAncillaryFilesGroup: + AncillaryFileMap: + dem_file: {{ dem_path }} + burst_database_file: {{ db_path }} + ProductPathGroup: + OutputProductPath: {{ output_dir }} + ScratchPath: {{ scratch_dir }} + PrimaryExecutable: + ProductIdentifier: RTC_S1 + ProductVersion: "1.0" + ProgramPath: conda + ProgramOptions: + - run + - --no-capture-output + - -n + - RTC + - rtc_s1.py + ErrorCodeBase: 300000 + SchemaPath: /home/rtc_user/opera/pge/rtc_s1/schema/rtc_s1_sas_schema.yaml + IsoTemplatePath: /home/rtc_user/opera/pge/rtc_s1/templates/OPERA_ISO_metadata_L2_RTC_S1_template.xml.jinja2 + # Date field which designates the point after which the + # RTC static layer product(s) should be considered valid. + # This field must be provided for RTC-S1 jobs when static layer + # generation is enabled (see below), and must be of the form YYYYMMDD + DataValidityStartDate: {{ data_validity_start_date }} + QAExecutable: + Enabled: False + ProgramPath: + ProgramOptions: [] + DebugLevelGroup: + DebugSwitch: False + ExecuteViaShell: False + SAS: + runconfig: + name: rtc_s1_workflow_default + groups: + pge_name_group: + pge_name: RTC_S1_PGE + input_file_group: + # Required. List of SAFE files (min=1) + safe_file_path: + - {{ granule_path }} + # Optional. Burst ID to process (empty for all bursts) + {% if opera_burst_id %} + burst_id: + - {{ opera_burst_id }} + {% endif %} + # Required. 
List of orbit (EOF) files (min=1) + orbit_file_path: + - {{ orbit_path }} + source_data_access: "https://search.asf.alaska.edu/#/?dataset=SENTINEL-1&productTypes=SLC" + dynamic_ancillary_file_group: + dem_file: {{ dem_path }} + dem_file_description: "Digital Elevation Model (DEM) for the NASA OPERA project version 1.1 (v1.1) based on the Copernicus DEM 30-m and Copernicus 90-m referenced to the WGS84 ellipsoid" + static_ancillary_file_group: + burst_database_file: {{ db_path }} + product_group: + product_version: "1.0" + product_path: {{ output_dir }} + scratch_path: {{ scratch_dir }} + output_dir: {{ output_dir }} + product_id: + rtc_s1_static_validity_start_date: {{ data_validity_start_date }} + product_data_access: "https://search.asf.alaska.edu/#/?dataset=OPERA-S1&productTypes=RTC" + static_layers_data_access: "https://search.asf.alaska.edu/#/?dataset=OPERA-S1&productTypes=RTC-STATIC&operaBurstID={burst_id}&end={end_date}" + save_bursts: True + save_mosaics: False + save_browse: True + output_imagery_format: COG + save_metadata: True + primary_executable: + product_type: RTC_S1 + processing: + check_ancillary_inputs_coverage: True + polarization: {{ 'dual-pol' if dual_pol else 'co-pol' }} + rtc: + output_type: gamma0 + # OPTIONAL - to provide the number of processes when processing the bursts in parallel + # "0" means that the number will be automatically decided based on + # the number of cores, `OMP_NUM_THREADS` in environment setting, + # and the number of burst to process in runconfig + num_workers: {{ num_workers }} + geocoding: + memory_mode: auto + # Fields to populate the products' metadata required by + # CEOS Analysis Ready Data specifications + estimated_geometric_accuracy_bias_x: -0.72 + estimated_geometric_accuracy_bias_y: -0.67 + estimated_geometric_accuracy_stddev_x: 0.7 + estimated_geometric_accuracy_stddev_y: 0.62 + mosaicking: + mosaic_mode: first + browse_image_group: + browse_image_burst_height: 2048 diff --git a/src/hyp3_opera_rtc/upload_rtc.py b/src/hyp3_opera_rtc/upload_rtc.py index b5cfb5e..dca6e2b 100644 --- a/src/hyp3_opera_rtc/upload_rtc.py +++ b/src/hyp3_opera_rtc/upload_rtc.py @@ -8,9 +8,12 @@ def upload_rtc(bucket: str, bucket_prefix: str, output_dir: Path) -> None: output_files = [f for f in output_dir.iterdir() if not f.is_dir()] - output_zip = make_zip(output_files, output_dir) + burst_count = len([f for f in output_files if f.name.endswith('h5')]) + if burst_count == 1: + output_zip = make_zip(output_files, output_dir) + output_files.append(output_zip) - for output_file in output_files + [output_zip]: + for output_file in output_files: upload_file_to_s3(output_file, bucket, bucket_prefix) @@ -34,7 +37,7 @@ def make_zip(output_files: list[Path], output_dir: Path) -> Path: def make_zip_name(product_files: list[Path]) -> str: - h5_file = next(f for f in product_files if f.name.endswith('h5')) + h5_file = [f for f in product_files if f.name.endswith('h5')][0] return h5_file.name.split('.h5')[0] diff --git a/tests/data/rtc_output_files.json b/tests/data/rtc_output_files.json new file mode 100644 index 0000000..b0e9efa --- /dev/null +++ b/tests/data/rtc_output_files.json @@ -0,0 +1,178 @@ +{ + "burst": [ + "OPERA_L2_RTC-S1_20250411T185446Z_S1A_30_v1.0.catalog.json", + "OPERA_L2_RTC-S1_20250411T185446Z_S1A_30_v1.0.log", + "OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_BROWSE.png", + 
"OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_VV.tif" + ], + "slc": [ + "OPERA_L2_RTC-S1_20250710T215059Z_S1A_30_v1.0.catalog.json", + "OPERA_L2_RTC-S1_20250710T215059Z_S1A_30_v1.0.log", + "OPERA_L2_RTC-S1_T115-245712-IW3_20240809T141630Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245712-IW3_20240809T141630Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245712-IW3_20240809T141630Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245712-IW3_20240809T141630Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245712-IW3_20240809T141630Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245712-IW3_20240809T141630Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245713-IW1_20240809T141631Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245713-IW1_20240809T141631Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245713-IW1_20240809T141631Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245713-IW1_20240809T141631Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245713-IW1_20240809T141631Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245713-IW1_20240809T141631Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245713-IW2_20240809T141632Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245713-IW2_20240809T141632Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245713-IW2_20240809T141632Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245713-IW2_20240809T141632Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245713-IW2_20240809T141632Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245713-IW2_20240809T141632Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245713-IW3_20240809T141633Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245713-IW3_20240809T141633Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245713-IW3_20240809T141633Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245713-IW3_20240809T141633Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245713-IW3_20240809T141633Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245713-IW3_20240809T141633Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245714-IW2_20240809T141634Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245714-IW2_20240809T141634Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245714-IW2_20240809T141634Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + 
"OPERA_L2_RTC-S1_T115-245714-IW2_20240809T141634Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245714-IW2_20240809T141634Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245714-IW2_20240809T141634Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245714-IW3_20240809T141635Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245714-IW3_20240809T141635Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245714-IW3_20240809T141635Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245714-IW3_20240809T141635Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245714-IW3_20240809T141635Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245714-IW3_20240809T141635Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245715-IW1_20240809T141636Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245715-IW1_20240809T141636Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245715-IW1_20240809T141636Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245715-IW1_20240809T141636Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245715-IW1_20240809T141636Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245715-IW1_20240809T141636Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245715-IW2_20240809T141637Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245715-IW2_20240809T141637Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245715-IW2_20240809T141637Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245715-IW2_20240809T141637Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245715-IW2_20240809T141637Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245715-IW2_20240809T141637Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245715-IW3_20240809T141638Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245715-IW3_20240809T141638Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245715-IW3_20240809T141638Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245715-IW3_20240809T141638Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245715-IW3_20240809T141638Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245715-IW3_20240809T141638Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245716-IW1_20240809T141639Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245716-IW1_20240809T141639Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245716-IW1_20240809T141639Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245716-IW1_20240809T141639Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245716-IW1_20240809T141639Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245716-IW1_20240809T141639Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245716-IW2_20240809T141640Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245716-IW2_20240809T141640Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245716-IW2_20240809T141640Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245716-IW2_20240809T141640Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245716-IW2_20240809T141640Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245716-IW2_20240809T141640Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + 
"OPERA_L2_RTC-S1_T115-245716-IW3_20240809T141641Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245716-IW3_20240809T141641Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245716-IW3_20240809T141641Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245716-IW3_20240809T141641Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245716-IW3_20240809T141641Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245716-IW3_20240809T141641Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245717-IW1_20240809T141642Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245717-IW1_20240809T141642Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245717-IW1_20240809T141642Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245717-IW1_20240809T141642Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245717-IW1_20240809T141642Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245717-IW1_20240809T141642Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245717-IW2_20240809T141643Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245717-IW2_20240809T141643Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245717-IW2_20240809T141643Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245717-IW2_20240809T141643Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245717-IW2_20240809T141643Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245717-IW2_20240809T141643Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245717-IW3_20240809T141644Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245717-IW3_20240809T141644Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245717-IW3_20240809T141644Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245717-IW3_20240809T141644Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245717-IW3_20240809T141644Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245717-IW3_20240809T141644Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245718-IW1_20240809T141644Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245718-IW1_20240809T141644Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245718-IW1_20240809T141644Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245718-IW1_20240809T141644Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245718-IW1_20240809T141644Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245718-IW1_20240809T141644Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245718-IW2_20240809T141645Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245718-IW2_20240809T141645Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245718-IW2_20240809T141645Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245718-IW2_20240809T141645Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245718-IW2_20240809T141645Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245718-IW2_20240809T141645Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245718-IW3_20240809T141646Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245718-IW3_20240809T141646Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245718-IW3_20240809T141646Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + 
"OPERA_L2_RTC-S1_T115-245718-IW3_20240809T141646Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245718-IW3_20240809T141646Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245718-IW3_20240809T141646Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245719-IW1_20240809T141647Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245719-IW1_20240809T141647Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245719-IW1_20240809T141647Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245719-IW1_20240809T141647Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245719-IW1_20240809T141647Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245719-IW1_20240809T141647Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245719-IW2_20240809T141648Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245719-IW2_20240809T141648Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245719-IW2_20240809T141648Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245719-IW2_20240809T141648Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245719-IW2_20240809T141648Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245719-IW2_20240809T141648Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245719-IW3_20240809T141649Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245719-IW3_20240809T141649Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245719-IW3_20240809T141649Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245719-IW3_20240809T141649Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245719-IW3_20240809T141649Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245719-IW3_20240809T141649Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245720-IW1_20240809T141650Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245720-IW1_20240809T141650Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245720-IW1_20240809T141650Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245720-IW1_20240809T141650Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245720-IW1_20240809T141650Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245720-IW1_20240809T141650Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245720-IW2_20240809T141651Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245720-IW2_20240809T141651Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245720-IW2_20240809T141651Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245720-IW2_20240809T141651Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245720-IW2_20240809T141651Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245720-IW2_20240809T141651Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245720-IW3_20240809T141652Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245720-IW3_20240809T141652Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245720-IW3_20240809T141652Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245720-IW3_20240809T141652Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245720-IW3_20240809T141652Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245720-IW3_20240809T141652Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + 
"OPERA_L2_RTC-S1_T115-245721-IW1_20240809T141653Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245721-IW1_20240809T141653Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245721-IW1_20240809T141653Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245721-IW1_20240809T141653Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245721-IW1_20240809T141653Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245721-IW1_20240809T141653Z_20250710T215059Z_S1A_30_v1.0_VV.tif", + "OPERA_L2_RTC-S1_T115-245721-IW2_20240809T141654Z_20250710T215059Z_S1A_30_v1.0.h5", + "OPERA_L2_RTC-S1_T115-245721-IW2_20240809T141654Z_20250710T215059Z_S1A_30_v1.0.iso.xml", + "OPERA_L2_RTC-S1_T115-245721-IW2_20240809T141654Z_20250710T215059Z_S1A_30_v1.0_BROWSE.png", + "OPERA_L2_RTC-S1_T115-245721-IW2_20240809T141654Z_20250710T215059Z_S1A_30_v1.0_mask.tif", + "OPERA_L2_RTC-S1_T115-245721-IW2_20240809T141654Z_20250710T215059Z_S1A_30_v1.0_VH.tif", + "OPERA_L2_RTC-S1_T115-245721-IW2_20240809T141654Z_20250710T215059Z_S1A_30_v1.0_VV.tif" + ] +} diff --git a/tests/test_dem.py b/tests/test_dem.py index 9de8f3b..fe00644 100644 --- a/tests/test_dem.py +++ b/tests/test_dem.py @@ -1,15 +1,42 @@ -from shapely.geometry import box +from shapely.geometry import Polygon, box from hyp3_opera_rtc import dem -def test_get_granule_url(): - test_url = 'https://nisar.asf.earthdatacloud.nasa.gov/STATIC/DEM/v1.1/EPSG4326/S10_W020/DEM_S01_00_W001_00.tif' - url = dem.get_dem_granule_url(-1, -1) - assert url == test_url +def test_margin_km_to_deg(): + assert round(dem.margin_km_to_deg(1), 3) == 0.009 + assert round(dem.margin_km_to_deg(0), 3) == 0.000 + assert round(dem.margin_km_to_deg(-1), 3) == -0.009 -def test_get_latlon_pairs(): - polygon = box(-1, -1, 1, 1) - latlon_pairs = dem.get_latlon_pairs(polygon) - assert latlon_pairs == [(-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 0), (0, 1), (1, -1), (1, 0), (1, 1)] +def test_margin_km_to_longitude_deg(): + assert round(dem.margin_km_to_longitude_deg(1, 0), 3) == 0.009 + assert round(dem.margin_km_to_longitude_deg(1, 45), 3) == 0.013 + assert round(dem.margin_km_to_longitude_deg(1, -45), 3) == 0.013 + assert round(dem.margin_km_to_longitude_deg(0, 0), 3) == 0.000 + assert round(dem.margin_km_to_longitude_deg(-1, 0), 3) == -0.009 + + +def test_polygon_from_bounds(): + poly = dem.polygon_from_bounds((-1, -1, 0, 0)) + assert isinstance(poly, Polygon) + assert tuple([round(x, 2) for x in poly.bounds]) == (-2.80, -2.80, 1.80, 1.80) + + cross_poly = dem.polygon_from_bounds((180, -1, 181, 0)) + assert isinstance(cross_poly, Polygon) + assert tuple([round(x, 2) for x in cross_poly.bounds]) == (178.20, -2.80, 182.80, 1.80) + + +def test_check_antimeridian(): + no_cross = box(-1, -1, 0, 0) + polys = dem.split_antimeridian(no_cross) + assert len(polys) == 1 + assert polys[0].equals(no_cross) + + cross = box(179, -1, 181, 0) + polys = dem.split_antimeridian(cross) + negative_side = box(-180, -1, -179, 0) + positive_side = box(179, -1, 180, 0) + assert len(polys) == 2 + assert polys[0].equals(negative_side) + assert polys[1].equals(positive_side) diff --git a/tests/test_prep_rtc.py b/tests/test_prep_rtc.py index 96f7c48..10a072c 100644 --- a/tests/test_prep_rtc.py +++ b/tests/test_prep_rtc.py @@ -1,5 +1,4 @@ import json -import unittest.mock from pathlib import Path import pytest @@ -9,15 +8,15 @@ from hyp3_opera_rtc import prep_rtc -def test_parse_response_for_slc_params(): +def test_parse_response_for_params(): test_response = 
json.loads(Path('tests/data/burst_response.json').read_text()) - slc_name, burst_id = prep_rtc.parse_response_for_slc_params(test_response) + slc_name, burst_id = prep_rtc.parse_response_for_burst_params(test_response) assert slc_name == 'S1A_IW_SLC__1SDV_20250413T020809_20250413T020836_058732_07464F_EF1E' assert burst_id == 't035_073251_iw2' @responses.activate -def test_granule_exists(): +def test_get_burst_from_cmr(): responses.get( url=prep_rtc.CMR_URL, match=[ @@ -25,54 +24,55 @@ def test_granule_exists(): {'short_name': 'SENTINEL-1_BURSTS', 'granule_ur': 'S1_073251_IW2_20250413T020809_VV_EF1E-BURST'} ) ], - json={'items': ['foo']}, + json={'items': []}, ) responses.get( url=prep_rtc.CMR_URL, match=[ responses.matchers.query_param_matcher( - {'short_name': 'SENTINEL-1_BURSTS', 'granule_ur': 'S1_073251_IW2_20250413T020809_VH_EF1E-BURST'} + {'short_name': 'SENTINEL-1_BURSTS', 'granule_ur': 'foo_bad_burst_example_VV_-BURST'} ) ], - json={'items': []}, + status=400, ) + responses.get( url=prep_rtc.CMR_URL, - match=[responses.matchers.query_param_matcher({'short_name': 'SENTINEL-1_BURSTS', 'granule_ur': 'foo'})], - status=400, + match=[ + responses.matchers.query_param_matcher( + {'short_name': 'SENTINEL-1_BURSTS', 'granule_ur': 'S1_146160_IW1_20241029T095958_VV_592B-BURST'} + ) + ], + json={'items': ['foo']}, + ) + responses.get( + url=prep_rtc.CMR_URL, + match=[ + responses.matchers.query_param_matcher( + {'short_name': 'SENTINEL-1_BURSTS', 'granule_ur': 'S1_152193_IW3_20250415T143714_HH_EF65-BURST'} + ) + ], + json={'items': ['foo']}, ) - assert prep_rtc.granule_exists('S1_073251_IW2_20250413T020809_VV_EF1E-BURST') - - assert not prep_rtc.granule_exists('S1_073251_IW2_20250413T020809_VH_EF1E-BURST') + assert prep_rtc.get_burst_from_cmr('S1_146160_IW1_20241029T095958_VV_592B-BURST')['items'] + assert prep_rtc.get_burst_from_cmr('S1_152193_IW3_20250415T143714_HH_EF65-BURST')['items'] with pytest.raises(requests.HTTPError): - prep_rtc.granule_exists('foo') - - -def test_validate_co_pol_granule(): - def mock_granule_exists(granule: str) -> bool: - return granule in [ - 'S1_146160_IW1_20241029T095958_VV_592B-BURST', - 'S1_152193_IW3_20250415T143714_HH_EF65-BURST', - ] + prep_rtc.get_burst_from_cmr('foo_bad_burst_example_VV_-BURST') - with unittest.mock.patch('hyp3_opera_rtc.prep_rtc.granule_exists', mock_granule_exists): - prep_rtc.validate_co_pol_granule('S1_146160_IW1_20241029T095958_VV_592B-BURST') - prep_rtc.validate_co_pol_granule('S1_152193_IW3_20250415T143714_HH_EF65-BURST') + with pytest.raises( + ValueError, match=r'^S1_073251_IW2_20250413T020809_VH_EF1E-BURST has polarization VH, must be VV or HH' + ): + prep_rtc.get_burst_from_cmr('S1_073251_IW2_20250413T020809_VH_EF1E-BURST') - with pytest.raises( - ValueError, match=r'^S1_073251_IW2_20250413T020809_VH_EF1E-BURST has polarization VH, must be VV or HH' - ): - prep_rtc.validate_co_pol_granule('S1_073251_IW2_20250413T020809_VH_EF1E-BURST') + with pytest.raises( + ValueError, match=r'^S1_241258_IW1_20250418T105137_HV_57A0-BURST has polarization HV, must be VV or HH' + ): + prep_rtc.get_burst_from_cmr('S1_241258_IW1_20250418T105137_HV_57A0-BURST') - with pytest.raises( - ValueError, match=r'^S1_241258_IW1_20250418T105137_HV_57A0-BURST has polarization HV, must be VV or HH' - ): - prep_rtc.validate_co_pol_granule('S1_241258_IW1_20250418T105137_HV_57A0-BURST') - - with pytest.raises(ValueError, match=r'^Granule does not exist: S1_073251_IW2_20250413T020809_VV_EF1E-BURST$'): - 
prep_rtc.validate_co_pol_granule('S1_073251_IW2_20250413T020809_VV_EF1E-BURST') + with pytest.raises(ValueError, match=r'^Granule does not exist: S1_073251_IW2_20250413T020809_VV_EF1E-BURST$'): + prep_rtc.get_burst_from_cmr('S1_073251_IW2_20250413T020809_VV_EF1E-BURST') def test_get_cross_pol_name(): @@ -86,3 +86,9 @@ def test_get_cross_pol_name(): ) with pytest.raises(KeyError, match=r"^'VH'$"): prep_rtc.get_cross_pol_name('S1_073251_IW2_20250413T020809_VH_EF1E-BURST') + + +@pytest.mark.skip +def test_get_granule_params(): + prep_rtc.get_burst_params('S1_146160_IW1_20241029T095958_VV_592B-BURST') + prep_rtc.validate_slc('S1A_IW_SLC__1SDV_20250704T124517_20250704T124544_059934_0771EA_C208') diff --git a/tests/test_upload_rtc.py b/tests/test_upload_rtc.py index 0a3ec30..ba303cc 100644 --- a/tests/test_upload_rtc.py +++ b/tests/test_upload_rtc.py @@ -1,3 +1,5 @@ +import json +from collections import Counter from pathlib import Path from zipfile import ZipFile @@ -6,13 +8,12 @@ from moto import mock_aws from moto.core import patch_client -from hyp3_opera_rtc.upload_rtc import make_zip_name, upload_rtc +from hyp3_opera_rtc import upload_rtc -def test_upload_rtc(rtc_results_dir, rtc_output_files, s3_bucket): +def test_upload_burst_rtc(rtc_burst_results_dir, s3_bucket): prefix = 'myPrefix' - - upload_rtc(s3_bucket, prefix, rtc_results_dir) + upload_rtc.upload_rtc(s3_bucket, prefix, rtc_burst_results_dir) resp = aws.S3_CLIENT.list_objects_v2(Bucket=s3_bucket, Prefix=prefix) @@ -24,53 +25,75 @@ def test_upload_rtc(rtc_results_dir, rtc_output_files, s3_bucket): assert zip_filename == f'{product_name}.zip' - zip_download_path = rtc_results_dir / 'output.zip' + zip_download_path = rtc_burst_results_dir / 'output.zip' aws.S3_CLIENT.download_file(s3_bucket, zip_s3_key, zip_download_path) with ZipFile(zip_download_path) as zf: files_in_zip = set([f.filename for f in zf.infolist()]) - assert files_in_zip == set( - [ - f'{product_name}/', - f'{product_name}/OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_BROWSE.png', - f'{product_name}/OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0.iso.xml', - f'{product_name}/OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0.h5', - f'{product_name}/OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_mask.tif', - f'{product_name}/OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_VH.tif', - f'{product_name}/OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_VV.tif', - ] - ) + assert files_in_zip == { + f'{product_name}/', + f'{product_name}/OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_BROWSE.png', + f'{product_name}/OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0.iso.xml', + f'{product_name}/OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0.h5', + f'{product_name}/OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_mask.tif', + f'{product_name}/OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_VH.tif', + f'{product_name}/OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_VV.tif', + } + + +def test_upload_slc_rtc(rtc_slc_results_dir, s3_bucket): + prefix = 'myPrefix' + upload_rtc.upload_rtc(s3_bucket, prefix, rtc_slc_results_dir) + + resp = aws.S3_CLIENT.list_objects_v2(Bucket=s3_bucket, Prefix=prefix) + + zip_s3_keys = [c['Key'] for c in 
resp['Contents'] if c['Key'].endswith('.zip')]
+    assert len(zip_s3_keys) == 0
+
+    uploaded_files = [c['Key'] for c in resp['Contents']]
+    assert len(uploaded_files) == 164
+    file_suffixes = dict(Counter(Path(f).suffix for f in uploaded_files))
+    assert file_suffixes == {'.json': 1, '.log': 1, '.h5': 27, '.xml': 27, '.png': 27, '.tif': 27 * 3}
 
 
-def test_make_zip_name(rtc_output_files):
-    zip_filename = make_zip_name([Path(f) for f in rtc_output_files])
+
+def test_make_zip_name(rtc_burst_output_files):
+    zip_filename = upload_rtc.make_zip_name([Path(f) for f in rtc_burst_output_files])
     assert zip_filename == 'OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0'
 
 
 @pytest.fixture
-def rtc_results_dir(tmp_path, rtc_output_files):
+def rtc_burst_results_dir(tmp_path, rtc_burst_output_files):
+    (tmp_path / 'burst-dir').mkdir(parents=True)
+
+    for file in rtc_burst_output_files:
+        (tmp_path / file).touch()
+
+    return tmp_path
+
+
+@pytest.fixture
+def rtc_slc_results_dir(tmp_path, rtc_slc_output_files):
     (tmp_path / 'burst-dir').mkdir(parents=True)
 
-    for file in rtc_output_files:
+    for file in rtc_slc_output_files:
         (tmp_path / file).touch()
 
     return tmp_path
 
 
 @pytest.fixture
-def rtc_output_files():
-    return [
-        'OPERA_L2_RTC-S1_20250411T185446Z_S1A_30_v1.0.catalog.json',
-        'OPERA_L2_RTC-S1_20250411T185446Z_S1A_30_v1.0.log',
-        'OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0.iso.xml',
-        'OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_BROWSE.png',
-        'OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0.h5',
-        'OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_mask.tif',
-        'OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_VH.tif',
-        'OPERA_L2_RTC-S1_T115-245714-IW1_20240809T141633Z_20250411T185446Z_S1A_30_v1.0_VV.tif',
-    ]
+def rtc_burst_output_files():
+    with (Path(__file__).parent / 'data' / 'rtc_output_files.json').open() as f:
+        return json.load(f)['burst']
+
+
+@pytest.fixture
+def rtc_slc_output_files():
+    with (Path(__file__).parent / 'data' / 'rtc_output_files.json').open() as f:
+        return json.load(f)['slc']
 
 
 @pytest.fixture