From 5a9966e8e1e4247bae5c4ace6a2521a7387a0edc Mon Sep 17 00:00:00 2001 From: SpicyGarlicAlbacoreRoll Date: Thu, 19 Jun 2025 14:22:12 -0800 Subject: [PATCH 1/8] fix: custom client id, aria gunw stacking output support --- CHANGELOG.md | 9 +++ src/SearchAPI/application/SearchAPISession.py | 26 ++++++++ src/SearchAPI/application/__init__.py | 1 + src/SearchAPI/application/application.py | 24 ++++--- src/SearchAPI/application/asf_opts.py | 4 +- src/SearchAPI/application/search.py | 66 ++++++++++--------- 6 files changed, 88 insertions(+), 42 deletions(-) create mode 100644 src/SearchAPI/application/SearchAPISession.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 21458cc..a4db33a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,15 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - --> +------ +## [1.0.5](https://github.com/asfadmin/Discovery-SearchAPI-v3/compare/v1.0.4...v1.0.5) +### Added +- Create wrapper class around asf-search `ASFSession`, `SearchAPISession`. Modifies client ID. + +### Changed +- Aria stack supports different output types +- asf_search uses `SearchAPISession` by default for search queries + ------ ## [1.0.4](https://github.com/asfadmin/Discovery-SearchAPI-v3/compare/v1.0.3...v1.0.4) ### Added diff --git a/src/SearchAPI/application/SearchAPISession.py b/src/SearchAPI/application/SearchAPISession.py new file mode 100644 index 0000000..1d2f5aa --- /dev/null +++ b/src/SearchAPI/application/SearchAPISession.py @@ -0,0 +1,26 @@ +from typing import List +from asf_search import ASFSession + + +class SearchAPISession(ASFSession): + def __init__( + self, + edl_host: str = None, + edl_client_id: str = None, + asf_auth_host: str = None, + cmr_host: str = None, + cmr_collections: str = None, + auth_domains: List[str] = None, + auth_cookie_names: List[str] = None, + ): + super().__init__( + edl_host, + edl_client_id, + asf_auth_host, + cmr_host, + cmr_collections, + auth_domains, + auth_cookie_names, + ) + + self.headers.update({'Client-Id': f'SearchAPI_{self.headers.get("Client-Id")}'}) \ No newline at end of file diff --git a/src/SearchAPI/application/__init__.py b/src/SearchAPI/application/__init__.py index 549425f..56ff4d4 100644 --- a/src/SearchAPI/application/__init__.py +++ b/src/SearchAPI/application/__init__.py @@ -3,4 +3,5 @@ from .logger import * from .log_router import * from .search import * +from .SearchAPISession import * from .application import * \ No newline at end of file diff --git a/src/SearchAPI/application/application.py b/src/SearchAPI/application/application.py index 907684b..ecde580 100644 --- a/src/SearchAPI/application/application.py +++ b/src/SearchAPI/application/application.py @@ -18,9 +18,12 @@ from .output import as_output, get_asf_search_script, make_filename from .files_to_wkt import FilesToWKT from . 
import constants -from .search import stack_aria_gunw +from .SearchAPISession import SearchAPISession +from .search import get_aria_groups_for_frame, stack_aria_gunw import time +from asf_search.ASFSearchOptions.config import config as asf_config +asf_config['session'] = SearchAPISession() asf.REPORT_ERRORS = False router = APIRouter(route_class=LoggingRoute) @@ -97,14 +100,17 @@ async def query_baseline(searchOptions: BaselineSearchOptsModel = Depends(proces if searchOptions.opts.dataset is not None: if searchOptions.opts.dataset[0] == asf.DATASET.ARIA_S1_GUNW: - return JSONResponse( - content=stack_aria_gunw(reference), - status_code=200, - headers= { - **constants.DEFAULT_HEADERS, - 'Content-Disposition': f"attachment; filename={make_filename('json')}", - } - ) + if output.lower() == 'count': + return Response( + content=str(len(get_aria_groups_for_frame(reference)[1])), + status_code=200, + media_type='text/html; charset=utf-8', + headers=constants.DEFAULT_HEADERS + ) + return + stack = stack_aria_gunw(reference) + response_info = as_output(stack, output=output) + return Response(**response_info) # Load the reference scene: if output.lower() == 'python': diff --git a/src/SearchAPI/application/asf_opts.py b/src/SearchAPI/application/asf_opts.py index 1ca9515..158d042 100644 --- a/src/SearchAPI/application/asf_opts.py +++ b/src/SearchAPI/application/asf_opts.py @@ -10,7 +10,7 @@ from asf_search.ASFSearchOptions import validator_map from .asf_env import load_config_maturity - +from .SearchAPISession import SearchAPISession from .logger import api_logger non_search_param = ['output', 'maxresults', 'pagesize', 'maturity'] @@ -166,7 +166,7 @@ async def process_search_request(request: Request) -> SearchOptsModel: merged_args = {**query_params, **body} if (token := merged_args.get('cmr_token')): - session = asf.ASFSession() + session = SearchAPISession() session.headers.update({'Authorization': 'Bearer {0}'.format(token)}) query_opts.session = session diff --git a/src/SearchAPI/application/search.py b/src/SearchAPI/application/search.py index e4f2038..7c43303 100644 --- a/src/SearchAPI/application/search.py +++ b/src/SearchAPI/application/search.py @@ -2,10 +2,41 @@ import dateparser import asf_search as asf +from asf_search import ASFSearchResults from shapely.wkt import dumps as dump_to_wkt from shapely import Polygon def stack_aria_gunw(frame: str): + reference, aria_groups = get_aria_groups_for_frame(frame) + + # track group index on each product, naively choose first granule available + for idx, group in enumerate(aria_groups): + group_granule_idx = None + for idy, product in enumerate(group['products']): + product.properties['groupIDX'] = idx + if group_granule_idx is None: + if product.has_baseline(): + group_granule_idx = idy + + group['group_granule_idx'] = group_granule_idx + + + + stack = ASFSearchResults([group['products'][group['group_granule_idx']] for group in aria_groups if group['group_granule_idx'] is not None]) + target_stack, warnings = asf.baseline.get_baseline_from_stack(reference, stack) + # for product in target_stack: + # group_idx = product.properties.pop('groupIDX') + # aria_groups[group_idx]['perpendicularBaseline'] = product.properties['perpendicularBaseline'] + # aria_groups[group_idx]['temporalBaseline'] = product.properties['temporalBaseline'] + + # for group in aria_groups: + # for idx, product in enumerate(group['products']): + # group['products'][idx] = product.properties['sceneName'] + # group['date'] = group['date'].strftime('%Y-%m-%dT%H:%M:%SZ') + + return 
target_stack + +def get_aria_groups_for_frame(frame: str): reference = asf.search(frame=int(frame), dataset=asf.DATASET.ARIA_S1_GUNW, maxResults=1)[0] opts = asf.ASFSearchOptions( @@ -18,44 +49,17 @@ def stack_aria_gunw(frame: str): intersectsWith=dump_to_wkt(Polygon(reference.geometry['coordinates'][0])) ) - slc_stack = asf.search(opts=opts) + slc_products = asf.search(opts=opts) groups = defaultdict(list) - for product in slc_stack: + for product in slc_products: group_id = product.properties['platform'] + '_' + str(product.properties['orbit']) groups[group_id].append(product) # dateparser.parse(str(value)) - aria_groups = [ + return reference, [ { 'date': min(dateparser.parse(product.properties['startTime']) for product in group), 'products': [product for product in group], } for group in groups.values() - ] - - # track group index on each product, naively choose first granule available - for idx, group in enumerate(aria_groups): - group_granule_idx = None - for idy, product in enumerate(group['products']): - product.properties['groupIDX'] = idx - if group_granule_idx is None: - if product.has_baseline(): - group_granule_idx = idy - - group['group_granule_idx'] = group_granule_idx - - - - stack = asf.ASFSearchResults([group['products'][group['group_granule_idx']] for group in aria_groups if group['group_granule_idx'] is not None]) - target_stack, warnings = asf.baseline.get_baseline_from_stack(reference, stack) - for product in target_stack: - group_idx = product.properties.pop('groupIDX') - aria_groups[group_idx]['perpendicularBaseline'] = product.properties['perpendicularBaseline'] - aria_groups[group_idx]['temporalBaseline'] = product.properties['temporalBaseline'] - - for group in aria_groups: - for idx, product in enumerate(group['products']): - group['products'][idx] = product.properties['sceneName'] - group['date'] = group['date'].strftime('%Y-%m-%dT%H:%M:%SZ') - - return aria_groups \ No newline at end of file + ] \ No newline at end of file From c73b02c4154f9e87159aceef9526a15b46199b95 Mon Sep 17 00:00:00 2001 From: SpicyGarlicAlbacoreRoll Date: Mon, 30 Jun 2025 11:57:03 -0800 Subject: [PATCH 2/8] bump asf-search to 9.0.4 --- CHANGELOG.md | 1 + requirements.txt | 2 +- src/SearchAPI/application/application.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a4db33a..bd64e68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,6 +33,7 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
### Changed - Aria stack supports different output types - asf_search uses `SearchAPISession` by default for search queries +- bump asf-search to v9.0.4 ------ ## [1.0.4](https://github.com/asfadmin/Discovery-SearchAPI-v3/compare/v1.0.3...v1.0.4) diff --git a/requirements.txt b/requirements.txt index f8ed52f..4764494 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,7 +22,7 @@ ujson==5.7.0 uvicorn==0.21.1 watchfiles==0.19.0 -asf_search==9.0.2 +asf_search==9.0.4 python-json-logger==2.0.7 pyshp==2.1.3 diff --git a/src/SearchAPI/application/application.py b/src/SearchAPI/application/application.py index ecde580..f44fbe4 100644 --- a/src/SearchAPI/application/application.py +++ b/src/SearchAPI/application/application.py @@ -107,7 +107,7 @@ async def query_baseline(searchOptions: BaselineSearchOptsModel = Depends(proces media_type='text/html; charset=utf-8', headers=constants.DEFAULT_HEADERS ) - return + stack = stack_aria_gunw(reference) response_info = as_output(stack, output=output) return Response(**response_info) From 5bb32d8742d940778ebeef5ad55696dc84422b54 Mon Sep 17 00:00:00 2001 From: SpicyGarlicAlbacoreRoll Date: Wed, 2 Jul 2025 11:48:39 -0800 Subject: [PATCH 3/8] feat: aria stacking uses aria frame id instead of frame number, add asf_enumeration package --- CHANGELOG.md | 1 + requirements.txt | 1 + src/SearchAPI/application/search.py | 63 +++++------------------------ 3 files changed, 12 insertions(+), 53 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bd64e68..74ef5ec 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,6 +32,7 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ### Changed - Aria stack supports different output types +- Aria stacking uses aria frame id instead of frame number for stacking - asf_search uses `SearchAPISession` by default for search queries - bump asf-search to v9.0.4 diff --git a/requirements.txt b/requirements.txt index 4764494..01b83c9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,6 +24,7 @@ watchfiles==0.19.0 asf_search==9.0.4 python-json-logger==2.0.7 +asf_enumeration pyshp==2.1.3 geopandas diff --git a/src/SearchAPI/application/search.py b/src/SearchAPI/application/search.py index 7c43303..1291adf 100644 --- a/src/SearchAPI/application/search.py +++ b/src/SearchAPI/application/search.py @@ -2,64 +2,21 @@ import dateparser import asf_search as asf -from asf_search import ASFSearchResults +from asf_search import ASFSearchResults, ASFProduct from shapely.wkt import dumps as dump_to_wkt from shapely import Polygon -def stack_aria_gunw(frame: str): - reference, aria_groups = get_aria_groups_for_frame(frame) +from asf_enumeration import aria_s1_gunw - # track group index on each product, naively choose first granule available - for idx, group in enumerate(aria_groups): - group_granule_idx = None - for idy, product in enumerate(group['products']): - product.properties['groupIDX'] = idx - if group_granule_idx is None: - if product.has_baseline(): - group_granule_idx = idy - - group['group_granule_idx'] = group_granule_idx - - +def stack_aria_gunw(frame_id: str): + reference, aria_groups = get_aria_groups_for_frame(frame_id) - stack = ASFSearchResults([group['products'][group['group_granule_idx']] for group in aria_groups if group['group_granule_idx'] is not None]) + stack = ASFSearchResults([group.products[0] for group in aria_groups]) target_stack, warnings = asf.baseline.get_baseline_from_stack(reference, stack) - # for product in target_stack: - # group_idx = product.properties.pop('groupIDX') - 
# aria_groups[group_idx]['perpendicularBaseline'] = product.properties['perpendicularBaseline'] - # aria_groups[group_idx]['temporalBaseline'] = product.properties['temporalBaseline'] - - # for group in aria_groups: - # for idx, product in enumerate(group['products']): - # group['products'][idx] = product.properties['sceneName'] - # group['date'] = group['date'].strftime('%Y-%m-%dT%H:%M:%SZ') - - return target_stack - -def get_aria_groups_for_frame(frame: str): - reference = asf.search(frame=int(frame), dataset=asf.DATASET.ARIA_S1_GUNW, maxResults=1)[0] - opts = asf.ASFSearchOptions( - relativeOrbit=reference.properties['pathNumber'], - processingLevel=asf.PRODUCT_TYPE.SLC, - dataset=asf.DATASET.SENTINEL1, - beamMode='IW', - polarization=['VV','VV+VH'], - flightDirection=reference.properties['flightDirection'], - intersectsWith=dump_to_wkt(Polygon(reference.geometry['coordinates'][0])) - ) - - slc_products = asf.search(opts=opts) + return target_stack - groups = defaultdict(list) - for product in slc_products: - group_id = product.properties['platform'] + '_' + str(product.properties['orbit']) - groups[group_id].append(product) - # dateparser.parse(str(value)) - return reference, [ - { - 'date': min(dateparser.parse(product.properties['startTime']) for product in group), - 'products': [product for product in group], - } - for group in groups.values() - ] \ No newline at end of file +def get_aria_groups_for_frame(frame: str) -> tuple[ASFProduct, list[aria_s1_gunw.Sentinel1Acquisition]]: + aria_frame = aria_s1_gunw.get_frame(frame_id=int(frame)) + groups = aria_s1_gunw.get_acquisitions(aria_frame) + return groups[0].products[0], groups From a89d3c3d7b5234f1f19610fc7855306be7bc6559 Mon Sep 17 00:00:00 2001 From: SpicyGarlicAlbacoreRoll Date: Mon, 7 Jul 2025 11:09:22 -0800 Subject: [PATCH 4/8] change: bump query limit to 3000, raise error if expected output exceeds results --- CHANGELOG.md | 1 + src/SearchAPI/application/asf_opts.py | 17 ++++++++++++----- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 74ef5ec..4b9b835 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,6 +35,7 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - Aria stacking uses aria frame id instead of frame number for stacking - asf_search uses `SearchAPISession` by default for search queries - bump asf-search to v9.0.4 +- increase search query limit to 3000, raise error if expected output is over that number ------ ## [1.0.4](https://github.com/asfadmin/Discovery-SearchAPI-v3/compare/v1.0.3...v1.0.4) diff --git a/src/SearchAPI/application/asf_opts.py b/src/SearchAPI/application/asf_opts.py index 158d042..9ee5027 100644 --- a/src/SearchAPI/application/asf_opts.py +++ b/src/SearchAPI/application/asf_opts.py @@ -177,13 +177,20 @@ async def process_search_request(request: Request) -> SearchOptsModel: try: # we are no longer allowing unbounded searches - if query_opts.granule_list is None and query_opts.product_list is None: + if query_opts.granule_list is None and query_opts.product_list is None and output != 'python': if query_opts.maxResults is None: - query_opts.maxResults = asf.search_count(opts=query_opts) + maxResults = asf.search_count(opts=query_opts) + if maxResults > 3000: + raise ValueError( + ( + 'SearchAPI no longer supports unbounded searches with expected results over 3000, ' + 'please use the asf-search python module for long-lived searches or set `maxResults` to 3000 or less.' 
+ '\nTo have SearchAPI automatically generate a python script for the equivalent search to your SearchAPI query ' + 'set `output=python`' + ) + ) elif query_opts.maxResults <= 0: - raise ValueError(f'Search keyword "maxResults" must be greater than 0') - - query_opts.maxResults = min(1500, query_opts.maxResults) + raise ValueError('Search keyword "maxResults" must be greater than 0') searchOpts = SearchOptsModel(opts=query_opts, output=output, merged_args=merged_args, request_method=request.method) except (ValueError, ValidationError) as exc: From d9a8900c460ce65b4b6901353366a6582701c0e5 Mon Sep 17 00:00:00 2001 From: SpicyGarlicAlbacoreRoll Date: Mon, 7 Jul 2025 11:14:14 -0800 Subject: [PATCH 5/8] fix: decrease max allowed results to 2000 --- CHANGELOG.md | 2 +- src/SearchAPI/application/asf_opts.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4b9b835..67ec2c1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,7 +35,7 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - Aria stacking uses aria frame id instead of frame number for stacking - asf_search uses `SearchAPISession` by default for search queries - bump asf-search to v9.0.4 -- increase search query limit to 3000, raise error if expected output is over that number +- increase search query limit to 2000, raise error if expected output is over that number ------ ## [1.0.4](https://github.com/asfadmin/Discovery-SearchAPI-v3/compare/v1.0.3...v1.0.4) diff --git a/src/SearchAPI/application/asf_opts.py b/src/SearchAPI/application/asf_opts.py index 9ee5027..7ee1a25 100644 --- a/src/SearchAPI/application/asf_opts.py +++ b/src/SearchAPI/application/asf_opts.py @@ -180,11 +180,11 @@ async def process_search_request(request: Request) -> SearchOptsModel: if query_opts.granule_list is None and query_opts.product_list is None and output != 'python': if query_opts.maxResults is None: maxResults = asf.search_count(opts=query_opts) - if maxResults > 3000: + if maxResults > 2000: raise ValueError( ( - 'SearchAPI no longer supports unbounded searches with expected results over 3000, ' - 'please use the asf-search python module for long-lived searches or set `maxResults` to 3000 or less.' + 'SearchAPI no longer supports unbounded searches with expected results over 2000, ' + 'please use the asf-search python module for long-lived searches or set `maxResults` to 2000 or less.' '\nTo have SearchAPI automatically generate a python script for the equivalent search to your SearchAPI query ' 'set `output=python`' ) From 3ce8032550157221b1a2928623f66cca29213943 Mon Sep 17 00:00:00 2001 From: SpicyGarlicAlbacoreRoll Date: Mon, 7 Jul 2025 11:19:48 -0800 Subject: [PATCH 6/8] fix: remove newline from error message --- src/SearchAPI/application/asf_opts.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/SearchAPI/application/asf_opts.py b/src/SearchAPI/application/asf_opts.py index 7ee1a25..ab81ee1 100644 --- a/src/SearchAPI/application/asf_opts.py +++ b/src/SearchAPI/application/asf_opts.py @@ -184,8 +184,8 @@ async def process_search_request(request: Request) -> SearchOptsModel: raise ValueError( ( 'SearchAPI no longer supports unbounded searches with expected results over 2000, ' - 'please use the asf-search python module for long-lived searches or set `maxResults` to 2000 or less.' 
- '\nTo have SearchAPI automatically generate a python script for the equivalent search to your SearchAPI query ' + 'please use the asf-search python module for long-lived searches or set `maxResults` to 2000 or less. ' + 'To have SearchAPI automatically generate a python script for the equivalent search to your SearchAPI query ' 'set `output=python`' ) ) From 8038795af6bc427be9c801fcc4c0a7e36ada6fb7 Mon Sep 17 00:00:00 2001 From: SpicyGarlicAlbacoreRoll Date: Mon, 7 Jul 2025 11:26:28 -0800 Subject: [PATCH 7/8] fix: support count with unbounded results --- src/SearchAPI/application/asf_opts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/SearchAPI/application/asf_opts.py b/src/SearchAPI/application/asf_opts.py index ab81ee1..6b27e3d 100644 --- a/src/SearchAPI/application/asf_opts.py +++ b/src/SearchAPI/application/asf_opts.py @@ -177,7 +177,7 @@ async def process_search_request(request: Request) -> SearchOptsModel: try: # we are no longer allowing unbounded searches - if query_opts.granule_list is None and query_opts.product_list is None and output != 'python': + if query_opts.granule_list is None and query_opts.product_list is None and output not in ['python', 'count']: if query_opts.maxResults is None: maxResults = asf.search_count(opts=query_opts) if maxResults > 2000: From 0a49ea25cf86d242d90ddb41082d186a4a6c2cba Mon Sep 17 00:00:00 2001 From: SpicyGarlicAlbacoreRoll Date: Mon, 7 Jul 2025 13:44:39 -0800 Subject: [PATCH 8/8] fix: baseline works again with search request size limit --- src/SearchAPI/application/asf_opts.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/SearchAPI/application/asf_opts.py b/src/SearchAPI/application/asf_opts.py index 6b27e3d..ccf7313 100644 --- a/src/SearchAPI/application/asf_opts.py +++ b/src/SearchAPI/application/asf_opts.py @@ -147,7 +147,7 @@ async def get_body(request: Request): return {} -async def process_search_request(request: Request) -> SearchOptsModel: +async def process_search_request(request: Request, is_baseline: bool = False) -> SearchOptsModel: """ Extracts the request's query+body params, returns ASFSearchOptions, request method, output format, and a dictionary of the merged request args wrapped in a pydantic model (SearchOptsModel) @@ -177,7 +177,12 @@ async def process_search_request(request: Request) -> SearchOptsModel: try: # we are no longer allowing unbounded searches - if query_opts.granule_list is None and query_opts.product_list is None and output not in ['python', 'count']: + if ( + query_opts.granule_list is None + and query_opts.product_list is None + and output not in ['python', 'count'] + and not is_baseline + ): if query_opts.maxResults is None: maxResults = asf.search_count(opts=query_opts) if maxResults > 2000: @@ -201,7 +206,7 @@ async def process_search_request(request: Request) -> SearchOptsModel: async def process_baseline_request(request: Request) -> BaselineSearchOptsModel: """Processes request to baseline endpoint""" - searchOpts = await process_search_request(request=request) + searchOpts = await process_search_request(request=request, is_baseline=True) reference = searchOpts.merged_args.get('reference') try: baselineSearchOpts = BaselineSearchOptsModel(**searchOpts.model_dump(), reference=reference)
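
Illustrative usage sketch of the client-facing behavior introduced in this series. This is a hypothetical example, not code from the patches: the SearchAPI host and endpoint routes below are placeholders, and the ARIA frame id is made up; the parameter names (reference, dataset, output, maxResults), the 2000-result cap, and the SearchAPISession Client-Id behavior come from the diffs above.

# A minimal sketch, assuming the package layout under src/SearchAPI and a deployed
# SearchAPI instance; the host, routes, and frame id are assumptions, not taken
# from the patches.
import requests

from SearchAPI.application import SearchAPISession

# PATCH 1/8: SearchAPISession wraps asf-search's ASFSession and prefixes the
# upstream Client-Id header so SearchAPI traffic is distinguishable.
session = SearchAPISession()
print(session.headers.get('Client-Id'))  # e.g. 'SearchAPI_<asf-search client id>'

SEARCH_API = 'https://searchapi.example.com'          # placeholder host
BASELINE = f'{SEARCH_API}/services/search/baseline'   # placeholder route

# PATCH 1/8 + 2/8: ARIA S1 GUNW stacking honors the `output` keyword;
# output=count returns only the number of acquisition groups for the frame.
count = requests.get(BASELINE, params={
    'reference': '1234',          # hypothetical ARIA frame id
    'dataset': 'ARIA S1 GUNW',    # dataset keyword matching asf.DATASET.ARIA_S1_GUNW
    'output': 'count',
})
print(count.text)

# PATCHES 4/8-8/8: unbounded searches are capped. A query whose expected result
# count exceeds 2000 is rejected unless output is 'python' or 'count', the request
# is a baseline request, or maxResults is set to 2000 or less.
results = requests.get(f'{SEARCH_API}/services/search/param', params={  # placeholder route
    'platform': 'SENTINEL-1',
    'maxResults': 2000,
    'output': 'geojson',
})
print(results.status_code)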