diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..cf44dfa
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,31 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: "[Bug]"
+labels: ''
+assignees: ''
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+Provide an example url to reproduce the behavior.
+
+\*Reminder: If authentication is required **do not** leave any sensitive credentials in the snippet.
+
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Desktop (please complete the following information):**
+ - OS: [e.g. Ubuntu 20.04]
+ - Python Version [e.g. python3.11]
+ - Pip Environment ['python3 -m pip freeze']
+
+**Additional context**
+Add any other context about the problem here.
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..79764a6
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,43 @@
+# Merge Requirements:
+The following requirements must be met for your pull request to be considered for review & merging. Until these requirements are met please mark the pull request as a draft.
+
+## Purpose
+Why is this pull request necessary? Provide a reference to a related issue in this repository that your pull request addresses (if applicable).
+
+## Description
+A brief description of the changes proposed in the pull request. If there are any changes to packaging requirements, please list them. If it's a new endpoint, list:
+- Supported HTTP methods
+- input params
+- output
+- errors it can raise
+
+## Snippet
+If the pull request provides a new feature, provide an example demonstrating the use-case(s) for this pull request (If applicable).
+
+For example, if you are adding a new endpoint, show how we might call it and what kind of output we could expect:
+``` bash
+curl 'http://127.0.0.1:8080/services/utils/useful_new_endpoint?param1=value1&param2=value2'
+```
+
+If it modifies an existing endpoint (like a new output type for `/services/search/param`) show an example of what the output would look like.
+
+## Error/Warning/Regression Free
+Your code runs without any unhandled errors, warnings, or regressions
+
+## Unit Tests
+
+You have added unit tests to the test suite; see the [README Testing section](https://github.com/asfadmin/Discovery-SearchAPI-v3/tree/dev?tab=readme-ov-file#writing-tests) for an overview on adding tests to the test suite.
+
+## Target Merge Branch
+Your pull request targets the `dev` branch
+
+
+***
+
+### Checklist
+- [ ] Purpose
+- [ ] Description
+- [ ] Snippet
+- [ ] Error/Warning/Regression Free
+- [ ] Unit Tests
+- [ ] Target Merge Branch
\ No newline at end of file
diff --git a/.github/workflows/deploy-prod-staging.yml b/.github/workflows/deploy-prod-staging.yml
index 6ff897a..6bc9b34 100644
--- a/.github/workflows/deploy-prod-staging.yml
+++ b/.github/workflows/deploy-prod-staging.yml
@@ -78,6 +78,7 @@ jobs:
with:
aws-account-id: ${{ secrets.AWS_ACCOUNT_ID }}
staging: true
+ maturity: prod-staging
run-prod-staging-integration-tests:
needs: [deploy-prod-staging]
diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml
index 20f5d0c..a1ed741 100644
--- a/.github/workflows/deploy-prod.yml
+++ b/.github/workflows/deploy-prod.yml
@@ -40,4 +40,5 @@ jobs:
uses: ./.github/workflows/search-api-composite
with:
aws-account-id: ${{ secrets.AWS_ACCOUNT_ID }}
+ maturity: prod
diff --git a/.github/workflows/deploy-test-staging.yml b/.github/workflows/deploy-test-staging.yml
index 14c9df0..14d83ee 100644
--- a/.github/workflows/deploy-test-staging.yml
+++ b/.github/workflows/deploy-test-staging.yml
@@ -77,7 +77,7 @@ jobs:
uses: ./.github/workflows/search-api-composite
with:
aws-account-id: ${{ secrets.AWS_ACCOUNT_ID }}
- staging: true
+ maturity: test-staging
run-test-staging-integration-tests:
needs: [deploy-test-staging]
diff --git a/.github/workflows/deploy-test.yml b/.github/workflows/deploy-test.yml
index 24372eb..1073b73 100644
--- a/.github/workflows/deploy-test.yml
+++ b/.github/workflows/deploy-test.yml
@@ -40,3 +40,4 @@ jobs:
uses: ./.github/workflows/search-api-composite
with:
aws-account-id: ${{ secrets.AWS_ACCOUNT_ID }}
+ maturity: test
diff --git a/.github/workflows/search-api-composite/action.yml b/.github/workflows/search-api-composite/action.yml
index 51d1dcb..c9bafa6 100644
--- a/.github/workflows/search-api-composite/action.yml
+++ b/.github/workflows/search-api-composite/action.yml
@@ -13,10 +13,9 @@ inputs:
security-group:
required: true
type: string
-
- staging:
- required: false
- type: boolean
+ maturity:
+ required: true
+ type: string
runs:
using: "composite"
@@ -57,5 +56,4 @@ runs:
--context vpc_id=${{ inputs.vpc-id }} \
--context subnet_ids=${{ inputs.subnet-ids }} \
--context security_group=${{ inputs.security-group }} \
- --context staging=${{ inputs.staging }}
-
+ --context api_stage=${{ inputs.maturity }}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0adf32e..3670a95 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -25,6 +25,16 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-
-->
+------
+## [1.0.8](https://github.com/asfadmin/Discovery-SearchAPI-v3/compare/v1.0.7...v1.0.8)
+### Changed
+- bump asf-search to v10.1.1 for NISAR product type file sizes, urgent response now searchable with product types, and ARIA-S1 GUNW Stacking support, better nisar metadata output formatting, OPERA-S1 `TROPO-ZENITH`
+
+### Fixed
+- boolean values are properly capitalized in `python` output file
+- API maturity set for each level of deployment stage
+- API maturity loaded once per api instance
+
------
## [1.0.7](https://github.com/asfadmin/Discovery-SearchAPI-v3/compare/v1.0.6...v1.0.7)
### Changed
diff --git a/Dockerfile b/Dockerfile
index a5ffcaf..f8726e6 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -2,6 +2,8 @@ FROM public.ecr.aws/docker/library/python:3.12
COPY --from=public.ecr.aws/awsguru/aws-lambda-adapter:0.8.3 /lambda-adapter /opt/extensions/lambda-adapter
COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/uv
+ARG MATURITY="local"
+ENV MATURITY=${MATURITY}
ARG HOST=0.0.0.0
ENV HOST=${HOST}
ARG PORT=8080
diff --git a/README.es.md b/README.es.md
new file mode 100644
index 0000000..b777ed5
--- /dev/null
+++ b/README.es.md
@@ -0,0 +1,152 @@
+# SearchAPI-v3
+
+[](./README.md)
+
+
+SearchAPI-v3 es un contenedor alrededor del [módulo de python asf-search](https://github.com/asfadmin/Discovery-asf_search) utilizando un despliegue sin servidor con el framework FastAPI y AWS Lambda.
+
+### Endpoints principales
+
+
+
+
+ | Endpoint |
+ Descripción |
+ Métodos |
+
+
+
+
+ | `/` |
+ Información de configuración del servidor |
+ `GET` |
+
+
+ | `/health` |
+ igual que root `/` |
+ `GET` |
+
+
+ | `/services/search/param` |
+ Búsqueda mediante cualquier parámetro válido de asf-search |
+ `GET` `POST` `HEAD` |
+
+
+ | `/services/search/baseline` |
+ Crear un stack de línea base a partir de una referencia dada y dataset opcional |
+ `GET` `POST` `HEAD` |
+
+
+
+
+## Desarrollo
+
+### Ramificación
+
+
+
+
+ | Instancia |
+ Rama |
+ Descripción, Instrucciones, Notas |
+
+
+
+
+ | Características |
+ feat-* |
+ Siempre crear a partir de dev, para nuevas funciones |
+
+
+ | Problemas |
+ bugfix-* |
+ Siempre crear a partir de dev, para correcciones |
+
+
+ | desarrollo |
+ dev |
+ Aquí comienza la integración inicial; despliega en el entorno test-staging. |
+
+
+ | pruebas |
+ test |
+ Solo acepta fusiones desde la rama development; despliegue en el entorno de prueba |
+
+
+ | preproducción e integración |
+ prod-staging |
+ Solo acepta fusiones desde la rama testing; despliegue en el entorno prod-staging |
+
+
+ | release |
+ prod |
+ Solo acepta fusiones desde la rama prod-staging; rama de release, despliegue a producción |
+
+
+
+
+### Instalación
+
+Para instalar localmente, ejecute lo siguiente en una terminal (se recomienda altamente hacerlo en un entorno virtual):
+```bash
+pip install -r requirements.txt
+pip install .
+```
+
+Para instalar los requisitos de prueba:
+```bash
+pip install -r tests/requirements.txt
+```
+
+### Ejecución local
+
+Para ejecutar la API localmente, ejecute lo siguiente en una terminal:
+```bash
+uvicorn src.SearchAPI.application:app --reload --port 8080
+```
+La API ahora debería estar disponible en su localhost en http://127.0.0.1:8080 y se puede consultar con su navegador o herramienta de red de preferencia.
+
+
+
+
+## Pruebas
+
+### Ejecutar el conjunto de pruebas localmente
+Después de ejecutar la API (vea `Ejecución local` arriba), para correr la suite de pruebas localmente ejecute:
+```bash
+pytest --api "http://127.0.0.1:8080" -n auto "tests/yml_tests/"
+```
+
+### Escribir pruebas
+Las pruebas deben escribirse en las subcarpetas y archivos relevantes en `/tests`.
+
+La suite de pruebas usa el plugin `pytest-automation` que permite definir y reutilizar entradas para casos de prueba en formato yaml. Los casos de prueba se escriben en archivos dentro de `tests/yml_tests/`, y los recursos reutilizables en `tests/yml_tests/Resources/`.
+
+```yaml
+
+tests:
+- Test Nisar Product L1 RSLC: # este es un caso de prueba
+ product: NISAR_L1_PR_RSLC_087_039_D_114_2005_DHDH_A_20251102T222008_20251102T222017_T00407_N_P_J_001.yml # este archivo debe estar en `tests/yml_tests/Resources/`. Vea otros archivos yml en la carpeta para observar cómo podría estructurar el objeto yml
+ product_level: L1
+
+- Test Nisar Product L2 GSLC: # este es otro caso de prueba
+ product: NISAR_L2_PR_GSLC_087_039_D_112_2005_DHDH_A_20251102T221859_20251102T221935_T00407_N_F_J_001.yml
+ product_level: L2
+```
+
+Podemos crear el mapeo desde nuestros casos de prueba yaml en `tests/yml_tests/pytest-config.yml`, que se usará para llamar la función de python deseada en `tests/yml_tests/pytest-managers.py`.
+
+En `tests/yml_tests/pytest-config.yml`:
+```yaml
+- Para ejecutar pruebas de ASFProduct:
+ required_keys: ['product', 'product_level'] # las claves que requiere el caso de prueba
+ method: test_NISARProduct # la función de Python en pytest-managers.py que será llamada
+ required_in_title: Test Nisar Product # (OPCIONAL) solo ejecutará los casos de prueba que tengan `Test Nisar Product` en el nombre; por ello, los dos casos anteriores se ejecutarían con nuestras pruebas.
+```
+
+En `tests/yml_tests/pytest-managers.py`:
+
+```python
+def test_new_endpoint(client=None, **args) -> None:
+ test_new_endpoint(client=client, **args)
+```
diff --git a/README.md b/README.md
index c308a28..026c7bb 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,152 @@
# SearchAPI-v3
-- Login to aws console using Kion
-- Find the region that has the VPC and note account number, vpc_id, subnet_ids and security_group. Subnet_ids should be a comma separated list
-- Get temp cli credentials from Kion and add them to your shell
-- Run CDK bootstrap in region with the VPC using cdk-bootstrap-example.sh and filling in the account number, vpc_id, subnet_ids and security_group
-- If not already created, make an GitHubActionsOidcProvider using the cdk/oidc/oidc-provider.yml template
-- Create OIDC role using cloudformation template cdk/oidc/github-actions-oidc.yml. For 'ActionsRoleName' parameter put 'SearchAPIActionsOIDCRole'
-- Create a github environment with params using the same values from the CDK bootstrap. AWS_ACCOUNT_ID, SECURITY_GROUP, SUBNET_IDS, VPC_ID
+
+[](./README.es.md)
+
+SearchAPI-v3 is a wrapper around the [asf-search python module](https://github.com/asfadmin/Discovery-asf_search) using a serverless deployment with the FastAPI web framework and AWS lambda.
+
+### Main Endpoints
+
+
+
+
+ | Endpoint |
+ Description |
+ Methods |
+
+
+
+
+ | `/` |
+ server configuration info |
+ `GET` |
+
+
+ | `/health` |
+ same as root `/` |
+ `GET` |
+
+
+ | `/services/search/param` |
+ Search via any valid asf-search parameters |
+ `GET` `POST` `HEAD` |
+
+
+ | `/services/search/baseline` |
+ Create a baseline stack based off a given reference and optional dataset |
+ `GET` `POST` `HEAD` |
+
+
+
+
+## Development
+
+### Branching
+
+
+
+
+ | Instance |
+ Branch |
+ Description, Instructions, Notes |
+
+
+
+
+ | Features |
+ feat-* |
+ Always branch off dev, for new features |
+
+
+ | Issues |
+ bugfix-* |
+ Always branch off dev, for bugfixes |
+
+
+ | development |
+ dev |
+ Beginning integration testing happens here, deploys to test-staging deployment |
+
+
+ | testing |
+ test |
+ Only accepts merges from development, test deployment |
+
+
+ | production staging and integration testing |
+ prod-staging |
+ Only acepts merges from testing, deploys to prod-staging deployment |
+
+
+ | release |
+ prod |
+ only accepts merges from prod-staging, release branch, prod deployment |
+
+
+
+
+### Installation
+
+To install locally run the following in a terminal (we highly recommend installing using a virtual environment)
+```bash
+pip install -r requirements.txt
+pip install .
+```
+
+To install test requirements, run
+```bash
+pip install -r tests/requirements.txt
+```
+
+### Running Locally
+
+To run the API locally run the following in a terminal
+```bash
+uvicorn src.SearchAPI.application:app --reload --port 8080
+```
+The api should now be available via your localhost at http://127.0.0.1:8080 and can be opened and queried with your browser or network tool of choice.
+
+
+
+
+## Testing
+
+## Running the Test Suite Locally
+After running the API (see `Running Locally` above), in order to run the test suite locally run the following:
+```bash
+pytest --api "http://127.0.0.1:8080" -n auto "tests/yml_tests/"
+```
+
+## Writing tests
+Tests should be written to relevant subfolder & files in `/tests`
+
+The test suite uses the `pytest-automation` pytest plugin which allows us to define and re-use input for test cases in the yaml format. Test cases are written to files in `tests/yml_tests/`, and reusable resources for those tests `tests/yml_tests/Resources/`.
+
+```yaml
+
+tests:
+- Test Nisar Product L1 RSLC: # this is a test case
+ product: NISAR_L1_PR_RSLC_087_039_D_114_2005_DHDH_A_20251102T222008_20251102T222017_T00407_N_P_J_001.yml # this file should be in `tests/yml_tests/Resources/`. See other yml files in the folder to see how you might structure the yml object
+ product_level: L1
+
+- Test Nisar Product L2 GSLC: # this is another test case
+ product: NISAR_L2_PR_GSLC_087_039_D_112_2005_DHDH_A_20251102T221859_20251102T221935_T00407_N_F_J_001.yml
+ product_level: L2
+```
+
+We can create the mapping from our yaml test cases in `tests/yml_tests/pytest-config.yml`, which will be used to call the desired python function in `tests/yml_tests/pytest-managers.py`
+
+In `tests/yml_tests/pytest-config.yml`:
+```yaml
+- For running ASFProduct tests:
+ required_keys: ['product', 'product_level'] # the keys the test case requires
+ method: test_NISARProduct # the python function in pytest-managers.py that will be called
+ required_in_title: Test Nisar Product # (OPTIONAL) will only run test cases defined with `Test Nisar Product` in the name, so the above two test cases would be run with our tests.
+```
+
+
+In `tests/yml_tests/pytest-managers.py`:
+```python
+def test_new_endpoint(**args) -> None: # Must match the name in pytest-config.yml like above for `method`
+ test_new_endpoint(client=client, **args)
+```
diff --git a/cdk/app.py b/cdk/app.py
index 9868486..8781e2a 100644
--- a/cdk/app.py
+++ b/cdk/app.py
@@ -8,12 +8,12 @@
app = cdk.App()
-staging = app.node.try_get_context('staging')
-if staging is None:
- staging = False
-
+staging = False
suffix = ''
-if staging:
+api_stage = app.node.try_get_context('api_stage')
+
+if api_stage.endswith('staging'):
+ staging = True
suffix = '-Staging'
SearchAPIStack(app, f"SearchAPIStack{suffix}",
@@ -24,6 +24,7 @@
# Uncomment the next line to specialize this stack for the AWS Account
# and Region that are implied by the current CLI configuration.
staging=staging,
+ api_stage=api_stage,
env=cdk.Environment(
account=os.getenv('CDK_DEFAULT_ACCOUNT'),
region=os.getenv('CDK_DEFAULT_REGION')
diff --git a/cdk/cdk/cdk_stack.py b/cdk/cdk/cdk_stack.py
index bd46d7a..80999df 100644
--- a/cdk/cdk/cdk_stack.py
+++ b/cdk/cdk/cdk_stack.py
@@ -12,7 +12,7 @@
class SearchAPIStack(Stack):
- def __init__(self, scope: Construct, construct_id: str, staging: bool = False, **kwargs) -> None:
+ def __init__(self, scope: Construct, construct_id: str, api_stage: str, staging: bool = False, **kwargs) -> None:
super().__init__(scope, construct_id, **kwargs)
try:
@@ -60,7 +60,7 @@ def __init__(self, scope: Construct, construct_id: str, staging: bool = False, *
memory_size=5308,
code=lambda_.DockerImageCode.from_image_asset(
directory='..',
- # build_args={'MATURITY': }
+ build_args={'MATURITY': api_stage},
),
**lambda_vpc_kwargs,
)
diff --git a/requirements.txt b/requirements.txt
index 232a788..de9c4c4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,13 +1,13 @@
anyio==3.6.2
certifi>=2023.7.22
click==8.1.3
-dnspython==2.3.0
+dnspython>=2.6.1
email-validator>=2.0
-fastapi>=0.115.12
-h11==0.14.0
+fastapi>=0.116.1
+h11<0.15.0
httpcore==0.16.3
-httpx==0.23.3
-idna==3.4
+httpx<0.29.0
+idna==3.10
itsdangerous==2.1.2
jinja2>=3.1.3
orjson>=3.9.15
@@ -17,14 +17,13 @@ python-multipart>=0.0.7
PyYAML==6.0.2
rfc3986==1.5.0
sniffio==1.3.0
-typing_extensions==4.10.0
+typing_extensions>=4.10.0
ujson==5.7.0
uvicorn==0.21.1
watchfiles==0.19.0
-asf_search==9.0.8
+asf-search[asf-enumeration]==10.1.1
python-json-logger==2.0.7
-asf_enumeration
pyshp==2.1.3
geopandas
diff --git a/src/SearchAPI/application/__init__.py b/src/SearchAPI/application/__init__.py
index 56ff4d4..f502421 100644
--- a/src/SearchAPI/application/__init__.py
+++ b/src/SearchAPI/application/__init__.py
@@ -2,6 +2,5 @@
from .output import *
from .logger import *
from .log_router import *
-from .search import *
from .SearchAPISession import *
from .application import *
\ No newline at end of file
diff --git a/src/SearchAPI/application/application.py b/src/SearchAPI/application/application.py
index f44fbe4..825924d 100644
--- a/src/SearchAPI/application/application.py
+++ b/src/SearchAPI/application/application.py
@@ -1,7 +1,6 @@
import json
import os
-from typing import Optional
import dateparser
import asf_search as asf
@@ -15,12 +14,10 @@
from .asf_opts import process_baseline_request, process_search_request, process_wkt_request
from .health import get_cmr_health
from .models import BaselineSearchOptsModel, SearchOptsModel
-from .output import as_output, get_asf_search_script, make_filename
+from .output import as_output, get_asf_search_script
from .files_to_wkt import FilesToWKT
from . import constants
from .SearchAPISession import SearchAPISession
-from .search import get_aria_groups_for_frame, stack_aria_gunw
-import time
from asf_search.ASFSearchOptions.config import config as asf_config
asf_config['session'] = SearchAPISession()
@@ -37,6 +34,9 @@
allow_headers=["*"],
)
+cfg = load_config_maturity()
+cmr_health = get_cmr_health(cfg['cmr_base'], cfg['cmr_health'])
+
@router.api_route("/services/search/param", methods=["GET", "POST", "HEAD"])
async def query_params(searchOptions: SearchOptsModel = Depends(process_search_request)):
@@ -98,19 +98,8 @@ async def query_baseline(searchOptions: BaselineSearchOptsModel = Depends(proces
reference = searchOptions.reference
request_method = searchOptions.request_method
- if searchOptions.opts.dataset is not None:
- if searchOptions.opts.dataset[0] == asf.DATASET.ARIA_S1_GUNW:
- if output.lower() == 'count':
- return Response(
- content=str(len(get_aria_groups_for_frame(reference)[1])),
- status_code=200,
- media_type='text/html; charset=utf-8',
- headers=constants.DEFAULT_HEADERS
- )
-
- stack = stack_aria_gunw(reference)
- response_info = as_output(stack, output=output)
- return Response(**response_info)
+ is_frame_based = searchOptions.opts.dataset is not None
+
# Load the reference scene:
if output.lower() == 'python':
@@ -125,15 +114,24 @@ async def query_baseline(searchOptions: BaselineSearchOptsModel = Depends(proces
'Content-Disposition': f"attachment; filename={file_name}",
}
)
- try:
- reference_product = asf.granule_search(granule_list=[reference], opts=opts)[0]
- except (KeyError, IndexError, ValueError) as exc:
- raise HTTPException(detail=f"Reference scene not found: {reference}", status_code=400) from exc
+
+ # reference_product = None
+ if is_frame_based and opts.dataset[0] == asf.DATASET.ARIA_S1_GUNW:
+ try:
+ reference_product = asf.search(frame=int(reference), opts=opts, maxResults=1)[0]
+ except (KeyError, IndexError, ValueError) as exc:
+ raise HTTPException(detail=f"Reference scene not found with frame: {reference}", status_code=400) from exc
+
+ else:
+ try:
+ reference_product = asf.granule_search(granule_list=[reference], opts=opts)[0]
+ except (KeyError, IndexError, ValueError) as exc:
+ raise HTTPException(detail=f"Reference scene not found: {reference}", status_code=400) from exc
try:
if reference_product.get_stack_opts() is None:
reference_product = asf.ASFStackableProduct(args={'umm': reference_product.umm, 'meta': reference_product.meta}, session=reference_product.session)
- if not reference_product.has_baseline() or not reference_product.is_valid_reference():
+ if (not reference_product.has_baseline() or not reference_product.is_valid_reference() or not reference_product.has_baseline()) and not is_frame_based:
raise asf.exceptions.ASFBaselineError(f"Requested reference scene has no baseline")
except (asf.exceptions.ASFBaselineError, ValueError) as exc:
raise HTTPException(detail=f"Search failed to find results: {exc}", status_code=400)
@@ -231,6 +229,31 @@ async def file_to_wkt(files: list[UploadFile]):
headers=constants.DEFAULT_HEADERS
)
+# example: https://api.daac.asf.alaska.edu/services/redirect/NISAR_L2_STATIC/{granule_id}.h5
+# @router.get('/services/redirect/{short_name}/{granule_id}')
+# async def nisar_static_layer(short_name: str, granule_id: str):
+# """
+# short_name: the CMR static layer collection short name to search
+# granule_id: the granule id of the product to find the static layer for
+
+# returns: redirect to file url
+# """
+# opts = asf.ASFSearchOptions(host=cfg['cmr_base'])
+
+# try:
+# granule = asf.search(
+# granule_list=[granule_id],
+# opts=opts
+# )[0]
+# except IndexError:
+# raise HTTPException(status_code=400, detail=f'Unable to find static layer, provided scene named "{granule_id}" not found in CMR record')
+
+# static_layer = granule.get_static_layer(opts=asf.ASFSearchOptions(shortName=short_name))
+# if static_layer is None:
+# raise HTTPException(status_code=500, detail=f'Static layer not found for scene named "{granule_id}"')
+
+# return RedirectResponse(static_layer.properties['url'])
+
def validate_wkt(wkt: str):
try:
@@ -259,14 +282,11 @@ async def health_check():
api_logger.info(exc)
api_version = {'version': 'unknown'}
- cfg = load_config_maturity()
- cmr_health = get_cmr_health(cfg['cmr_base'], cfg['cmr_health'])
-
api_health = {
'ASFSearchAPI': {
'ok?': True,
'version': api_version['version'],
- 'config': load_config_maturity()
+ 'config': cfg
},
'CMRSearchAPI': cmr_health
}
diff --git a/src/SearchAPI/application/models.py b/src/SearchAPI/application/models.py
index c1a5e9e..38909d6 100644
--- a/src/SearchAPI/application/models.py
+++ b/src/SearchAPI/application/models.py
@@ -14,7 +14,7 @@ class SearchOptsModel(BaseModel):
"""
opts: InstanceOf[ASFSearchOptions]
request_method: str # ["GET", "POST", "HEAD"]
- output: Optional[str] = 'metalink'
+ output: str = 'metalink'
merged_args: dict = {}
output_types: ClassVar[list[str]] = ['metalink', 'csv', 'geojson', 'json', 'jsonlite', 'jsonlite2', 'kml', 'count', 'download', 'python']
diff --git a/src/SearchAPI/application/output.py b/src/SearchAPI/application/output.py
index 37d3c39..fdd6b3b 100644
--- a/src/SearchAPI/application/output.py
+++ b/src/SearchAPI/application/output.py
@@ -175,12 +175,14 @@ def get_asf_search_script(
) -> tuple[str, str]:
opts.session = None
+ # ASFSearchOptions formatting uses json.dumps for serialization. Add proper python capitalization
+ opts_str = str(opts).replace('true', 'True', -1).replace('false', 'False')
if search_endpoint == 'param':
file_name=make_filename('py', prefix='asf-search-script')
- output_script = asf_search_script_template.format(file_name, str(opts))
+ output_script = asf_search_script_template.format(file_name, opts_str)
else:
file_name=make_filename('py', prefix='asf-search-baseline-script')
- output_script = asf_search_baseline_script_template.format(file_name, reference, str(opts))
+ output_script = asf_search_baseline_script_template.format(file_name, reference, opts_str)
return file_name, output_script
def make_filename(suffix, prefix:str = 'asf-results'):
diff --git a/src/SearchAPI/application/search.py b/src/SearchAPI/application/search.py
deleted file mode 100644
index 1291adf..0000000
--- a/src/SearchAPI/application/search.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from collections import defaultdict
-
-import dateparser
-import asf_search as asf
-from asf_search import ASFSearchResults, ASFProduct
-from shapely.wkt import dumps as dump_to_wkt
-from shapely import Polygon
-
-from asf_enumeration import aria_s1_gunw
-
-def stack_aria_gunw(frame_id: str):
- reference, aria_groups = get_aria_groups_for_frame(frame_id)
-
- stack = ASFSearchResults([group.products[0] for group in aria_groups])
- target_stack, warnings = asf.baseline.get_baseline_from_stack(reference, stack)
-
- return target_stack
-
-def get_aria_groups_for_frame(frame: str) -> tuple[ASFProduct, list[aria_s1_gunw.Sentinel1Acquisition]]:
- aria_frame = aria_s1_gunw.get_frame(frame_id=int(frame))
- groups = aria_s1_gunw.get_acquisitions(aria_frame)
- return groups[0].products[0], groups
diff --git a/tests/requirements.txt b/tests/requirements.txt
index ee0d43c..d8c1b5f 100644
--- a/tests/requirements.txt
+++ b/tests/requirements.txt
@@ -1,9 +1,9 @@
-pytest
-boto3
-requests
-requests-mock
-httpx
-pytest-automation
-pytest-xdist
-pytest-cov
-pytest-rerunfailures
+pytest==8.4.1
+boto3==1.39.4
+requests==2.32.4
+requests-mock==1.12.1
+httpx==0.23.3
+pytest-automation==3.0.0
+pytest-xdist==3.8.0
+pytest-cov==6.2.1
+pytest-rerunfailures==15.1