diff --git a/.flake8 b/.flake8
index 07ce45f..4b0605a 100644
--- a/.flake8
+++ b/.flake8
@@ -1,4 +1,5 @@
[flake8]
max-line-length = 120
-extend-ignore = W503
+# E203 is incompatible with the ruff autoformatter
+extend-ignore = W503, E203
inline-quotes = double
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 25c0fb7..098ad15 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -19,6 +19,15 @@ jobs:
flake8_version: 6.0.0
plugins: flake8-isort==6.1.1 flake8-quotes==3.4.0 flake8-commas==4.0.0
+ ruff-format:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v4
+ - uses: astral-sh/ruff-action@v1
+ with:
+ args: format --check --diff
+
mypy:
runs-on: ubuntu-latest
diff --git a/mandible/internal/__init__.py b/mandible/internal/__init__.py
index 435a001..4c431d3 100644
--- a/mandible/internal/__init__.py
+++ b/mandible/internal/__init__.py
@@ -1,5 +1,6 @@
from .types import Registry
__all__ = (
+    # NOTE: comment keeps ruff format from collapsing this onto one line
"Registry",
)
diff --git a/mandible/jsonpath.py b/mandible/jsonpath.py
index 9efc4cc..738253d 100644
--- a/mandible/jsonpath.py
+++ b/mandible/jsonpath.py
@@ -52,8 +52,8 @@ def _get_dot_path(data: JsonValue, path: str) -> list[JsonValue]:
def _parse_dot_path(path: str) -> Generator[str]:
for part in path.split("."):
- if (m := BRACKET_PATTERN.search(part)):
- yield part[:m.start()]
+ if m := BRACKET_PATTERN.search(part):
+ yield part[: m.start()]
yield m.group(1)
else:
yield part
diff --git a/mandible/metadata_mapper/builder.py b/mandible/metadata_mapper/builder.py
index acb6b4a..e6e8843 100644
--- a/mandible/metadata_mapper/builder.py
+++ b/mandible/metadata_mapper/builder.py
@@ -42,6 +42,7 @@ def __init__(
def build(self, config: BuildConfig) -> Template:
return {
f"{config.directive_marker}{self.name}": {
+                # NOTE: comment keeps ruff format from collapsing this onto one line
k: v.build(config) if isinstance(v, Builder) else v
for k, v in self.params.items()
},
@@ -139,6 +140,7 @@ def reformatted(
params,
)
+
#
# Operations
#
@@ -237,6 +239,7 @@ def build_with_config(template: Any, config: BuildConfig) -> Template:
"""
if isinstance(template, dict):
return {
+            # NOTE: comment keeps ruff format from collapsing this onto one line
k: build_with_config(v, config)
for k, v in template.items()
}
diff --git a/mandible/metadata_mapper/context.py b/mandible/metadata_mapper/context.py
index f2fc744..848b600 100644
--- a/mandible/metadata_mapper/context.py
+++ b/mandible/metadata_mapper/context.py
@@ -42,14 +42,14 @@ def _replace_context_values(obj: Any, context_dict: dict) -> Any:
)
if len(result) > 1:
raise ContextValueError(
- f"context path {repr(obj.path)} returned more than "
- f"one value",
+ f"context path {repr(obj.path)} returned more than one value",
)
return result[0]
if isinstance(obj, dict):
return {
+            # NOTE: comment keeps ruff format from collapsing this onto one line
k: _replace_context_values(v, context_dict)
for k, v in obj.items()
}
diff --git a/mandible/metadata_mapper/directive/directive.py b/mandible/metadata_mapper/directive/directive.py
index 367899c..df64cd6 100644
--- a/mandible/metadata_mapper/directive/directive.py
+++ b/mandible/metadata_mapper/directive/directive.py
@@ -29,6 +29,7 @@ class TemplateDirective(ABC):
A directive is a special marker in the metadata template which will be
replaced by the MetadataMapper.
"""
+
# Registry boilerplate
def __init_subclass__(
cls,
diff --git a/mandible/metadata_mapper/directive/reformatted.py b/mandible/metadata_mapper/directive/reformatted.py
index 39ea820..233cbf5 100644
--- a/mandible/metadata_mapper/directive/reformatted.py
+++ b/mandible/metadata_mapper/directive/reformatted.py
@@ -37,8 +37,7 @@ def call(self) -> Template:
value = self.value.encode()
else:
raise MetadataMapperError(
- "value must be of type 'bytes' or 'str' but got "
- f"{repr(type(self.value).__name__)}",
+ f"value must be of type 'bytes' or 'str' but got {repr(type(self.value).__name__)}",
)
return self.format_obj.get_value(
diff --git a/mandible/metadata_mapper/format/format.py b/mandible/metadata_mapper/format/format.py
index bf47a47..abf190b 100644
--- a/mandible/metadata_mapper/format/format.py
+++ b/mandible/metadata_mapper/format/format.py
@@ -68,7 +68,11 @@ def get_values(
"""Get a list of values from a file"""
with self.parse_data(file) as data:
- return {key: self._eval_key_wrapper(data, key) for key in keys}
+ return {
+                # NOTE: comment keeps ruff format from collapsing this onto one line
+ key: self._eval_key_wrapper(data, key)
+ for key in keys
+ }
def get_value(self, file: IO[bytes], key: Key) -> Any:
"""Convenience function for getting a single value"""
@@ -114,6 +118,7 @@ def eval_key(data: T, key: Key) -> Any:
# Define formats that don't require extra dependencies
+
@dataclass
class Json(FileFormat[JsonValue]):
"""A Format for querying Json files.
@@ -157,6 +162,7 @@ class ZipMember(Format):
def __post_init__(self) -> None:
self._compiled_filters = {
+            # NOTE: comment keeps ruff format from collapsing this onto one line
k: re.compile(v) if isinstance(v, str) else v
for k, v in self.filters.items()
}
@@ -217,6 +223,7 @@ def _matches_filters(self, zipinfo: zipfile.ZipInfo) -> bool:
ZIP_INFO_ATTRS = [
+    # NOTE: comment keeps ruff format from collapsing this onto one line
name
for name, _ in inspect.getmembers(zipfile.ZipInfo, inspect.isdatadescriptor)
if not name.startswith("_")
@@ -233,7 +240,11 @@ def parse_data(file: IO[bytes]) -> Generator[dict]:
with zipfile.ZipFile(file, "r") as zf:
yield {
"infolist": [
- {k: getattr(info, k) for k in ZIP_INFO_ATTRS}
+ {
+                        # NOTE: comment keeps ruff format from collapsing this onto one line
+ k: getattr(info, k)
+ for k in ZIP_INFO_ATTRS
+ }
for info in zf.infolist()
],
"filename": zf.filename,
diff --git a/mandible/metadata_mapper/format/h5.py b/mandible/metadata_mapper/format/h5.py
index 482216d..d0e2332 100644
--- a/mandible/metadata_mapper/format/h5.py
+++ b/mandible/metadata_mapper/format/h5.py
@@ -56,7 +56,9 @@ def normalize(node_val: Any) -> Any:
return float(node_val)
if isinstance(node_val, np.ndarray):
value = [
- x.decode("utf-8") if isinstance(x, bytes) else x for x in node_val.tolist()
+            # NOTE: comment keeps ruff format from collapsing this onto one line
+ x.decode("utf-8") if isinstance(x, bytes) else x
+ for x in node_val.tolist()
]
return value
if isinstance(node_val, bytes):
diff --git a/mandible/metadata_mapper/format/placeholder.py b/mandible/metadata_mapper/format/placeholder.py
index ab7d37a..1421eb5 100644
--- a/mandible/metadata_mapper/format/placeholder.py
+++ b/mandible/metadata_mapper/format/placeholder.py
@@ -12,10 +12,10 @@ class _PlaceholderBase(FileFormat[None], register=False):
"""Base class for defining placeholder implementations for classes that
require extra dependencies to be installed.
"""
+
def __init__(self, dep: str):
raise Exception(
- f"{dep} must be installed to use the {self.__class__.__name__} "
- "format class",
+ f"{dep} must be installed to use the {self.__class__.__name__} format class",
)
@staticmethod
diff --git a/mandible/metadata_mapper/mapper.py b/mandible/metadata_mapper/mapper.py
index 85befff..a782ff5 100644
--- a/mandible/metadata_mapper/mapper.py
+++ b/mandible/metadata_mapper/mapper.py
@@ -39,8 +39,7 @@ def get_metadata(self, context: Context) -> Template:
raise
except Exception as e:
raise MetadataMapperError(
- f"failed to inject context values into source "
- f"{repr(name)}: {e}",
+ f"failed to inject context values into source {repr(name)}: {e}",
) from e
try:
@@ -157,6 +156,7 @@ def _get_directive_name(
debug_path: str,
) -> Optional[tuple[str, dict[str, Template]]]:
directive_configs = [
+            # NOTE: comment keeps ruff format from collapsing this onto one line
(k, v)
for (k, v) in value.items()
if k.startswith(self.directive_marker)
@@ -165,9 +165,9 @@ def _get_directive_name(
return None
if len(directive_configs) > 1:
+ directive_names = ", ".join(repr(k) for k, _ in directive_configs)
raise TemplateError(
- "multiple directives found in config: "
- f"{', '.join(repr(k) for k, v in directive_configs)}",
+ f"multiple directives found in config: {directive_names}",
debug_path,
)
@@ -175,8 +175,7 @@ def _get_directive_name(
if not isinstance(directive_config, dict):
raise TemplateError(
- "directive body should be type 'dict' not "
- f"{repr(directive_config.__class__.__name__)}",
+ f"directive body should be type 'dict' not {repr(directive_config.__class__.__name__)}",
f"{debug_path}.{directive_name}",
)
@@ -190,7 +189,7 @@ def _get_directive(
config: dict[str, Template],
debug_path: str,
) -> TemplateDirective:
- cls = DIRECTIVE_REGISTRY.get(directive_name[len(self.directive_marker):])
+ cls = DIRECTIVE_REGISTRY.get(directive_name[len(self.directive_marker) :])
if cls is None:
raise TemplateError(
f"invalid directive {repr(directive_name)}",
@@ -201,9 +200,7 @@ def _get_directive(
# Ignore the `self`, `context`, and `sources` parameters
required_keys = set(
- argspec.args[3:-len(argspec.defaults)]
- if argspec.defaults else
- argspec.args[3:],
+ argspec.args[3 : -len(argspec.defaults)] if argspec.defaults else argspec.args[3:],
)
config_keys = set(config.keys())
diff = required_keys - config_keys
@@ -213,14 +210,14 @@ def _get_directive(
if len(diff) > 1:
s = "s"
raise TemplateError(
- f"missing key{s}: "
- f"{', '.join(repr(d) for d in sorted(diff))}",
+ f"missing key{s}: {', '.join(repr(d) for d in sorted(diff))}",
debug_path,
)
# For forward compatibility, ignore any unexpected keys
all_keys = set(argspec.args[2:])
kwargs = {
+            # NOTE: comment keeps ruff format from collapsing this onto one line
k: v
for k, v in config.items()
if k in all_keys
diff --git a/mandible/metadata_mapper/source_provider.py b/mandible/metadata_mapper/source_provider.py
index cb76c0d..074f1aa 100644
--- a/mandible/metadata_mapper/source_provider.py
+++ b/mandible/metadata_mapper/source_provider.py
@@ -46,6 +46,7 @@ def __init__(self, config: dict):
def get_sources(self) -> dict[str, Source]:
return {
+            # NOTE: comment keeps ruff format from collapsing this onto one line
key: self._create_source(key, config)
for key, config in self.config.items()
}
@@ -85,8 +86,7 @@ def _create_object(
if not issubclass(cls, base_cls):
raise SourceProviderError(
- f"invalid {key} type {repr(cls_name)} must be a subclass of "
- f"{repr(base_cls.__name__)}",
+ f"invalid {key} type {repr(cls_name)} must be a subclass of {repr(base_cls.__name__)}",
)
return self._instantiate_class(cls, config)
@@ -111,6 +111,7 @@ def _get_class_from_registry(
def _instantiate_class(self, cls: type[T], config: dict[str, Any]) -> T:
kwargs = {
+            # NOTE: comment keeps ruff format from collapsing this onto one line
k: self._convert_arg(cls, k, v)
for k, v in config.items()
if k != "class"
@@ -124,6 +125,7 @@ def _convert_arg(self, parent_cls: type[Any], key: str, arg: Any) -> Any:
return self._create_object(parent_cls, key, arg)
return {
+            # NOTE: comment keeps ruff format from collapsing this onto one line
k: self._convert_arg(parent_cls, k, v)
for k, v in arg.items()
}
diff --git a/mandible/metadata_mapper/storage/placeholder.py b/mandible/metadata_mapper/storage/placeholder.py
index 7d5643c..d85aac4 100644
--- a/mandible/metadata_mapper/storage/placeholder.py
+++ b/mandible/metadata_mapper/storage/placeholder.py
@@ -12,10 +12,10 @@ class _PlaceholderBase(Storage, register=False):
Base class for defining placeholder implementations for classes that
require extra dependencies to be installed
"""
+
def __init__(self, dep: str):
raise Exception(
- f"{dep} must be installed to use the {self.__class__.__name__} "
- "format class",
+ f"{dep} must be installed to use the {self.__class__.__name__} format class",
)
def open_file(self, context: Context) -> IO[bytes]:
diff --git a/mandible/metadata_mapper/storage/storage.py b/mandible/metadata_mapper/storage/storage.py
index 949ab24..74a97d4 100644
--- a/mandible/metadata_mapper/storage/storage.py
+++ b/mandible/metadata_mapper/storage/storage.py
@@ -31,6 +31,7 @@ def open_file(self, context: Context) -> IO[bytes]:
# Define storages that don't require extra dependencies
+
@dataclass
class Dummy(Storage):
"""A dummy storage that returns a hardcoded byte stream.
@@ -54,6 +55,7 @@ class FilteredStorage(Storage, register=False):
"""A storage which matches a set of filters on the context's files and
returns data from the matching file.
"""
+
# Begin class definition
filters: dict[str, Any] = field(default_factory=dict)
@@ -64,6 +66,7 @@ def __post_init__(self) -> None:
def _compiled_filters(self) -> dict[str, Any]:
if self._compiled_filters_cache is None:
self._compiled_filters_cache = {
+                # NOTE: comment keeps ruff format from collapsing this onto one line
k: re.compile(v) if isinstance(v, str) else v
for k, v in self.filters.items()
}
diff --git a/mandible/umm_classes/base.py b/mandible/umm_classes/base.py
index 2af45e3..e7162a3 100644
--- a/mandible/umm_classes/base.py
+++ b/mandible/umm_classes/base.py
@@ -66,12 +66,10 @@ def __init__(self, granule: CMAGranule):
self.granule = granule
@overload
- def date_to_str(self, date: datetime.date) -> str:
- ...
+ def date_to_str(self, date: datetime.date) -> str: ...
@overload
- def date_to_str(self, date: None) -> None:
- ...
+ def date_to_str(self, date: None) -> None: ...
def date_to_str(self, date: Optional[datetime.date]) -> Optional[str]:
"""Serialize a datetime.date or datetime.datetime as a string using the
@@ -207,7 +205,8 @@ def get_ummg(self) -> Ummg:
additional_attributes=sorted(
self.get_additional_attributes(),
key=lambda attr: attr["Name"],
- ) or None,
+ )
+ or None,
cloud_cover=self.get_cloud_cover(),
collection_reference=self.get_collection_reference(),
data_granule=self.get_data_granule(),
@@ -218,6 +217,7 @@ def get_ummg(self) -> Ummg:
metadata_specification=self.get_metadata_specification(),
native_projection_names=self.get_native_projection_names() or None,
orbit_calculated_spatial_domains=(
+                # NOTE: comment keeps ruff format from collapsing this onto one line
self.get_orbit_calculated_spatial_domains() or None
),
pge_version_class=self.get_pge_version_class(),
@@ -269,6 +269,7 @@ def get_archive_and_distribution_information(
def get_data_granule(self) -> DataGranule:
return data_granule(
archive_and_distribution_information=(
+                # NOTE: comment keeps ruff format from collapsing this onto one line
self.get_archive_and_distribution_information() or None
),
day_night_flag=self.get_day_night_flag(),
diff --git a/mandible/umm_classes/factory.py b/mandible/umm_classes/factory.py
index 9477b17..4cdd67a 100644
--- a/mandible/umm_classes/factory.py
+++ b/mandible/umm_classes/factory.py
@@ -168,8 +168,7 @@ def spatial_extent(
if not obj:
raise ValueError(
- "one of 'granule_localities', 'horizontal_spatial_domain', or "
- "'vertical_spatial_domains' is required",
+ "one of 'granule_localities', 'horizontal_spatial_domain', or 'vertical_spatial_domains' is required",
)
return obj
diff --git a/mandible/umm_classes/related_url_builder.py b/mandible/umm_classes/related_url_builder.py
index 323ad83..d10f9c1 100644
--- a/mandible/umm_classes/related_url_builder.py
+++ b/mandible/umm_classes/related_url_builder.py
@@ -26,7 +26,7 @@ def __init__(self, file: CMAGranuleFile, include_s3_uri: bool = True):
self.include_s3_uri = include_s3_uri
def get_http_description(self) -> Optional[str]:
- return f'Download {self.file["fileName"]}'
+ return f"Download {self.file['fileName']}"
def get_http_format(self) -> Optional[str]:
return None
@@ -89,10 +89,7 @@ def get_related_url_s3(self) -> RelatedUrl:
)
def get_s3_description(self) -> Optional[str]:
- return (
- "This link provides direct download access via S3 to "
- f'{self.file["fileName"]}'
- )
+ return f"This link provides direct download access via S3 to {self.file['fileName']}"
def get_s3_format(self) -> Optional[str]:
return None
@@ -133,10 +130,7 @@ def __init__(
self.mission = mission
def get_http_url(self) -> str:
- return (
- f"https://{self.download_host}/{self.processing_type}"
- f'/{self.mission}/{self.file["fileName"]}'
- )
+ return f"https://{self.download_host}/{self.processing_type}/{self.mission}/{self.file['fileName']}"
class TeaUrlBuilder(RelatedUrlBuilder):
@@ -155,5 +149,5 @@ def __init__(
def get_http_url(self) -> str:
return urllib.parse.urljoin(
self.download_url,
- f'{self.path_prefix}/{self.file["key"]}',
+ f"{self.path_prefix}/{self.file['key']}",
)
diff --git a/mandible/umm_classes/types.py b/mandible/umm_classes/types.py
index 29dd305..9b43c58 100644
--- a/mandible/umm_classes/types.py
+++ b/mandible/umm_classes/types.py
@@ -13,6 +13,7 @@ class AccessConstraints(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#195
"""
+
Description: NotRequired[str]
Value: Union[float, int]
@@ -22,6 +23,7 @@ class AdditionalAttribute(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#1057
"""
+
Name: str
Values: list[str]
@@ -31,6 +33,7 @@ class ArchiveAndDistributionInformationFilePackageType(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#263
"""
+
Name: str
SizeInBytes: NotRequired[int]
Size: NotRequired[Union[float, int]]
@@ -46,6 +49,7 @@ class ArchiveAndDistributionInformationFileType(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#309
"""
+
Name: str
SizeInBytes: NotRequired[int]
Size: NotRequired[Union[float, int]]
@@ -67,6 +71,7 @@ class BoundingRectangle(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#591
"""
+
WestBoundingCoordinate: Union[float, int]
NorthBoundingCoordinate: Union[float, int]
EastBoundingCoordinate: Union[float, int]
@@ -78,6 +83,7 @@ class Boundary(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#625
"""
+
Points: list["Point"]
@@ -86,6 +92,7 @@ class Characteristic(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#1008
"""
+
Name: str
Value: str
@@ -95,6 +102,7 @@ class Checksum(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#1159
"""
+
Value: str
Algorithm: str
@@ -104,6 +112,7 @@ class CollectionReferenceShortNameVersion(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#166
"""
+
ShortName: str
Version: str
@@ -113,6 +122,7 @@ class CollectionReferenceEntryTitle(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#184
"""
+
EntryTitle: str
@@ -127,6 +137,7 @@ class DataGranule(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#213
"""
+
ArchiveAndDistributionInformation: NotRequired[list[ArchiveAndDistributionInformation]]
ReprocessingPlanned: NotRequired[str]
ReprocessingActual: NotRequired[str]
@@ -140,6 +151,7 @@ class ExclusiveZone(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#640
"""
+
Boundaries: list[Boundary]
@@ -148,6 +160,7 @@ class Geometry(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#525
"""
+
Points: NotRequired[list["Point"]]
BoundingRectangles: NotRequired[list[BoundingRectangle]]
GPolygons: NotRequired[list["GPolygon"]]
@@ -159,6 +172,7 @@ class GPolygon(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#611
"""
+
Boundary: Boundary
ExclusiveZone: NotRequired[ExclusiveZone]
@@ -168,6 +182,7 @@ class HorizontalSpatialDomain(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#497
"""
+
# TODO(reweeden): Implement
ZoneIdentifier: NotRequired[dict[str, Any]]
Geometry: NotRequired[Geometry]
@@ -182,6 +197,7 @@ class Identifier(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#353
"""
+
Identifier: str
IdentifierType: str
IdentifierName: NotRequired[str]
@@ -192,6 +208,7 @@ class Instrument(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#968
"""
+
ShortName: str
Characteristics: NotRequired[list[Characteristic]]
ComposedOf: NotRequired[list["Instrument"]]
@@ -203,6 +220,7 @@ class Line(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#655
"""
+
Points: list["Point"]
@@ -211,6 +229,7 @@ class MeasuredParameter(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#840
"""
+
ParameterName: str
# TODO(reweeden): Implement
QAStats: NotRequired[dict[str, Any]]
@@ -223,6 +242,7 @@ class MetadataSpecification(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#1285
"""
+
URL: str
Name: str
Version: str
@@ -233,6 +253,7 @@ class OrbitCalculatedSpatialDomain(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#786
"""
+
OrbitalModelName: NotRequired[str]
OrbitNumber: NotRequired[int]
BeginOrbitNumber: NotRequired[int]
@@ -246,6 +267,7 @@ class PGEVersionClass(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#403
"""
+
PGEName: NotRequired[str]
PGEVersion: str
@@ -255,6 +277,7 @@ class Platform(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#949
"""
+
ShortName: str
Instruments: NotRequired[list[Instrument]]
@@ -264,6 +287,7 @@ class Point(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#611
"""
+
Longitude: Union[float, int]
Latitude: Union[float, int]
@@ -273,6 +297,7 @@ class Project(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#1028
"""
+
ShortName: str
Campaigns: NotRequired[list[str]]
@@ -282,6 +307,7 @@ class ProviderDate(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#144
"""
+
Date: str
Type: str
@@ -291,6 +317,7 @@ class RangeDateTime(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#447
"""
+
BeginningDateTime: str
EndingDateTime: NotRequired[str]
@@ -300,6 +327,7 @@ class RelatedUrl(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#1112
"""
+
URL: str
Type: str
Subtype: NotRequired[str]
@@ -315,6 +343,7 @@ class SpatialExtent(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#465
"""
+
GranuleLocalities: NotRequired[list[str]]
HorizontalSpatialDomain: NotRequired["HorizontalSpatialDomain"]
# TODO(reweeden): Implement
@@ -326,6 +355,7 @@ class TemporalExtentRangeDateTime(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#428
"""
+
RangeDateTime: RangeDateTime
@@ -334,6 +364,7 @@ class TemporalExtentSingleDateTime(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#437
"""
+
SingleDateTime: str
@@ -348,6 +379,7 @@ class TilingIdentificationSystem(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#1081
"""
+
TilingIdentificationSystemName: str
# TODO(reweeden): Implement
Coordinate1: dict[str, Any]
@@ -360,6 +392,7 @@ class Ummg(TypedDict):
https://git.earthdata.nasa.gov/projects/EMFD/repos/unified-metadata-model/browse/granule/v1.6.5/umm-g-json-schema.json#7
"""
+
GranuleUR: str
ProviderDates: list[ProviderDate]
CollectionReference: CollectionReference
@@ -381,6 +414,7 @@ class Ummg(TypedDict):
GridMappingNames: NotRequired[list[str]]
MetadataSpecification: MetadataSpecification
+
# Other TypedDict definitions
diff --git a/pyproject.toml b/pyproject.toml
index b4a3b56..30d6cf4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -60,3 +60,10 @@ markers = [
[tool.isort]
profile = "black"
+
+[tool.ruff]
+line-length = 120
+
+[tool.ruff.lint]
+# Add the `line-too-long` rule to the enforced rule set.
+extend-select = ["E501"]
diff --git a/tests/integration_tests/test_builder.py b/tests/integration_tests/test_builder.py
index 4be6169..258cf2b 100644
--- a/tests/integration_tests/test_builder.py
+++ b/tests/integration_tests/test_builder.py
@@ -6,21 +6,26 @@
@pytest.fixture
def source_provider(config):
- return ConfigSourceProvider({
- "fixed_name_file": config["sources"]["fixed_name_file"],
- "name_match_file": config["sources"]["name_match_file"],
- })
+ return ConfigSourceProvider(
+ {
+ "fixed_name_file": config["sources"]["fixed_name_file"],
+ "name_match_file": config["sources"]["name_match_file"],
+ },
+ )
def test_template(source_provider, context):
mapper = MetadataMapper(
- template=build({
- "list": (
- mapped("fixed_name_file", "list")
- + mapped("name_match_file", "list")
- ),
- "number": mapped("fixed_name_file", "integer") + 20.5,
- }),
+ template=build(
+ {
+ "list": (
+ mapped("fixed_name_file", "list")
+                    # NOTE: comment keeps ruff format from collapsing this onto one line
+ + mapped("name_match_file", "list")
+ ),
+ "number": mapped("fixed_name_file", "integer") + 20.5,
+ },
+ ),
source_provider=source_provider,
)
@@ -32,9 +37,11 @@ def test_template(source_provider, context):
def test_template_default(source_provider, context):
mapper = MetadataMapper(
- template=build({
- "badkey": mapped("fixed_name_file", "badkey", default=None),
- }),
+ template=build(
+ {
+ "badkey": mapped("fixed_name_file", "badkey", default=None),
+ },
+ ),
source_provider=source_provider,
)
@@ -44,14 +51,18 @@ def test_template_default(source_provider, context):
def test_template_default_multiple_build(source_provider, context):
- base_template = build({
- "badkey": mapped("fixed_name_file", "badkey", default=None),
- })
+ base_template = build(
+ {
+ "badkey": mapped("fixed_name_file", "badkey", default=None),
+ },
+ )
mapper = MetadataMapper(
- template=build({
- **base_template,
- "goodkey": mapped("fixed_name_file", "integer"),
- }),
+ template=build(
+ {
+ **base_template,
+ "goodkey": mapped("fixed_name_file", "integer"),
+ },
+ ),
source_provider=source_provider,
)
diff --git a/tests/integration_tests/test_directives.py b/tests/integration_tests/test_directives.py
index 6d8e12d..2a97bec 100644
--- a/tests/integration_tests/test_directives.py
+++ b/tests/integration_tests/test_directives.py
@@ -155,9 +155,11 @@ def test_mapped_extra_parameter(context, fixed_name_file_config):
},
},
},
- source_provider=ConfigSourceProvider({
- "fixed_name_file": fixed_name_file_config,
- }),
+ source_provider=ConfigSourceProvider(
+ {
+ "fixed_name_file": fixed_name_file_config,
+ },
+ ),
)
mapper.get_metadata(context) == {"foo": "value for foo"}
@@ -177,21 +179,23 @@ def test_reformatted_json_field_in_json():
"key": "foo",
},
},
- source_provider=ConfigSourceProvider({
- "file": {
- "storage": {
- "class": "Dummy",
- "data": br"""
- {
- "some-field": "{\"foo\": \"bar\"}"
- }
- """,
- },
- "format": {
- "class": "Json",
+ source_provider=ConfigSourceProvider(
+ {
+ "file": {
+ "storage": {
+ "class": "Dummy",
+ "data": rb"""
+ {
+ "some-field": "{\"foo\": \"bar\"}"
+ }
+ """,
+ },
+ "format": {
+ "class": "Json",
+ },
},
},
- }),
+ ),
)
context = Context()
@@ -214,21 +218,23 @@ def test_reformatted_json_field_in_xml():
"key": "foo",
},
},
- source_provider=ConfigSourceProvider({
- "file": {
- "storage": {
- "class": "Dummy",
- "data": b"""
-
- {"foo": "bar"}
-
- """,
- },
- "format": {
- "class": "Xml",
+ source_provider=ConfigSourceProvider(
+ {
+ "file": {
+ "storage": {
+ "class": "Dummy",
+ "data": b"""
+
+ {"foo": "bar"}
+
+ """,
+ },
+ "format": {
+ "class": "Xml",
+ },
},
},
- }),
+ ),
)
context = Context()
@@ -251,21 +257,23 @@ def test_reformatted_json_field_in_xml_get_entire_value():
"key": "$",
},
},
- source_provider=ConfigSourceProvider({
- "file": {
- "storage": {
- "class": "Dummy",
- "data": b"""
-
- {"foo": "bar"}
-
- """,
- },
- "format": {
- "class": "Xml",
+ source_provider=ConfigSourceProvider(
+ {
+ "file": {
+ "storage": {
+ "class": "Dummy",
+ "data": b"""
+
+ {"foo": "bar"}
+
+ """,
+ },
+ "format": {
+ "class": "Xml",
+ },
},
},
- }),
+ ),
)
context = Context()
@@ -288,21 +296,23 @@ def test_reformatted_xml_field_in_json():
"key": "/root/field",
},
},
- source_provider=ConfigSourceProvider({
- "file": {
- "storage": {
- "class": "Dummy",
- "data": b"""
- {
- "foo": "bar"
- }
- """,
- },
- "format": {
- "class": "Json",
+ source_provider=ConfigSourceProvider(
+ {
+ "file": {
+ "storage": {
+ "class": "Dummy",
+ "data": b"""
+ {
+ "foo": "bar"
+ }
+ """,
+ },
+ "format": {
+ "class": "Json",
+ },
},
},
- }),
+ ),
)
context = Context()
@@ -324,17 +334,19 @@ def test_reformatted_bad_type():
"key": "$",
},
},
- source_provider=ConfigSourceProvider({
- "file": {
- "storage": {
- "class": "Dummy",
- "data": b'{"foo": true}',
- },
- "format": {
- "class": "Json",
+ source_provider=ConfigSourceProvider(
+ {
+ "file": {
+ "storage": {
+ "class": "Dummy",
+ "data": b'{"foo": true}',
+ },
+ "format": {
+ "class": "Json",
+ },
},
},
- }),
+ ),
)
context = Context()
@@ -359,21 +371,23 @@ def test_reformatted_nested():
"key": "/root/field",
},
},
- source_provider=ConfigSourceProvider({
- "file": {
- "storage": {
- "class": "Dummy",
- "data": b"""
- {
- "foo": "bar"
- }
- """,
- },
- "format": {
- "class": "Json",
+ source_provider=ConfigSourceProvider(
+ {
+ "file": {
+ "storage": {
+ "class": "Dummy",
+ "data": b"""
+ {
+ "foo": "bar"
+ }
+ """,
+ },
+ "format": {
+ "class": "Json",
+ },
},
},
- }),
+ ),
)
context = Context()
@@ -491,9 +505,11 @@ def test_add_mapped_values(context, fixed_name_file_config):
},
},
},
- source_provider=ConfigSourceProvider({
- "fixed_name_file": fixed_name_file_config,
- }),
+ source_provider=ConfigSourceProvider(
+ {
+ "fixed_name_file": fixed_name_file_config,
+ },
+ ),
)
assert mapper.get_metadata(context) == "value for foovalue for nested"
diff --git a/tests/integration_tests/test_full_example.py b/tests/integration_tests/test_full_example.py
index fd3abf4..22a4e58 100644
--- a/tests/integration_tests/test_full_example.py
+++ b/tests/integration_tests/test_full_example.py
@@ -50,77 +50,79 @@ def sources():
@pytest.fixture
def template():
- return build({
- "JsonMd": {
- # Simple queries
- "description": mapped("json", "description"),
- "total": mapped("json", "meta.summary.total"),
- "complete": mapped("json", "meta.summary.complete"),
- "null": mapped("json", "meta.null"),
- # JSONPath only queries
- "banana_price": mapped("json", "inventory[?name = 'Banana'].price"),
- "oreo_price": mapped(
- "json",
- "inventory[?name = 'Oreo'].price",
- default=4.49,
- ),
- "first_red_item": mapped(
- "json",
- "inventory[?attributes.color = 'red'].name",
- return_first=True,
- ),
- "in_stock_items": mapped(
- "json",
- "inventory[?in_stock = true].name",
- return_list=True,
- ),
- },
- "XmlMd": {
- "description": mapped("xml", "./description"),
- "total": mapped("xml", "./meta/summary/total"),
- "complete": mapped("xml", "./meta/summary/complete"),
- "null": mapped("xml", "./meta/null"),
- "banana_price": mapped("xml", "./inventory/item[name='Banana']/price"),
- "oreo_price": mapped(
- "xml",
- "./inventory/item[name='Oreo']/price",
- default=4.49,
- ),
- "first_red_item": mapped(
- "xml",
- "./inventory/item[attributes/color='red']/name",
- return_first=True,
- ),
- "in_stock_items": mapped(
- "xml",
- "./inventory/item[in_stock='true']/name",
- return_list=True,
- ),
- },
- "Bzip2JsonMd": {
- "description": mapped("bzip2json", "description"),
- "total": mapped("bzip2json", "meta.summary.total"),
- "complete": mapped("bzip2json", "meta.summary.complete"),
- "null": mapped("bzip2json", "meta.null"),
- # JSONPath only queries
- "banana_price": mapped("bzip2json", "inventory[?name = 'Banana'].price"),
- "oreo_price": mapped(
- "bzip2json",
- "inventory[?name = 'Oreo'].price",
- default=4.49,
- ),
- "first_red_item": mapped(
- "bzip2json",
- "inventory[?attributes.color = 'red'].name",
- return_first=True,
- ),
- "in_stock_items": mapped(
- "bzip2json",
- "inventory[?in_stock = true].name",
- return_list=True,
- ),
+ return build(
+ {
+ "JsonMd": {
+ # Simple queries
+ "description": mapped("json", "description"),
+ "total": mapped("json", "meta.summary.total"),
+ "complete": mapped("json", "meta.summary.complete"),
+ "null": mapped("json", "meta.null"),
+ # JSONPath only queries
+ "banana_price": mapped("json", "inventory[?name = 'Banana'].price"),
+ "oreo_price": mapped(
+ "json",
+ "inventory[?name = 'Oreo'].price",
+ default=4.49,
+ ),
+ "first_red_item": mapped(
+ "json",
+ "inventory[?attributes.color = 'red'].name",
+ return_first=True,
+ ),
+ "in_stock_items": mapped(
+ "json",
+ "inventory[?in_stock = true].name",
+ return_list=True,
+ ),
+ },
+ "XmlMd": {
+ "description": mapped("xml", "./description"),
+ "total": mapped("xml", "./meta/summary/total"),
+ "complete": mapped("xml", "./meta/summary/complete"),
+ "null": mapped("xml", "./meta/null"),
+ "banana_price": mapped("xml", "./inventory/item[name='Banana']/price"),
+ "oreo_price": mapped(
+ "xml",
+ "./inventory/item[name='Oreo']/price",
+ default=4.49,
+ ),
+ "first_red_item": mapped(
+ "xml",
+ "./inventory/item[attributes/color='red']/name",
+ return_first=True,
+ ),
+ "in_stock_items": mapped(
+ "xml",
+ "./inventory/item[in_stock='true']/name",
+ return_list=True,
+ ),
+ },
+ "Bzip2JsonMd": {
+ "description": mapped("bzip2json", "description"),
+ "total": mapped("bzip2json", "meta.summary.total"),
+ "complete": mapped("bzip2json", "meta.summary.complete"),
+ "null": mapped("bzip2json", "meta.null"),
+ # JSONPath only queries
+ "banana_price": mapped("bzip2json", "inventory[?name = 'Banana'].price"),
+ "oreo_price": mapped(
+ "bzip2json",
+ "inventory[?name = 'Oreo'].price",
+ default=4.49,
+ ),
+ "first_red_item": mapped(
+ "bzip2json",
+ "inventory[?attributes.color = 'red'].name",
+ return_first=True,
+ ),
+ "in_stock_items": mapped(
+ "bzip2json",
+ "inventory[?in_stock = true].name",
+ return_list=True,
+ ),
+ },
},
- })
+ )
@pytest.fixture
diff --git a/tests/integration_tests/test_metadata_mapper.py b/tests/integration_tests/test_metadata_mapper.py
index f9c5921..64db9f9 100644
--- a/tests/integration_tests/test_metadata_mapper.py
+++ b/tests/integration_tests/test_metadata_mapper.py
@@ -47,18 +47,17 @@ def test_empty_context(fixed_name_file_config):
},
},
},
- source_provider=ConfigSourceProvider({
- "fixed_name_file": fixed_name_file_config,
- }),
+ source_provider=ConfigSourceProvider(
+ {
+ "fixed_name_file": fixed_name_file_config,
+ },
+ ),
)
context = Context()
with pytest.raises(
MetadataMapperError,
- match=(
- "failed to query source 'fixed_name_file': "
- "no files in context"
- ),
+ match="failed to query source 'fixed_name_file': no files in context",
):
mapper.get_metadata(context)
@@ -88,9 +87,11 @@ def test_custom_directive_marker(context, fixed_name_file_config):
},
},
},
- source_provider=ConfigSourceProvider({
- "fixed_name_file": fixed_name_file_config,
- }),
+ source_provider=ConfigSourceProvider(
+ {
+ "fixed_name_file": fixed_name_file_config,
+ },
+ ),
directive_marker="#",
)
assert mapper.get_metadata(context) == {
@@ -108,9 +109,11 @@ def test_custom_directive_marker_long(context, fixed_name_file_config):
},
},
},
- source_provider=ConfigSourceProvider({
- "fixed_name_file": fixed_name_file_config,
- }),
+ source_provider=ConfigSourceProvider(
+ {
+ "fixed_name_file": fixed_name_file_config,
+ },
+ ),
directive_marker="###",
)
assert mapper.get_metadata(context) == {
@@ -122,48 +125,50 @@ def test_custom_directive_marker_long(context, fixed_name_file_config):
def test_basic_py_source_provider(config, context):
mapper = MetadataMapper(
template=config["template"],
- source_provider=PySourceProvider({
- "fixed_name_file": FileSource(
- storage=LocalFile(
- filters={
- "name": "fixed_name_file.json",
- },
+ source_provider=PySourceProvider(
+ {
+ "fixed_name_file": FileSource(
+ storage=LocalFile(
+ filters={
+ "name": "fixed_name_file.json",
+ },
+ ),
+ format=Json(),
),
- format=Json(),
- ),
- "fixed_xml_file": FileSource(
- storage=LocalFile(
- filters={
- "name": "fixed_xml_file.xml",
- },
+ "fixed_xml_file": FileSource(
+ storage=LocalFile(
+ filters={
+ "name": "fixed_xml_file.xml",
+ },
+ ),
+ format=Xml(),
),
- format=Xml(),
- ),
- "namespace_xml_file": FileSource(
- storage=LocalFile(
- filters={
- "name": "xml_with_namespace.xml",
- },
+ "namespace_xml_file": FileSource(
+ storage=LocalFile(
+ filters={
+ "name": "xml_with_namespace.xml",
+ },
+ ),
+ format=Xml(),
),
- format=Xml(),
- ),
- "name_match_file": FileSource(
- storage=LocalFile(
- filters={
- "name": r".*match_me\.json",
- },
+ "name_match_file": FileSource(
+ storage=LocalFile(
+ filters={
+ "name": r".*match_me\.json",
+ },
+ ),
+ format=Json(),
),
- format=Json(),
- ),
- "name_match_file2": FileSource(
- storage=LocalFile(
- filters={
- "name": re.compile(r".*match_me\.json"),
- },
+ "name_match_file2": FileSource(
+ storage=LocalFile(
+ filters={
+ "name": re.compile(r".*match_me\.json"),
+ },
+ ),
+ format=Json(),
),
- format=Json(),
- ),
- }),
+ },
+ ),
)
assert mapper.get_metadata(context) == {
"foo": "value for foo",
@@ -218,27 +223,26 @@ def test_no_matching_files(context):
},
},
},
- source_provider=ConfigSourceProvider({
- "source_file": {
- "storage": {
- "class": "LocalFile",
- "filters": {
- "name": "does not exist",
+ source_provider=ConfigSourceProvider(
+ {
+ "source_file": {
+ "storage": {
+ "class": "LocalFile",
+ "filters": {
+ "name": "does not exist",
+ },
+ },
+ "format": {
+ "class": "Json",
},
- },
- "format": {
- "class": "Json",
},
},
- }),
+ ),
)
with pytest.raises(
MetadataMapperError,
- match=(
- "failed to query source 'source_file': "
- "no files matched filters"
- ),
+ match="failed to query source 'source_file': no files matched filters",
):
mapper.get_metadata(context)
@@ -253,17 +257,16 @@ def test_source_non_existent_key(context, fixed_name_file_config):
},
},
},
- source_provider=ConfigSourceProvider({
- "fixed_name_file": fixed_name_file_config,
- }),
+ source_provider=ConfigSourceProvider(
+ {
+ "fixed_name_file": fixed_name_file_config,
+ },
+ ),
)
with pytest.raises(
MetadataMapperError,
- match=(
- "failed to query source 'fixed_name_file': "
- "key not found 'does_not_exist'"
- ),
+ match="failed to query source 'fixed_name_file': key not found 'does_not_exist'",
):
mapper.get_metadata(context)
@@ -332,17 +335,19 @@ def test_multiple_directives(context):
def test_context_values_missing():
mapper = MetadataMapper(
template={},
- source_provider=ConfigSourceProvider({
- "test": {
- "storage": {
- "class": "LocalFile",
- "filters": "$.meta.does-not-exist",
- },
- "format": {
- "class": "Json",
+ source_provider=ConfigSourceProvider(
+ {
+ "test": {
+ "storage": {
+ "class": "LocalFile",
+ "filters": "$.meta.does-not-exist",
+ },
+ "format": {
+ "class": "Json",
+ },
},
},
- }),
+ ),
)
context = Context()
@@ -360,17 +365,19 @@ def test_context_values_missing():
def test_context_values_multiple_values():
mapper = MetadataMapper(
template={},
- source_provider=ConfigSourceProvider({
- "test": {
- "storage": {
- "class": "LocalFile",
- "filters": "$.meta.foo[*].bar",
- },
- "format": {
- "class": "Json",
+ source_provider=ConfigSourceProvider(
+ {
+ "test": {
+ "storage": {
+ "class": "LocalFile",
+ "filters": "$.meta.foo[*].bar",
+ },
+ "format": {
+ "class": "Json",
+ },
},
},
- }),
+ ),
)
context = Context(
meta={
@@ -396,17 +403,19 @@ def test_context_values_multiple_values():
def test_context_values_invalid():
mapper = MetadataMapper(
template={},
- source_provider=ConfigSourceProvider({
- "test": {
- "storage": {
- "class": "LocalFile",
- "filters": "$.meta.bad-syntax[",
- },
- "format": {
- "class": "Json",
+ source_provider=ConfigSourceProvider(
+ {
+ "test": {
+ "storage": {
+ "class": "LocalFile",
+ "filters": "$.meta.bad-syntax[",
+ },
+ "format": {
+ "class": "Json",
+ },
},
},
- }),
+ ),
)
context = Context()
diff --git a/tests/integration_tests/test_umm_classes.py b/tests/integration_tests/test_umm_classes.py
index cfc0fd3..c6f2fac 100644
--- a/tests/integration_tests/test_umm_classes.py
+++ b/tests/integration_tests/test_umm_classes.py
@@ -409,8 +409,8 @@ def get_tiling_identification_system(self) -> TilingIdentificationSystem:
"URL": "s3://test_bucket/test_prefix/test_file.txt",
"Type": "GET DATA VIA DIRECT ACCESS",
"Description": (
- "This link provides direct download access via S3 to "
- "test_file.txt"
+ # ruff hint
+ "This link provides direct download access via S3 to test_file.txt"
),
},
],
diff --git a/tests/test_builder.py b/tests/test_builder.py
index 48d6eb0..ebfc754 100644
--- a/tests/test_builder.py
+++ b/tests/test_builder.py
@@ -154,10 +154,13 @@ def test_build_add():
def test_build_add_automatic():
- template = mapped(
- source="some_source",
- key="some.key",
- ) + 10
+ template = (
+ mapped(
+ source="some_source",
+ key="some.key",
+ )
+ + 10
+ )
assert build(template) == {
"@add": {
@@ -206,10 +209,13 @@ def test_build_floordiv():
def test_build_floordiv_automatic():
- template = mapped(
- source="some_source",
- key="some.key",
- ) // 10
+ template = (
+ mapped(
+ source="some_source",
+ key="some.key",
+ )
+ // 10
+ )
assert build(template) == {
"@floordiv": {
@@ -258,10 +264,13 @@ def test_build_mul():
def test_build_mul_automatic():
- template = mapped(
- source="some_source",
- key="some.key",
- ) * 10
+ template = (
+ mapped(
+ source="some_source",
+ key="some.key",
+ )
+ * 10
+ )
assert build(template) == {
"@mul": {
@@ -310,10 +319,13 @@ def test_build_sub():
def test_build_sub_automatic():
- template = mapped(
- source="some_source",
- key="some.key",
- ) - 10
+ template = (
+ mapped(
+ source="some_source",
+ key="some.key",
+ )
+ - 10
+ )
assert build(template) == {
"@sub": {
@@ -362,10 +374,13 @@ def test_build_truediv():
def test_build_truediv_automatic():
- template = mapped(
- source="some_source",
- key="some.key",
- ) / 10
+ template = (
+ mapped(
+ source="some_source",
+ key="some.key",
+ )
+ / 10
+ )
assert build(template) == {
"@truediv": {
diff --git a/tests/test_context.py b/tests/test_context.py
index f0f3946..218c50a 100644
--- a/tests/test_context.py
+++ b/tests/test_context.py
@@ -54,18 +54,27 @@ def test_replace_context_values_noop(context):
def test_replace_context_values_direct(context):
- assert replace_context_values(
- ContextValue("$.meta.foo"),
- context,
- ) == "foo-value"
- assert replace_context_values(
- ContextValue("$.meta.bar"),
- context,
- ) == "bar-value"
- assert replace_context_values(
- ContextValue("$.meta.a-number"),
- context,
- ) == 1
+ assert (
+ replace_context_values(
+ ContextValue("$.meta.foo"),
+ context,
+ )
+ == "foo-value"
+ )
+ assert (
+ replace_context_values(
+ ContextValue("$.meta.bar"),
+ context,
+ )
+ == "bar-value"
+ )
+ assert (
+ replace_context_values(
+ ContextValue("$.meta.a-number"),
+ context,
+ )
+ == 1
+ )
assert replace_context_values(
ContextValue("$.meta.a-list"),
context,
diff --git a/tests/test_directives.py b/tests/test_directives.py
index f72627a..419a32e 100644
--- a/tests/test_directives.py
+++ b/tests/test_directives.py
@@ -20,8 +20,7 @@ def test_all_directives_have_builder_class():
directive_names = set(DIRECTIVE_REGISTRY)
builder_names = set(_DIRECTIVE_BUILDER_REGISTRY)
- assert directive_names <= builder_names, \
- "Some directives don't have a builder class!"
+ assert directive_names <= builder_names, "Some directives don't have a builder class!"
def test_mapped_mutually_exclusive_key_options():
diff --git a/tests/test_log.py b/tests/test_log.py
index c864cf2..0970d0e 100644
--- a/tests/test_log.py
+++ b/tests/test_log.py
@@ -44,11 +44,13 @@ def test_init_custom_log_record_factory_update(caplog):
assert caplog.records[0].granule_name is None
assert caplog.records[0].workflow_execution_name is None
- init_custom_log_record_factory({
- "cumulus_meta": {
- "cumulus_version": "v0.0.0",
+ init_custom_log_record_factory(
+ {
+ "cumulus_meta": {
+ "cumulus_version": "v0.0.0",
+ },
},
- })
+ )
caplog.clear()
with caplog.at_level(logging.INFO):
diff --git a/tests/test_source.py b/tests/test_source.py
index 2414d0d..5361178 100644
--- a/tests/test_source.py
+++ b/tests/test_source.py
@@ -68,10 +68,7 @@ class CustomSource(Source):
arg1: str
def query_all_values(self, context: Context):
- self._values.update({
- key: key.key
- for key in self._keys
- })
+ self._values.update({key: key.key for key in self._keys})
source = CustomSource("foo")
source.add_key(Key("hello"))
diff --git a/tests/test_source_provider.py b/tests/test_source_provider.py
index b4f0a33..a0ea21a 100644
--- a/tests/test_source_provider.py
+++ b/tests/test_source_provider.py
@@ -51,61 +51,65 @@ def test_py_source_provider(sources):
def test_config_source_provider(sources):
- provider = ConfigSourceProvider({
- "foo": {
- "storage": {
- "class": "LocalFile",
- "filters": {
- "name": "foo",
+ provider = ConfigSourceProvider(
+ {
+ "foo": {
+ "storage": {
+ "class": "LocalFile",
+ "filters": {
+ "name": "foo",
+ },
},
- },
- "format": {
- "class": "Json",
- },
- },
- "bar": {
- "storage": {
- "class": "LocalFile",
- "filters": {
- "name": "bar",
+ "format": {
+ "class": "Json",
},
},
- "format": {
- "class": "Json",
- },
- },
- "baz": {
- "storage": {
- "class": "LocalFile",
- "filters": {
- "name": "baz",
+ "bar": {
+ "storage": {
+ "class": "LocalFile",
+ "filters": {
+ "name": "bar",
+ },
+ },
+ "format": {
+ "class": "Json",
},
},
- "format": {
- "class": "ZipMember",
- "filters": {
- "filename": "foo",
+ "baz": {
+ "storage": {
+ "class": "LocalFile",
+ "filters": {
+ "name": "baz",
+ },
},
"format": {
- "class": "Json",
+ "class": "ZipMember",
+ "filters": {
+ "filename": "foo",
+ },
+ "format": {
+ "class": "Json",
+ },
},
},
},
- })
+ )
assert provider.get_sources() == sources
def test_config_source_provider_source_type():
- provider = ConfigSourceProvider({
- "foo": {
- "class": "DummySource",
- "arg1": "foobar",
- "storage": {
- "class": "LocalFile",
+ provider = ConfigSourceProvider(
+ {
+ "foo": {
+ "class": "DummySource",
+ "arg1": "foobar",
+ "storage": {
+ "class": "LocalFile",
+ },
},
},
- })
+ )
assert provider.get_sources() == {
"foo": DummySource(
@@ -116,23 +120,22 @@ def test_config_source_provider_source_type():
def test_config_source_provider_wrong_base_class_type():
- provider = ConfigSourceProvider({
- "foo": {
- "class": "DummySource",
- "arg1": "foobar",
- "storage": {
- # Dummy storage is not a FilteredStorage
- "class": "Dummy",
+ provider = ConfigSourceProvider(
+ {
+ "foo": {
+ "class": "DummySource",
+ "arg1": "foobar",
+ "storage": {
+ # Dummy storage is not a FilteredStorage
+ "class": "Dummy",
+ },
},
},
- })
+ )
with pytest.raises(
SourceProviderError,
- match=(
- "failed to create source 'foo': invalid storage type 'Dummy' must "
- "be a subclass of 'FilteredStorage'"
- ),
+ match=("failed to create source 'foo': invalid storage type 'Dummy' must be a subclass of 'FilteredStorage'"),
):
provider.get_sources()
@@ -140,41 +143,43 @@ def test_config_source_provider_wrong_base_class_type():
@pytest.mark.h5
@pytest.mark.xml
def test_config_source_provider_all_formats():
- provider = ConfigSourceProvider({
- "json": {
- "storage": {
- "class": "LocalFile",
- "filters": {
- "name": "foo",
+ provider = ConfigSourceProvider(
+ {
+ "json": {
+ "storage": {
+ "class": "LocalFile",
+ "filters": {
+ "name": "foo",
+ },
},
- },
- "format": {
- "class": "Json",
- },
- },
- "xml": {
- "storage": {
- "class": "LocalFile",
- "filters": {
- "name": "bar",
+ "format": {
+ "class": "Json",
},
},
- "format": {
- "class": "Xml",
- },
- },
- "h5": {
- "storage": {
- "class": "LocalFile",
- "filters": {
- "name": "baz",
+ "xml": {
+ "storage": {
+ "class": "LocalFile",
+ "filters": {
+ "name": "bar",
+ },
+ },
+ "format": {
+ "class": "Xml",
},
},
- "format": {
- "class": "H5",
+ "h5": {
+ "storage": {
+ "class": "LocalFile",
+ "filters": {
+ "name": "baz",
+ },
+ },
+ "format": {
+ "class": "H5",
+ },
},
},
- })
+ )
assert provider.get_sources() == {
"json": FileSource(LocalFile(filters={"name": "foo"}), Json()),
@@ -190,29 +195,31 @@ def test_config_source_provider_empty():
def test_config_source_provider_context_values():
- provider = ConfigSourceProvider({
- "arg": {
- "storage": {
- "class": "LocalFile",
- "filters": "$.meta.filters",
- },
- "format": {
- "class": "Json",
- },
- },
- "arg_nested": {
- "storage": {
- "class": "LocalFile",
- "filters": {
- "name": "$.meta.name_filter",
- "dollar": "$$.meta.not-replaced",
+ provider = ConfigSourceProvider(
+ {
+ "arg": {
+ "storage": {
+ "class": "LocalFile",
+ "filters": "$.meta.filters",
+ },
+ "format": {
+ "class": "Json",
},
},
- "format": {
- "class": "Json",
+ "arg_nested": {
+ "storage": {
+ "class": "LocalFile",
+ "filters": {
+ "name": "$.meta.name_filter",
+ "dollar": "$$.meta.not-replaced",
+ },
+ },
+ "format": {
+ "class": "Json",
+ },
},
},
- })
+ )
assert provider.get_sources() == {
"arg": FileSource(
@@ -234,13 +241,15 @@ def test_config_source_provider_context_values():
def test_config_source_provider_missing_storage():
- provider = ConfigSourceProvider({
- "source": {
- "format": {
- "class": "Json",
+ provider = ConfigSourceProvider(
+ {
+ "source": {
+ "format": {
+ "class": "Json",
+ },
},
},
- })
+ )
with pytest.raises(
SourceProviderError,
@@ -253,34 +262,35 @@ def test_config_source_provider_missing_storage():
def test_config_source_provider_invalid_storage():
- provider = ConfigSourceProvider({
- "source": {
- "storage": {
- "class": "NotARealStorage",
+ provider = ConfigSourceProvider(
+ {
+ "source": {
+ "storage": {
+ "class": "NotARealStorage",
+ },
},
},
- })
+ )
with pytest.raises(
SourceProviderError,
- match=(
- "failed to create source 'source': "
- "invalid storage type 'NotARealStorage'"
- ),
+ match="failed to create source 'source': invalid storage type 'NotARealStorage'",
):
provider.get_sources()
@pytest.mark.parametrize("cls_name", STORAGE_REGISTRY.keys())
def test_config_source_provider_invalid_storage_kwargs(cls_name):
- provider = ConfigSourceProvider({
- "source": {
- "storage": {
- "class": cls_name,
- "invalid_arg": 1,
+ provider = ConfigSourceProvider(
+ {
+ "source": {
+ "storage": {
+ "class": cls_name,
+ "invalid_arg": 1,
+ },
},
},
- })
+ )
with pytest.raises(
SourceProviderError,
@@ -295,13 +305,15 @@ def test_config_source_provider_invalid_storage_kwargs(cls_name):
@pytest.mark.s3
def test_config_source_provider_missing_format():
- provider = ConfigSourceProvider({
- "source": {
- "storage": {
- "class": "S3File",
+ provider = ConfigSourceProvider(
+ {
+ "source": {
+ "storage": {
+ "class": "S3File",
+ },
},
},
- })
+ )
with pytest.raises(
SourceProviderError,
@@ -315,23 +327,22 @@ def test_config_source_provider_missing_format():
@pytest.mark.s3
def test_config_source_provider_invalid_format():
- provider = ConfigSourceProvider({
- "source": {
- "storage": {
- "class": "S3File",
- },
- "format": {
- "class": "NotARealFormat",
+ provider = ConfigSourceProvider(
+ {
+ "source": {
+ "storage": {
+ "class": "S3File",
+ },
+ "format": {
+ "class": "NotARealFormat",
+ },
},
},
- })
+ )
with pytest.raises(
SourceProviderError,
- match=(
- "failed to create source 'source': "
- "invalid format type 'NotARealFormat'"
- ),
+ match="failed to create source 'source': invalid format type 'NotARealFormat'",
):
provider.get_sources()
@@ -339,17 +350,19 @@ def test_config_source_provider_invalid_format():
@pytest.mark.s3
@pytest.mark.parametrize("cls_name", FORMAT_REGISTRY.keys())
def test_config_source_provider_invalid_format_kwargs(cls_name):
- provider = ConfigSourceProvider({
- "source": {
- "storage": {
- "class": "S3File",
- },
- "format": {
- "class": cls_name,
- "invalid_arg": 1,
+ provider = ConfigSourceProvider(
+ {
+ "source": {
+ "storage": {
+ "class": "S3File",
+ },
+ "format": {
+ "class": cls_name,
+ "invalid_arg": 1,
+ },
},
},
- })
+ )
with pytest.raises(
SourceProviderError,
diff --git a/tests/test_storage.py b/tests/test_storage.py
index 080ed48..eac555c 100644
--- a/tests/test_storage.py
+++ b/tests/test_storage.py
@@ -40,10 +40,12 @@ def test_registry_error():
def test_local_file(data_path):
context = Context(
- files=[{
- "name": "local_file",
- "path": str(data_path / "local_file.txt"),
- }],
+ files=[
+ {
+ "name": "local_file",
+ "path": str(data_path / "local_file.txt"),
+ },
+ ],
)
storage = LocalFile(filters={"name": "local_file"})
@@ -53,10 +55,12 @@ def test_local_file(data_path):
def test_local_file_name_match(data_path):
context = Context(
- files=[{
- "name": "local_file",
- "path": str(data_path / "local_file.txt"),
- }],
+ files=[
+ {
+ "name": "local_file",
+ "path": str(data_path / "local_file.txt"),
+ },
+ ],
)
storage = LocalFile(filters={"name": "local_.*"})
@@ -66,10 +70,12 @@ def test_local_file_name_match(data_path):
def test_local_file_int_filter(data_path):
context = Context(
- files=[{
- "type": 0,
- "path": str(data_path / "local_file.txt"),
- }],
+ files=[
+ {
+ "type": 0,
+ "path": str(data_path / "local_file.txt"),
+ },
+ ],
)
storage = LocalFile(filters={"type": 0})
@@ -108,11 +114,13 @@ def test_s3_file_s3uri(s3_resource):
obj.upload_fileobj(io.BytesIO(b"Some remote file content\n"))
context = Context(
- files=[{
- "name": "s3_file",
- "bucket": "test-bucket",
- "key": "bucket_file.txt",
- }],
+ files=[
+ {
+ "name": "s3_file",
+ "bucket": "test-bucket",
+ "key": "bucket_file.txt",
+ },
+ ],
)
storage = S3File(filters={"name": "s3_file"})
@@ -128,11 +136,13 @@ def test_s3_file_s3fs_kwargs(s3_resource):
obj.upload_fileobj(io.BytesIO(b"Some remote file content\n"))
context = Context(
- files=[{
- "name": "s3_file",
- "bucket": "test-bucket",
- "key": "bucket_file.txt",
- }],
+ files=[
+ {
+ "name": "s3_file",
+ "bucket": "test-bucket",
+ "key": "bucket_file.txt",
+ },
+ ],
)
storage = S3File(
filters={"name": "s3_file"},
@@ -174,15 +184,19 @@ def create_file(bucket, name, contents=None, type="data"):
],
)
- storage = S3File(filters={
- "name": "file1.txt",
- })
+ storage = S3File(
+ filters={
+ "name": "file1.txt",
+ },
+ )
with storage.open_file(context) as f:
assert f.read() == b"Content from file1.txt\n"
- storage = S3File(filters={
- "type": "metadata",
- })
+ storage = S3File(
+ filters={
+ "type": "metadata",
+ },
+ )
with storage.open_file(context) as f:
assert f.read() == b"Content from file2.txt\n"
@@ -192,28 +206,43 @@ def test_cmr_query_params():
with pytest.raises(ValueError):
CmrQuery(url="foobar")
- assert CmrQuery(
- base_url="http://foo.bar",
- path="/search/granules",
- )._get_url() == "http://foo.bar/search/granules"
- assert CmrQuery(
- base_url="http://foo.bar",
- path="search/granules",
- )._get_url() == "http://foo.bar/search/granules"
- assert CmrQuery(
- base_url="http://foo.bar/",
- path="/search/granules",
- )._get_url() == "http://foo.bar/search/granules"
- assert CmrQuery(
- base_url="http://foo.bar/",
- path="search/granules",
- )._get_url() == "http://foo.bar/search/granules"
-
- assert CmrQuery(
- base_url="http://foo.bar",
- path="/search/granules",
- format="umm_json",
- )._get_url() == "http://foo.bar/search/granules.umm_json"
+ assert (
+ CmrQuery(
+ base_url="http://foo.bar",
+ path="/search/granules",
+ )._get_url()
+ == "http://foo.bar/search/granules"
+ )
+ assert (
+ CmrQuery(
+ base_url="http://foo.bar",
+ path="search/granules",
+ )._get_url()
+ == "http://foo.bar/search/granules"
+ )
+ assert (
+ CmrQuery(
+ base_url="http://foo.bar/",
+ path="/search/granules",
+ )._get_url()
+ == "http://foo.bar/search/granules"
+ )
+ assert (
+ CmrQuery(
+ base_url="http://foo.bar/",
+ path="search/granules",
+ )._get_url()
+ == "http://foo.bar/search/granules"
+ )
+
+ assert (
+ CmrQuery(
+ base_url="http://foo.bar",
+ path="/search/granules",
+ format="umm_json",
+ )._get_url()
+ == "http://foo.bar/search/granules.umm_json"
+ )
assert CmrQuery(token="foobar")._get_headers() == {
"Authorization": "foobar",