
Commit 765a263 (parent: 947efdd)

fix: fixed failing test for foundry pipeline introduced by previous commit

2 files changed, 8 insertions(+), 8 deletions(-)

src/dve/pipeline/foundry_ddb_pipeline.py (6 additions, 6 deletions)
@@ -20,17 +20,17 @@ class FoundryDDBPipeline(DDBDVEPipeline):
     def persist_audit_records(self, submission_info: SubmissionInfo) -> URI:
         """Write out key audit relations to parquet for persisting to datasets"""
         write_to = fh.joinuri(self.processed_files_path, submission_info.submission_id, "audit/")
-        if isinstance(_get_implementation(output_uri), LocalFilesystemImplementation):
-            output_uri = fh.file_uri_to_local_path(output_uri)
-            output_uri.parent.mkdir(parents=True, exist_ok=True)
-            output_uri = output_uri.as_posix()
+        if isinstance(_get_implementation(write_to), LocalFilesystemImplementation):
+            write_to = fh.file_uri_to_local_path(write_to)
+            write_to.parent.mkdir(parents=True, exist_ok=True)
+            write_to = write_to.as_posix()
         self.write_parquet(
             self._audit_tables._processing_status.get_relation(),
-            write_to + "processing_status.parquet",
+            fh.joinuri(write_to, "processing_status.parquet"),
         )
         self.write_parquet(
             self._audit_tables._submission_statistics.get_relation(),
-            write_to + "submission_statistics.parquet",
+            fh.joinuri(write_to, "submission_statistics.parquet"),
         )
         return write_to
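The removed lines referenced output_uri, which is not defined anywhere in this hunk (the prefix is assigned to write_to), so the local-filesystem branch could not have worked; the fix points those references back at write_to. It also swaps plain string concatenation for fh.joinuri when naming the parquet files. A minimal sketch of why that matters, using only the standard library and a hypothetical local path (the exact behaviour of fh.joinuri is an assumption here):

from pathlib import PurePosixPath

# Hypothetical local form of the audit prefix after the LocalFilesystemImplementation
# branch has run: as_posix() drops the trailing "/" that the "audit/" segment carried.
write_to = PurePosixPath("/tmp/processed/sub-1/audit/").as_posix()

# Old behaviour: plain concatenation glues the directory name onto the file name.
print(write_to + "processing_status.parquet")
# /tmp/processed/sub-1/auditprocessing_status.parquet

# A separator-aware join (what fh.joinuri presumably does for local paths) keeps the path intact.
print(str(PurePosixPath(write_to) / "processing_status.parquet"))
# /tmp/processed/sub-1/audit/processing_status.parquet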

tests/test_pipeline/test_foundry_ddb_pipeline.py (2 additions, 2 deletions)
@@ -49,7 +49,7 @@ def test_foundry_runner_validation_fail(planet_test_files, temp_ddb_conn):
     output_loc, report_uri, audit_files = dve_pipeline.run_pipeline(sub_info)
     assert fh.get_resource_exists(report_uri)
     assert not output_loc
-    assert len(list(fh.iter_prefix(audit_files))) == 2
+    assert len(list(fh.iter_prefix(audit_files))) == 3


 def test_foundry_runner_validation_success(movies_test_files, temp_ddb_conn):
@@ -86,7 +86,7 @@ def test_foundry_runner_validation_success(movies_test_files, temp_ddb_conn):
     output_loc, report_uri, audit_files = dve_pipeline.run_pipeline(sub_info)
     assert fh.get_resource_exists(report_uri)
     assert len(list(fh.iter_prefix(output_loc))) == 2
-    assert len(list(fh.iter_prefix(audit_files))) == 2
+    assert len(list(fh.iter_prefix(audit_files))) == 3

 def test_foundry_runner_error(planet_test_files, temp_ddb_conn):
     # using spark reader config - should error in file transformation - check gracefully handled
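Both updated tests count the objects under the returned audit prefix, and the expected count moves from two to three: the two parquet files written by persist_audit_records plus, presumably, a third audit artifact produced elsewhere in the pipeline that is not visible in this diff. A rough local-filesystem stand-in for that assertion, assuming fh.iter_prefix yields one entry per object under the prefix:

from pathlib import Path

def count_prefix_entries(prefix: str) -> int:
    # Local stand-in for len(list(fh.iter_prefix(audit_files))):
    # count the files written under the audit prefix.
    return sum(1 for entry in Path(prefix).iterdir() if entry.is_file())

# e.g. processing_status.parquet, submission_statistics.parquet, and the
# third audit file the updated tests now expect:
# assert count_prefix_entries(audit_files) == 3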
