Skip to content

Commit bea3d08

Browse files
author
saul-data
committed
Release: include environment ID in transfer keys
1 parent 8c7f693 commit bea3d08

File tree

2 files changed

+6
-2
lines changed

2 files changed

+6
-2
lines changed

src/dataplane/pipelinerun/data_persist/pandas_s3_store.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,9 @@ def pipeline_pandas_s3_store(StoreKey, DataFrame, S3Client, Bucket, Expire=True,
1818

1919
# Start the timer
2020
start = datetime.now()
21+
EnvID = os.getenv("DP_ENVID")
2122

22-
InsertKey = "/dataplane-transfer/" + StoreKey+ "-" +os.getenv("DP_RUNID")+".parquet"
23+
InsertKey = f"/dataplane-transfer/{EnvID}/" + StoreKey+ "-" +os.getenv("DP_RUNID")+".parquet"
2324

2425
output_buffer=BytesIO()
2526
DataFrame.to_parquet(output_buffer,index=False,compression='gzip',engine='pyarrow',allow_truncated_timestamps=True)
@@ -44,7 +45,9 @@ def pipeline_pandas_s3_get(StoreKey, S3Client, Bucket):
4445
# Start the timer
4546
start = datetime.now()
4647

47-
InsertKey = "/dataplane-transfer/" + StoreKey+ "-" +os.getenv("DP_RUNID")+".parquet"
48+
EnvID = os.getenv("DP_ENVID")
49+
50+
InsertKey = f"/dataplane-transfer/{EnvID}/" + StoreKey+ "-" +os.getenv("DP_RUNID")+".parquet"
4851

4952
# Retrieve dataframe from key
5053
# buffer = BytesIO()

src/dataplane/pipelinerun/data_persist/test_s3_store.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ def test_s3_store():
1515

1616
# Dataplane run id
1717
os.environ["DP_RUNID"] = generate('1234567890abcdef', 10)
18+
os.environ["DP_ENVID"] ="E1"
1819

1920
# Data to store as parquet
2021
data = {

0 commit comments

Comments
 (0)