
    print(" ✓ File content verified correct (hash matches)")

+ @setup_s3()
+ def test_multipart_upload_basic(bucket):
+     """Test basic multipart upload with a large file"""
+     print("\n--- Test: Multipart Upload Basic ---")
+
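+     # Build a ~10 MiB store path from random data; random bytes don't
+     # compress, so the uploaded NAR stays above the 5 MiB multipart
+     # threshold configured below.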
+     large_file_size = 10 * 1024 * 1024
+     large_pkg = server.succeed(
+         "nix-store --add $(dd if=/dev/urandom of=/tmp/large-file bs=1M count=10 2>/dev/null && echo /tmp/large-file)"
+     ).strip()
+
+     chunk_size = 5 * 1024 * 1024
+     expected_parts = 3  # 10 MB raw becomes ~10.5 MB compressed (NAR + xz overhead)
+
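+     # Enable multipart uploads and set the threshold to 5 MiB so this
+     # upload is split into chunk_size-sized parts.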
+     store_url = make_s3_url(
+         bucket,
+         **{
+             "multipart-upload": "true",
+             "multipart-threshold": str(5 * 1024 * 1024),
+             "multipart-chunk-size": str(chunk_size),
+         }
+     )
+
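+     # Copy with --debug so the chosen upload strategy shows up in the log.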
+     print(f" Uploading {large_file_size} byte file (expect {expected_parts} parts)")
+     output = server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {large_pkg} --debug 2>&1")
+
+     if "using S3 multipart upload" not in output:
+         raise Exception("Expected multipart upload to be used")
+
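+     # The debug output also reports how many parts were uploaded; check the
+     # count to confirm the object was actually chunked as expected.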
+     expected_msg = f"{expected_parts} parts uploaded"
+     if expected_msg not in output:
+         print("Debug output:")
+         print(output)
+         raise Exception(f"Expected '{expected_msg}' in output")
+
+     print(f" ✓ Multipart upload used with {expected_parts} parts")
+
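+     # Round-trip: fetch the path back on the client and verify it landed in
+     # the local store.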
+     client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' {large_pkg} --no-check-sigs")
+     verify_packages_in_store(client, large_pkg, should_exist=True)
+
+     print(" ✓ Large file downloaded and verified")
+
+ @setup_s3()
+ def test_multipart_threshold(bucket):
+     """Test that files below the threshold use regular upload"""
+     print("\n--- Test: Multipart Threshold Behavior ---")
+
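+     # Multipart stays enabled, but a 1 GiB threshold is far above the size
+     # of the small test package, so the regular upload path should be used.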
+     store_url = make_s3_url(
+         bucket,
+         **{
+             "multipart-upload": "true",
+             "multipart-threshold": str(1024 * 1024 * 1024),
+         }
+     )
+
+     print(" Uploading small file with high threshold")
+     output = server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['A']} --debug 2>&1")
+
+     if "using S3 multipart upload" in output:
+         raise Exception("Should not use multipart for file below threshold")
+
+     if "using S3 regular upload" not in output:
+         raise Exception("Expected regular upload to be used")
+
+     print(" ✓ Regular upload used for file below threshold")
+
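+     # The small package should still round-trip through the cache as usual.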
+     client.succeed(f"{ENV_WITH_CREDS} nix copy --no-check-sigs --from '{store_url}' {PKGS['A']}")
+     verify_packages_in_store(client, PKGS['A'], should_exist=True)
+
+     print(" ✓ Small file uploaded and verified")
+
# ============================================================================
# Main Test Execution
# ============================================================================

test_compression_mixed()
test_compression_disabled()
test_nix_prefetch_url()
+ test_multipart_upload_basic()
+ test_multipart_threshold()

print("\n" + "="*80)
print("✓ All S3 Binary Cache Store Tests Passed!")