Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 64 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ chrono = { version = "0.4.38", features = ["serde"] }
crc32fast = "1.4.2"
convert_case = "0.8.0"
dashmap = "6"
libc = "0.2"
clipanion = { git = "https://github.com/arcanis/clipanion-rs.git", features = ["serde", "tokens"] }
colored = "3.0.0"
dialoguer = "0.11.0"
Expand Down Expand Up @@ -78,6 +79,7 @@ rayon = "1.10.0"
rkyv = { version = "0.8", features = ["bytecheck"] }
reqwest = { version = "0.12.26", default-features = false, features = ["gzip", "http2", "hickory-dns", "rustls-tls"] }
regex = "1.10.6"
reflink-copy = "0.1.29"
ring = "0.17.14"
rstest = "0.26.1"
serde_plain = "1.0.2"
Expand Down
1 change: 1 addition & 0 deletions packages/zpm-utils/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,3 +27,4 @@ indexmap = { workspace = true, features = ["serde"]}
timeago = { workspace = true }
fundu = { workspace = true }
zpm-macro-enum = { workspace = true }
reflink-copy = { workspace = true }
60 changes: 60 additions & 0 deletions packages/zpm-utils/src/path.rs
Original file line number Diff line number Diff line change
Expand Up @@ -769,6 +769,66 @@ impl Path {
Ok(self)
}

/**
 * Clones a file or directory tree from `self` to `new_path`, using
 * copy-on-write reflinks where the filesystem supports them and falling
 * back to a regular copy otherwise. Returns `self` on success; on
 * unsupported platforms returns an `Unsupported` I/O error.
 */
pub fn fs_clonefile(&self, new_path: &Path) -> Result<&Self, PathError> {
    // macOS: delegate the whole clone to reflink_copy (clonefile(2) under
    // the hood), with a plain-copy fallback when cloning is unsupported.
    // NOTE(review): assumes reflink_copy handles directory trees on macOS
    // as clonefile(2) does — TODO confirm against the crate's docs.
    #[cfg(target_os = "macos")]
    {
        let _ = reflink_copy::reflink_or_copy(self.to_path_buf(), new_path.to_path_buf())?;
        return Ok(self);
    }

    // Linux: reflinks only apply to regular files, so walk the tree
    // manually and clone entry by entry.
    #[cfg(target_os = "linux")]
    {
        use std::os::unix::fs::PermissionsExt;

        // Recursively mirrors `src` into `dst`: recreates symlinks and
        // directories, reflinks (or copies) regular files.
        fn clone_tree_linux(src: &Path, dst: &Path) -> Result<(), PathError> {
            // symlink_metadata so a symlink is classified as a symlink
            // rather than as whatever it points at.
            let metadata = src.fs_symlink_metadata()?;
            let file_type = metadata.file_type();

            // Symlinks are recreated pointing at the same target (never
            // followed), so relative links keep working inside the clone.
            if file_type.is_symlink() {
                let target = src.fs_read_link()?;
                dst.fs_create_parent()?;
                dst.fs_symlink(&target)?;
                return Ok(());
            }

            // Directories: create the destination, then recurse into each
            // entry with the same file name joined onto `dst`.
            if file_type.is_dir() {
                dst.fs_create_dir_all()?;

                for entry in src.fs_read_dir()? {
                    let entry = entry?;
                    let entry_path = Path::try_from(entry.path())?;
                    let entry_name = Path::try_from(entry.file_name())?;
                    let entry_dest = dst.with_join(&entry_name);
                    clone_tree_linux(&entry_path, &entry_dest)?;
                }

                return Ok(());
            }

            // Regular files: reflink when the filesystem supports it,
            // otherwise copy; then re-apply the source's mode bits, since
            // the fallback copy path may not preserve permissions.
            if file_type.is_file() {
                let mode = metadata.permissions().mode();

                dst.fs_create_parent()?;
                let _ = reflink_copy::reflink_or_copy(src.to_path_buf(), dst.to_path_buf())?;
                dst.fs_set_permissions(std::fs::Permissions::from_mode(mode))?;

                return Ok(());
            }

            // Anything else (FIFOs, sockets, device nodes) is rejected.
            Err(std::io::Error::new(std::io::ErrorKind::Unsupported, "unsupported file type").into())
        }

        clone_tree_linux(self, new_path)?;
        return Ok(self);
    }

    // Other platforms: explicitly unsupported. `new_path` is bound to `_`
    // only to silence the unused-parameter warning on these targets.
    #[cfg(not(any(target_os = "macos", target_os = "linux")))]
    {
        let _ = new_path;
        Err(std::io::Error::new(std::io::ErrorKind::Unsupported, "clonefile is only supported on macOS and Linux").into())
    }
}

/**
* Move a file or directory to a new location, copying it if the source and
* destination are on different devices.
Expand Down
131 changes: 129 additions & 2 deletions packages/zpm/src/linker/helpers.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
use std::{collections::{BTreeMap, BTreeSet}, fs::Permissions, os::unix::fs::PermissionsExt, vec};
use std::{collections::{BTreeMap, BTreeSet}, fs::Permissions, os::unix::fs::PermissionsExt, time::{SystemTime, UNIX_EPOCH}, vec};

use zpm_formats::iter_ext::IterExt;
use zpm_parsers::JsonDocument;
use zpm_primitives::{Descriptor, FilterDescriptor, Locator};
use zpm_utils::{Path, PathError, System};
use zpm_utils::{Path, PathError, System, ToFileString};
use sha2::{Digest, Sha512};
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use hex;

use crate::{
build,
Expand Down Expand Up @@ -120,6 +122,131 @@ pub fn fs_extract_archive(destination: &Path, package_data: &PackageData) -> Res
}
}

// Helper: SHA-512 of `input`, rendered as a lowercase hex string.
fn compute_sha512_hex(input: &str) -> String {
    // `Digest::digest` performs the new/update/finalize cycle in one call.
    let digest = Sha512::digest(input.as_bytes());
    hex::encode(digest)
}

// Generates a Yarn Berry-compatible hash. Used for Sharp packages
pub fn yarn_berry_hash(locator: &Locator) -> Result<String, Error> {
    // Scope without its '@' prefix; an absent (or prefix-less) scope
    // contributes an empty string, matching Yarn Berry's behavior.
    let scope_part = match locator.ident.scope() {
        Some(scope) => scope.strip_prefix('@').unwrap_or_default(),
        None => "",
    };

    // First round: hash the package identifier (scope + name).
    let identifier_digest =
        compute_sha512_hex(&format!("{}{}", scope_part, locator.ident.name()));

    // Second round: hash the identifier digest concatenated with the version.
    let version = locator.reference.to_file_string();
    let combined_digest =
        compute_sha512_hex(&format!("{}{}", identifier_digest, version));

    // Yarn Berry truncates to the first 10 hex characters.
    Ok(combined_digest[..10].to_string())
}

/**
 * Materializes an unplugged package at `dest_wrapper`/`dest_package_root`
 * by cloning it from a shared per-user ("global") unplugged cache,
 * populating that cache first if necessary.
 *
 * Returns Ok(true) when the destination was freshly cloned from the global
 * cache; Ok(false) when the destination was already materialized (its
 * `.ready` marker exists) or when `package_data` is not a zip archive.
 * On any cloning failure it falls back to extracting the archive directly
 * into `dest_package_root`.
 */
pub fn fs_materialize_unplugged_from_global_cache(project: &Project, locator: &Locator, dest_wrapper: &Path, dest_package_root: &Path, package_data: &PackageData) -> Result<bool, Error> {
    // Platforms without fs_clonefile support skip the global cache entirely
    // and extract straight into the destination.
    #[cfg(not(any(target_os = "macos", target_os = "linux")))]
    {
        let _ = (project, locator, dest_wrapper);
        return fs_extract_archive(dest_package_root, package_data);
    }

    #[cfg(any(target_os = "macos", target_os = "linux"))]
    {
        // `.ready` is the sentinel marking a completed materialization.
        let dest_ready = dest_package_root
            .with_join_str(".ready");

        if dest_ready.fs_exists() {
            return Ok(false);
        }

        // Only zip packages are routed through the global unplugged cache.
        // NOTE(review): non-zip package data returns Ok(false) without
        // extracting anything — confirm callers handle that case elsewhere.
        let PackageData::Zip {..} = package_data else {
            return Ok(false);
        };

        let global_base = project
            .global_unplugged_path();

        global_base
            .fs_create_dir_all()?;

        let physical = locator
            .physical_locator();

        // Directory name mirrors Yarn Berry's unplugged layout:
        // <ident-slug>-<reference-slug>-<berry-hash>.
        let global_wrapper_name = format!(
            "{}-{}-{}",
            physical.ident.slug(),
            physical.reference.slug(),
            yarn_berry_hash(&physical)?,
        );

        let global_wrapper = global_base
            .with_join_str(&global_wrapper_name);

        let package_subpath = package_data
            .package_subpath();

        let global_package_root = global_wrapper
            .with_join(&package_subpath);

        // `.ready` inside the global cache marks a fully published entry.
        let global_ready = global_package_root
            .with_join_str(".ready");

        if !global_ready.fs_exists() {
            // Populate the global cache via a uniquely named temp dir so
            // concurrent processes never observe a half-written entry.
            // Nanosecond nonce + pid keep temp names unique across
            // processes; `unwrap` assumes the clock is not before the
            // Unix epoch.
            let nonce = SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .unwrap()
                .as_nanos();

            let tmp_wrapper = global_base.with_join_str(format!(
                ".{}.tmp-{}-{}",
                global_wrapper_name,
                std::process::id(),
                nonce,
            ));

            let tmp_package_root = tmp_wrapper
                .with_join(&package_subpath);

            // Extract into the temp dir, then publish it under the final
            // name. The move result is deliberately ignored: losing the
            // race to another process publishing the same entry is fine.
            // NOTE(review): presumably fs_concurrent_move is rename-based
            // and tolerant of an existing destination — confirm.
            if fs_extract_archive(&tmp_package_root, package_data).is_ok() {
                let _ = tmp_wrapper
                    .fs_concurrent_move(&global_wrapper);
            }

            // If the global entry still isn't ready (extraction or publish
            // failed), clean up and fall back to direct extraction.
            if !global_ready.fs_exists() {
                let _ = tmp_wrapper.fs_rm();
                return fs_extract_archive(dest_package_root, package_data);
            }

            // Publish succeeded (or another process won); drop any leftover
            // temp dir from a lost race. Failure to remove is non-fatal.
            let _ = tmp_wrapper.fs_rm();
        }

        // A destination that exists without its `.ready` marker is a stale
        // partial materialization — remove it before cloning.
        if dest_wrapper.fs_exists() && !dest_ready.fs_exists() {
            dest_wrapper.fs_rm()?;
        }

        dest_wrapper
            .fs_create_parent()?;

        // Clone the published global entry into place; on any failure,
        // remove the partial clone and fall back to direct extraction.
        // NOTE(review): assumes the `.ready` marker is part of the cloned
        // tree, so the destination ends up marked ready — confirm.
        match global_wrapper.fs_clonefile(dest_wrapper) {
            Ok(_) => Ok(true),
            Err(_) => {
                if dest_wrapper.fs_exists() {
                    let _ = dest_wrapper.fs_rm();
                }

                fs_extract_archive(dest_package_root, package_data)
            },
        }
    }
}

pub fn populate_build_entry_dependencies(package_build_entries: &BTreeMap<Locator, usize>, locator_resolutions: &BTreeMap<Locator, Resolution>, descriptor_to_locator: &BTreeMap<Descriptor, Locator>) -> Result<BTreeMap<usize, BTreeSet<usize>>, Error> {
let mut package_build_dependencies
= BTreeMap::new();
Expand Down
Loading