Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 7 additions & 24 deletions src/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ use tokio::sync::OnceCell as OnceLock;

use crate::{
context::ResolveContext as Ctx,
hashing::{hash_path, IdentityHasher},
package_json::{off_to_location, PackageJson},
path::PathUtil,
FileMetadata, FileSystem, JSONError, ResolveError, ResolveOptions, TsConfig,
Expand Down Expand Up @@ -43,11 +44,7 @@ impl<Fs: Send + Sync + FileSystem> Cache<Fs> {
}

pub fn value(&self, path: &Path) -> CachedPath {
let hash = {
let mut hasher = FxHasher::default();
path.hash(&mut hasher);
hasher.finish()
};
let hash = hash_path(path);
if let Some(cache_entry) = self.paths.get((hash, path).borrow() as &dyn CacheKey) {
return cache_entry.clone();
}
Expand Down Expand Up @@ -116,8 +113,9 @@ impl Hash for CachedPath {
}

impl PartialEq for CachedPath {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.0.path == other.0.path
self.0.hash == other.0.hash && (Arc::ptr_eq(&self.0, &other.0) || self.0.path == other.0.path)
}
}
impl Eq for CachedPath {}
Expand Down Expand Up @@ -390,7 +388,9 @@ impl Hash for dyn CacheKey + '_ {

impl PartialEq for dyn CacheKey + '_ {
fn eq(&self, other: &Self) -> bool {
self.tuple().1 == other.tuple().1
let self_tuple = self.tuple();
let other_tuple = other.tuple();
self_tuple.0 == other_tuple.0 && self_tuple.1 == other_tuple.1
}
}

Expand All @@ -407,20 +407,3 @@ impl<'a> Borrow<dyn CacheKey + 'a> for (u64, &'a Path) {
self
}
}

/// Since the cache key is memoized, use an identity hasher
/// to avoid double cache.
#[derive(Default)]
struct IdentityHasher(u64);

impl Hasher for IdentityHasher {
fn write(&mut self, _: &[u8]) {
unreachable!("Invalid use of IdentityHasher")
}
fn write_u64(&mut self, n: u64) {
self.0 = n;
}
fn finish(&self) -> u64 {
self.0
}
}
30 changes: 30 additions & 0 deletions src/hashing.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
use std::{
hash::{Hash, Hasher},
path::Path,
};

use rustc_hash::FxHasher;

/// Hasher that passes a single precomputed `u64` digest straight through.
///
/// Since cache keys are stored alongside their memoized hash, using an
/// identity hasher avoids hashing the same data twice: the key's `Hash`
/// impl calls `write_u64` with the stored digest and `finish` returns it
/// unchanged. (This doc was present in the original cache.rs and is
/// restored here after the move.)
#[derive(Default)]
pub struct IdentityHasher(u64);

impl Hasher for IdentityHasher {
    fn write(&mut self, _: &[u8]) {
        // Any `Hash` impl that feeds raw bytes (strings, slices, derived
        // structs) would defeat the identity scheme; fail loudly. Note the
        // other `write_*` defaults forward here, so only `write_u64` is valid.
        unreachable!("Invalid use of IdentityHasher")
    }

    #[inline]
    fn write_u64(&mut self, n: u64) {
        // Store the caller-supplied, already-computed digest verbatim.
        self.0 = n;
    }

    #[inline]
    fn finish(&self) -> u64 {
        self.0
    }
}

/// Compute the FxHash digest of `path`.
///
/// Centralised so every cache/key site (e.g. `Cache::value`,
/// `PathDependency::new`) derives path hashes identically.
#[inline]
pub(crate) fn hash_path(path: &Path) -> u64 {
    let mut state = FxHasher::default();
    path.hash(&mut state);
    state.finish()
}
113 changes: 113 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ mod cache;
mod context;
mod error;
mod file_system;
mod hashing;
mod options;
mod package_json;
mod path;
Expand All @@ -65,8 +66,10 @@ mod tests;
use std::{
borrow::Cow,
cmp::Ordering,
collections::HashSet,
ffi::OsStr,
fmt,
hash::{BuildHasherDefault, Hash, Hasher},
path::{Component, Path, PathBuf},
sync::{Arc, OnceLock},
};
Expand All @@ -79,6 +82,7 @@ pub use crate::{
builtins::NODEJS_BUILTINS,
error::{JSONError, ResolveError, SpecifierError},
file_system::{FileMetadata, FileSystem, FileSystemOptions, FileSystemOs},
hashing::IdentityHasher,
options::{
Alias, AliasValue, EnforceExtension, ResolveOptions, Restriction, TsconfigOptions,
TsconfigReferences,
Expand All @@ -89,6 +93,7 @@ pub use crate::{
use crate::{
cache::{Cache, CachedPath},
context::ResolveContext as Ctx,
hashing::hash_path,
package_json::JSONMap,
path::{PathUtil, SLASH_START},
specifier::Specifier,
Expand All @@ -107,6 +112,84 @@ pub struct ResolveContext {
pub missing_dependencies: FxHashSet<PathBuf>,
}

/// Set of [PathDependency] values keyed by their memoized hash; the
/// [IdentityHasher] build-hasher reuses the stored digest directly instead
/// of hashing the path again on every insert/lookup.
pub type PathDependencySet = HashSet<PathDependency, BuildHasherDefault<IdentityHasher>>;

/// A dependency path paired with its hash digest, computed once at
/// construction so set insertion and equality checks can reuse it instead
/// of re-hashing the path.
#[derive(Debug, Clone)]
pub struct PathDependency {
    // Owned path; exposed read-only via `path()` / `into_path_buf()`.
    path: PathBuf,
    // Digest of `path`, produced by `hash_path` in `PathDependency::new`.
    hash: u64,
}

impl PathDependency {
    /// Create a dependency, hashing `path` exactly once via `hash_path`.
    #[inline]
    pub fn new(path: PathBuf) -> Self {
        let hash = hash_path(&path);
        Self { path, hash }
    }

    /// Borrow the dependency path.
    #[inline]
    pub fn path(&self) -> &Path {
        &self.path
    }

    /// The digest computed in [`Self::new`]; also what the `Hash` and
    /// `PartialEq` impls use for their fast paths.
    #[inline]
    pub fn precomputed_hash(&self) -> u64 {
        self.hash
    }

    /// Consume the dependency, returning the owned path (the hash is dropped).
    #[inline]
    pub fn into_path_buf(self) -> PathBuf {
        self.path
    }
}

impl From<PathBuf> for PathDependency {
    /// Build a dependency from an owned path, computing its hash eagerly.
    #[inline]
    fn from(path: PathBuf) -> Self {
        Self::new(path)
    }
}

impl From<PathDependency> for PathBuf {
    /// Unwrap the dependency into its owned path, discarding the hash.
    #[inline]
    fn from(dependency: PathDependency) -> Self {
        dependency.path
    }
}

impl AsRef<Path> for PathDependency {
    /// Borrow the dependency as a plain `Path`.
    #[inline]
    fn as_ref(&self) -> &Path {
        &self.path
    }
}

impl Hash for PathDependency {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Feed only the memoized digest as a single u64. Combined with
        // `IdentityHasher` (see `PathDependencySet`) this makes set hashing
        // effectively free — no re-walk of the path bytes.
        state.write_u64(self.hash);
    }
}

impl PartialEq for PathDependency {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        // Cheap integer comparison first; the paths themselves are only
        // compared when the digests match, keeping `a == b` consistent with
        // `hash(a) == hash(b)` as the `Hash` impl requires.
        self.hash == other.hash && self.path == other.path
    }
}

// Full equivalence holds because equality ultimately delegates to `PathBuf`.
impl Eq for PathDependency {}

/// Context returned from the [Resolver::resolve_with_prehashed_context] API.
#[derive(Debug, Default, Clone)]
pub struct ResolvePreHashedContext {
    /// Files that were found on the file system, each carrying its
    /// precomputed path hash.
    pub file_dependencies: PathDependencySet,

    /// Dependencies that were not found on the file system.
    pub missing_dependencies: PathDependencySet,
}

/// Resolver with the current operating system as the file system
pub type Resolver = ResolverGeneric<FileSystemOs>;

Expand Down Expand Up @@ -238,6 +321,36 @@ impl<Fs: FileSystem + Send + Sync> ResolverGeneric<Fs> {
result
}

    /// Resolve `specifier` relative to the absolute `directory` and collect
    /// the touched dependencies, each paired with its precomputed path hash
    /// (see [PathDependency]).
    ///
    /// # Errors
    ///
    /// * See [ResolveError]
    pub async fn resolve_with_prehashed_context<P: Send + AsRef<Path>>(
        &self,
        directory: P,
        specifier: &str,
        resolve_context: &mut ResolvePreHashedContext,
    ) -> Result<Resolution, ResolveError> {
        let mut ctx = Ctx::default();
        // Ask the internal context to record touched paths for this call.
        ctx.init_file_dependencies();
        let result = self
            .resolve_tracing(directory.as_ref(), specifier, &mut ctx)
            .await;
        // Drain (not clone) the collected paths into the caller's context;
        // each path is hashed once during conversion to `PathDependency`.
        if let Some(deps) = &mut ctx.file_dependencies {
            resolve_context
                .file_dependencies
                .extend(deps.drain(..).map(PathDependency::from));
        }
        if let Some(deps) = &mut ctx.missing_dependencies {
            resolve_context
                .missing_dependencies
                .extend(deps.drain(..).map(PathDependency::from));
        }
        result
    }

/// Wrap `resolve_impl` with `tracing` information
#[cfg_attr(feature="enable_instrument", tracing::instrument(level=tracing::Level::DEBUG, skip_all, fields(path = %directory.to_string_lossy(), specifier = specifier)))]
async fn resolve_tracing(
Expand Down
49 changes: 45 additions & 4 deletions src/tests/dependencies.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,21 @@

#[cfg(not(target_os = "windows"))] // MemoryFS's path separator is always `/` so the test will not pass in windows.
mod windows {
use std::path::PathBuf;
use std::{
hash::{Hash, Hasher},
path::{Path, PathBuf},
};

use rustc_hash::FxHashSet;
use rustc_hash::{FxHashSet, FxHasher};

use super::super::memory_fs::MemoryFS;
use crate::{ResolveContext, ResolveOptions, ResolverGeneric};
use crate::{ResolveContext, ResolveOptions, ResolvePreHashedContext, ResolverGeneric};

fn path_hash(path: &Path) -> u64 {
let mut hasher = FxHasher::default();
path.hash(&mut hasher);
hasher.finish()
}

fn file_system() -> MemoryFS {
MemoryFS::new(&[
Expand Down Expand Up @@ -100,7 +109,7 @@ mod windows {
let mut ctx = ResolveContext::default();
let path = PathBuf::from(context);
let resolved = resolver
.resolve_with_context(path, request, &mut ctx)
.resolve_with_context(&path, request, &mut ctx)
.await
.map(|r| r.full_path());
assert_eq!(resolved, Ok(PathBuf::from(result)));
Expand All @@ -109,6 +118,38 @@ mod windows {
FxHashSet::from_iter(missing_dependencies.iter().map(PathBuf::from));
assert_eq!(ctx.file_dependencies, file_dependencies, "{name}");
assert_eq!(ctx.missing_dependencies, missing_dependencies, "{name}");

let mut prehashed_ctx = ResolvePreHashedContext::default();
let prehashed_resolved = resolver
.resolve_with_prehashed_context(&path, request, &mut prehashed_ctx)
.await
.map(|r| r.full_path());
assert_eq!(prehashed_resolved, Ok(PathBuf::from(result)));
assert!(prehashed_ctx
.file_dependencies
.iter()
.all(|dependency| { dependency.precomputed_hash() == path_hash(dependency.path()) }));
assert!(prehashed_ctx
.missing_dependencies
.iter()
.all(|dependency| { dependency.precomputed_hash() == path_hash(dependency.path()) }));
let prehashed_file_dependencies = FxHashSet::from_iter(
prehashed_ctx
.file_dependencies
.iter()
.map(|d| d.path().to_owned()),
);
let prehashed_missing_dependencies = FxHashSet::from_iter(
prehashed_ctx
.missing_dependencies
.iter()
.map(|d| d.path().to_owned()),
);
assert_eq!(prehashed_file_dependencies, file_dependencies, "{name}");
assert_eq!(
prehashed_missing_dependencies, missing_dependencies,
"{name}"
);
}
}
}
Loading