| author | Florian Klink <flokli@flokli.de> | 2024-08-20T13·52+0300 |
|---|---|---|
| committer | clbot <clbot@tvl.fyi> | 2024-08-20T15·14+0000 |
| commit | 2beabe968ca70ce2aef8def08d7dab7340979ea6 (patch) | |
| tree | 5d8521dc0228451960c6c77fd094e3a90a39fae6 | |
| parent | 413135b9252de65c61045ade984de7b4d3319c2d (diff) | |
refactor(nix-compat/store_path): make StorePath generic on S r/8545
Similar to how cl/12253 already did this for `Signature`, we apply the same logic to `StorePath`. `StorePathRef<'a>` is now a `StorePath<&'a str>`, and there's less redundant code for the two different implementations. `.as_ref()` returns a `StorePathRef<'_>`, `.to_owned()` gives a `StorePath<String>` (for now).

I briefly thought about only publicly exporting `StorePath<String>` as `StorePath`, but the diff is not too large, and this will make it easier to gradually introduce more flexibility in which store paths to accept.

Also, remove some silliness in `StorePath::from_absolute_path_full`, which now doesn't allocate anymore.

Change-Id: Ife8843857a1a0a3a99177ca997649fd45b8198e6
Reviewed-on: https://cl.tvl.fyi/c/depot/+/12258
Autosubmit: flokli <flokli@flokli.de>
Tested-by: BuildkiteCI
Reviewed-by: Connor Brewster <cbrewster@hey.com>
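As a quick orientation before the diff, here is a usage sketch of the API surface described above. It only uses constructors and conversions that appear in the diff further down, and the store path literals are the `net-tools` example from the crate's own tests; the `demo` function itself is hypothetical and not part of the change.

```rust
use nix_compat::store_path::{Error, StorePath, StorePathRef};

fn demo() -> Result<(), Error> {
    // `$digest-$name`, i.e. the part after the /nix/store/ prefix.
    let s = "00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432";

    // Borrowed flavour: the name borrows from `s`, nothing is allocated.
    let borrowed: StorePathRef = StorePath::from_bytes(s.as_bytes())?;

    // Owned flavour: the same parser, but the name is stored as a String.
    let owned: StorePath<String> = StorePath::from_bytes(s.as_bytes())?;

    // `.as_ref()` and `.to_owned()` convert between the two flavours.
    assert_eq!(owned.as_ref(), borrowed);
    assert_eq!(borrowed.to_owned(), owned);

    // `from_absolute_path_full` now returns the rest of the path as a
    // borrowed `&Path` instead of an allocated `PathBuf`.
    let abs = "/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432/bin/arp";
    let (sp, sub_path) = StorePathRef::from_absolute_path_full(abs)?;
    assert_eq!(*sp.name(), "net-tools-1.60_p20170221182432");
    assert_eq!(sub_path, std::path::Path::new("bin/arp"));

    Ok(())
}
```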
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | tvix/glue/src/builtins/derivation.rs | 2 |
| -rw-r--r-- | tvix/glue/src/builtins/import.rs | 20 |
| -rw-r--r-- | tvix/glue/src/known_paths.rs | 36 |
| -rw-r--r-- | tvix/glue/src/tvix_store_io.rs | 30 |
| -rw-r--r-- | tvix/nix-compat/src/derivation/mod.rs | 12 |
| -rw-r--r-- | tvix/nix-compat/src/derivation/output.rs | 7 |
| -rw-r--r-- | tvix/nix-compat/src/derivation/parse_error.rs | 2 |
| -rw-r--r-- | tvix/nix-compat/src/derivation/parser.rs | 57 |
| -rw-r--r-- | tvix/nix-compat/src/derivation/write.rs | 16 |
| -rw-r--r-- | tvix/nix-compat/src/store_path/mod.rs | 323 |
| -rw-r--r-- | tvix/nix-compat/src/store_path/utils.rs | 42 |
| -rw-r--r-- | tvix/store/src/import.rs | 6 |
| -rw-r--r-- | tvix/store/src/pathinfoservice/fs/mod.rs | 3 |
| -rw-r--r-- | tvix/store/src/proto/mod.rs | 10 |
| -rw-r--r-- | tvix/store/src/proto/tests/pathinfo.rs | 8 |
15 files changed, 280 insertions, 294 deletions
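The heart of the change lives in `tvix/nix-compat/src/store_path/mod.rs` below. As a condensed, self-contained sketch of the pattern it applies — assuming the crate's 20-byte digest size and eliding validation, serde, ordering and display, which the real diff keeps — one struct serves both flavours, the borrowed variant becomes a type alias, and constructors put the `From<&'a str>` bound on the method rather than on the type so that both flavours share a single implementation:

```rust
use std::ops::Deref;

const DIGEST_SIZE: usize = 20; // store path digests are 20 bytes

/// One definition covers both the owned and the borrowed store path.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct StorePath<S>
where
    S: Eq + PartialEq,
{
    digest: [u8; DIGEST_SIZE],
    name: S,
}

/// The borrowed flavour is now merely a type alias, not a second struct.
pub type StorePathRef<'a> = StorePath<&'a str>;

impl<S> StorePath<S>
where
    S: Eq + Deref<Target = str>,
{
    /// Cheap borrowing view: copies the digest, borrows the name.
    pub fn as_ref(&self) -> StorePathRef<'_> {
        StorePathRef {
            digest: self.digest,
            name: &self.name,
        }
    }

    /// Conversion to the owning flavour; the only place that allocates.
    pub fn to_owned(&self) -> StorePath<String> {
        StorePath {
            digest: self.digest,
            name: self.name.to_string(),
        }
    }

    /// Constructors carry the `From<&'a str>` bound themselves, so a single
    /// implementation builds either `StorePath<&str>` (no allocation) or
    /// `StorePath<String>`. Name validation and digest decoding are elided
    /// in this sketch.
    pub fn from_name_and_digest_fixed<'a>(name: &'a str, digest: [u8; DIGEST_SIZE]) -> Self
    where
        S: From<&'a str>,
    {
        Self {
            digest,
            name: name.into(),
        }
    }
}

fn main() {
    let digest = [0u8; DIGEST_SIZE];
    let borrowed: StorePathRef = StorePath::from_name_and_digest_fixed("hello-2.12.1", digest);
    let owned: StorePath<String> = borrowed.to_owned();
    assert_eq!(owned.as_ref(), borrowed);
}
```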
diff --git a/tvix/glue/src/builtins/derivation.rs b/tvix/glue/src/builtins/derivation.rs index 6a61b0dbaf36..5bc9dab71283 100644 --- a/tvix/glue/src/builtins/derivation.rs +++ b/tvix/glue/src/builtins/derivation.rs @@ -568,7 +568,7 @@ pub(crate) mod derivation_builtins { let blob_digest = blob_writer.close().await?; let ca_hash = CAHash::Text(Sha256::digest(&content).into()); - let store_path = + let store_path: StorePathRef = build_ca_path(name.to_str()?, &ca_hash, content.iter_ctx_plain(), false) .map_err(|_e| { nix_compat::derivation::DerivationError::InvalidOutputName( diff --git a/tvix/glue/src/builtins/import.rs b/tvix/glue/src/builtins/import.rs index 3660f575c843..34ae2778ecdd 100644 --- a/tvix/glue/src/builtins/import.rs +++ b/tvix/glue/src/builtins/import.rs @@ -112,7 +112,7 @@ mod import_builtins { use crate::tvix_store_io::TvixStoreIO; use nix_compat::nixhash::{CAHash, NixHash}; - use nix_compat::store_path::StorePath; + use nix_compat::store_path::StorePathRef; use sha2::Digest; use tokio::io::AsyncWriteExt; use tvix_eval::builtins::coerce_value_to_path; @@ -377,16 +377,16 @@ mod import_builtins { } })?; - let path_exists = if let Ok((store_path, sub_path)) = StorePath::from_absolute_path_full(p) - { - if !sub_path.as_os_str().is_empty() { - false + let path_exists = + if let Ok((store_path, sub_path)) = StorePathRef::from_absolute_path_full(p) { + if !sub_path.as_os_str().is_empty() { + false + } else { + state.store_path_exists(store_path.as_ref()).await? + } } else { - state.store_path_exists(store_path.as_ref()).await? - } - } else { - false - }; + false + }; if !path_exists { return Err(ImportError::PathNotInStore(p.into()).into()); diff --git a/tvix/glue/src/known_paths.rs b/tvix/glue/src/known_paths.rs index b303b8a13520..239acca9829a 100644 --- a/tvix/glue/src/known_paths.rs +++ b/tvix/glue/src/known_paths.rs @@ -25,27 +25,27 @@ pub struct KnownPaths { /// /// Keys are derivation paths, values are a tuple of the "hash derivation /// modulo" and the Derivation struct itself. - derivations: HashMap<StorePath, ([u8; 32], Derivation)>, + derivations: HashMap<StorePath<String>, ([u8; 32], Derivation)>, /// A map from output path to (one) drv path. /// Note that in the case of FODs, multiple drvs can produce the same output /// path. We use one of them. - outputs_to_drvpath: HashMap<StorePath, StorePath>, + outputs_to_drvpath: HashMap<StorePath<String>, StorePath<String>>, /// A map from output path to fetches (and their names). - outputs_to_fetches: HashMap<StorePath, (String, Fetch)>, + outputs_to_fetches: HashMap<StorePath<String>, (String, Fetch)>, } impl KnownPaths { /// Fetch the opaque "hash derivation modulo" for a given derivation path. - pub fn get_hash_derivation_modulo(&self, drv_path: &StorePath) -> Option<&[u8; 32]> { + pub fn get_hash_derivation_modulo(&self, drv_path: &StorePath<String>) -> Option<&[u8; 32]> { self.derivations .get(drv_path) .map(|(hash_derivation_modulo, _derivation)| hash_derivation_modulo) } /// Return a reference to the Derivation for a given drv path. - pub fn get_drv_by_drvpath(&self, drv_path: &StorePath) -> Option<&Derivation> { + pub fn get_drv_by_drvpath(&self, drv_path: &StorePath<String>) -> Option<&Derivation> { self.derivations .get(drv_path) .map(|(_hash_derivation_modulo, derivation)| derivation) @@ -54,7 +54,10 @@ impl KnownPaths { /// Return the drv path of the derivation producing the passed output path. 
/// Note there can be multiple Derivations producing the same output path in /// flight; this function will only return one of them. - pub fn get_drv_path_for_output_path(&self, output_path: &StorePath) -> Option<&StorePath> { + pub fn get_drv_path_for_output_path( + &self, + output_path: &StorePath<String>, + ) -> Option<&StorePath<String>> { self.outputs_to_drvpath.get(output_path) } @@ -63,7 +66,7 @@ impl KnownPaths { /// be fully calculated. /// All input derivations this refers to must also be inserted to this /// struct. - pub fn add_derivation(&mut self, drv_path: StorePath, drv: Derivation) { + pub fn add_derivation(&mut self, drv_path: StorePath<String>, drv: Derivation) { // check input derivations to have been inserted. #[cfg(debug_assertions)] { @@ -124,14 +127,17 @@ impl KnownPaths { /// Return the name and fetch producing the passed output path. /// Note there can also be (multiple) Derivations producing the same output path. - pub fn get_fetch_for_output_path(&self, output_path: &StorePath) -> Option<(String, Fetch)> { + pub fn get_fetch_for_output_path( + &self, + output_path: &StorePath<String>, + ) -> Option<(String, Fetch)> { self.outputs_to_fetches .get(output_path) .map(|(name, fetch)| (name.to_owned(), fetch.to_owned())) } /// Returns an iterator over all known derivations and their store path. - pub fn get_derivations(&self) -> impl Iterator<Item = (&StorePath, &Derivation)> { + pub fn get_derivations(&self) -> impl Iterator<Item = (&StorePath<String>, &Derivation)> { self.derivations.iter().map(|(k, v)| (k, &v.1)) } } @@ -156,26 +162,26 @@ mod tests { "tests/ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv" )) .expect("must parse"); - static ref BAR_DRV_PATH: StorePath = + static ref BAR_DRV_PATH: StorePath<String> = StorePath::from_bytes(b"ss2p4wmxijn652haqyd7dckxwl4c7hxx-bar.drv").expect("must parse"); - static ref FOO_DRV_PATH: StorePath = + static ref FOO_DRV_PATH: StorePath<String> = StorePath::from_bytes(b"ch49594n9avinrf8ip0aslidkc4lxkqv-foo.drv").expect("must parse"); - static ref BAR_OUT_PATH: StorePath = + static ref BAR_OUT_PATH: StorePath<String> = StorePath::from_bytes(b"mp57d33657rf34lzvlbpfa1gjfv5gmpg-bar").expect("must parse"); - static ref FOO_OUT_PATH: StorePath = + static ref FOO_OUT_PATH: StorePath<String> = StorePath::from_bytes(b"fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo").expect("must parse"); static ref FETCH_URL : Fetch = Fetch::URL{ url: Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(), exp_hash: Some(nixhash::from_sri_str("sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=").unwrap()) }; - static ref FETCH_URL_OUT_PATH: StorePath = StorePath::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap(); + static ref FETCH_URL_OUT_PATH: StorePath<String> = StorePath::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap(); static ref FETCH_TARBALL : Fetch = Fetch::Tarball{ url: Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap(), exp_nar_sha256: Some(nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm").unwrap()) }; - static ref FETCH_TARBALL_OUT_PATH: StorePath = StorePath::from_bytes(b"7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source").unwrap(); + static ref FETCH_TARBALL_OUT_PATH: StorePath<String> = StorePath::from_bytes(b"7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source").unwrap(); } /// ensure we don't allow acdding a Derivation that depends 
on another, diff --git a/tvix/glue/src/tvix_store_io.rs b/tvix/glue/src/tvix_store_io.rs index 0e12c261f575..93675b438f25 100644 --- a/tvix/glue/src/tvix_store_io.rs +++ b/tvix/glue/src/tvix_store_io.rs @@ -105,7 +105,7 @@ impl TvixStoreIO { #[instrument(skip(self, store_path), fields(store_path=%store_path, indicatif.pb_show=1), ret(level = Level::TRACE), err)] async fn store_path_to_node( &self, - store_path: &StorePath, + store_path: &StorePath<String>, sub_path: &Path, ) -> io::Result<Option<Node>> { // Find the root node for the store_path. @@ -213,7 +213,7 @@ impl TvixStoreIO { }; // convert output names to actual paths - let output_paths: Vec<StorePath> = output_names + let output_paths: Vec<StorePath<String>> = output_names .iter() .map(|output_name| { input_drv @@ -372,13 +372,13 @@ impl TvixStoreIO { .map_err(|e| std::io::Error::new(io::ErrorKind::Other, e)) } - pub(crate) async fn node_to_path_info( + pub(crate) async fn node_to_path_info<'a>( &self, - name: &str, + name: &'a str, path: &Path, ca: &CAHash, root_node: Node, - ) -> io::Result<(PathInfo, NixHash, StorePath)> { + ) -> io::Result<(PathInfo, NixHash, StorePathRef<'a>)> { // Ask the PathInfoService for the NAR size and sha256 // We always need it no matter what is the actual hash mode // because the path info construct a narinfo which *always* @@ -411,20 +411,16 @@ impl TvixStoreIO { root_node, ); - Ok(( - path_info, - NixHash::Sha256(nar_sha256), - output_path.to_owned(), - )) + Ok((path_info, NixHash::Sha256(nar_sha256), output_path)) } - pub(crate) async fn register_node_in_path_info_service( + pub(crate) async fn register_node_in_path_info_service<'a>( &self, - name: &str, + name: &'a str, path: &Path, ca: &CAHash, root_node: Node, - ) -> io::Result<StorePath> { + ) -> io::Result<StorePathRef<'a>> { let (path_info, _, output_path) = self.node_to_path_info(name, path, ca, root_node).await?; let _path_info = self.path_info_service.as_ref().put(path_info).await?; @@ -449,7 +445,7 @@ impl EvalIO for TvixStoreIO { { if self .tokio_handle - .block_on(self.store_path_to_node(&store_path, &sub_path))? + .block_on(self.store_path_to_node(&store_path, sub_path))? .is_some() { Ok(true) @@ -471,7 +467,7 @@ impl EvalIO for TvixStoreIO { { if let Some(node) = self .tokio_handle - .block_on(async { self.store_path_to_node(&store_path, &sub_path).await })? + .block_on(async { self.store_path_to_node(&store_path, sub_path).await })? { // depending on the node type, treat open differently match node { @@ -527,7 +523,7 @@ impl EvalIO for TvixStoreIO { { if let Some(node) = self .tokio_handle - .block_on(async { self.store_path_to_node(&store_path, &sub_path).await })? + .block_on(async { self.store_path_to_node(&store_path, sub_path).await })? { match node { Node::Directory { .. } => Ok(FileType::Directory), @@ -549,7 +545,7 @@ impl EvalIO for TvixStoreIO { { if let Some(node) = self .tokio_handle - .block_on(async { self.store_path_to_node(&store_path, &sub_path).await })? + .block_on(async { self.store_path_to_node(&store_path, sub_path).await })? { match node { Node::Directory { digest, .. } => { diff --git a/tvix/nix-compat/src/derivation/mod.rs b/tvix/nix-compat/src/derivation/mod.rs index 6e12e3ea86e6..6baeaba38299 100644 --- a/tvix/nix-compat/src/derivation/mod.rs +++ b/tvix/nix-compat/src/derivation/mod.rs @@ -36,11 +36,11 @@ pub struct Derivation { /// Map from drv path to output names used from this derivation. 
#[serde(rename = "inputDrvs")] - pub input_derivations: BTreeMap<StorePath, BTreeSet<String>>, + pub input_derivations: BTreeMap<StorePath<String>, BTreeSet<String>>, /// Plain store paths of additional inputs. #[serde(rename = "inputSrcs")] - pub input_sources: BTreeSet<StorePath>, + pub input_sources: BTreeSet<StorePath<String>>, /// Maps output names to Output. pub outputs: BTreeMap<String, Output>, @@ -127,7 +127,10 @@ impl Derivation { /// the `name` with a `.drv` suffix as name, all [Derivation::input_sources] and /// keys of [Derivation::input_derivations] as references, and the ATerm string of /// the [Derivation] as content. - pub fn calculate_derivation_path(&self, name: &str) -> Result<StorePath, DerivationError> { + pub fn calculate_derivation_path( + &self, + name: &str, + ) -> Result<StorePath<String>, DerivationError> { // append .drv to the name let name = &format!("{}.drv", name); @@ -141,7 +144,6 @@ impl Derivation { .collect(); build_text_path(name, self.to_aterm_bytes(), references) - .map(|s| s.to_owned()) .map_err(|_e| DerivationError::InvalidOutputName(name.to_string())) } @@ -210,7 +212,7 @@ impl Derivation { self.input_derivations .iter() .map(|(drv_path, output_names)| { - let hash = fn_lookup_hash_derivation_modulo(&drv_path.into()); + let hash = fn_lookup_hash_derivation_modulo(&drv_path.as_ref()); (hash, output_names.to_owned()) }), diff --git a/tvix/nix-compat/src/derivation/output.rs b/tvix/nix-compat/src/derivation/output.rs index 266617f587f8..0b81ef3c3155 100644 --- a/tvix/nix-compat/src/derivation/output.rs +++ b/tvix/nix-compat/src/derivation/output.rs @@ -1,5 +1,4 @@ use crate::nixhash::CAHash; -use crate::store_path::StorePathRef; use crate::{derivation::OutputError, store_path::StorePath}; use serde::de::Unexpected; use serde::{Deserialize, Serialize}; @@ -10,7 +9,7 @@ use std::borrow::Cow; #[derive(Clone, Debug, Default, Eq, PartialEq, Serialize)] pub struct Output { /// Store path of build result. - pub path: Option<StorePath>, + pub path: Option<StorePath<String>>, #[serde(flatten)] pub ca_hash: Option<CAHash>, // we can only represent a subset here. 
@@ -33,10 +32,10 @@ impl<'de> Deserialize<'de> for Output { &"a string", ))?; - let path = StorePathRef::from_absolute_path(path.as_bytes()) + let path = StorePath::from_absolute_path(path.as_bytes()) .map_err(|_| serde::de::Error::invalid_value(Unexpected::Str(path), &"StorePath"))?; Ok(Self { - path: Some(path.to_owned()), + path: Some(path), ca_hash: CAHash::from_map::<D>(&fields)?, }) } diff --git a/tvix/nix-compat/src/derivation/parse_error.rs b/tvix/nix-compat/src/derivation/parse_error.rs index fc97f1a9883b..f625d9aeb724 100644 --- a/tvix/nix-compat/src/derivation/parse_error.rs +++ b/tvix/nix-compat/src/derivation/parse_error.rs @@ -20,7 +20,7 @@ pub enum ErrorKind { DuplicateInputDerivationOutputName(String, String), #[error("duplicate input source: {0}")] - DuplicateInputSource(StorePath), + DuplicateInputSource(StorePath<String>), #[error("nix hash error: {0}")] NixHashError(nixhash::Error), diff --git a/tvix/nix-compat/src/derivation/parser.rs b/tvix/nix-compat/src/derivation/parser.rs index 7fc56389d17a..4fff7181ba40 100644 --- a/tvix/nix-compat/src/derivation/parser.rs +++ b/tvix/nix-compat/src/derivation/parser.rs @@ -13,7 +13,7 @@ use thiserror; use crate::derivation::parse_error::{into_nomerror, ErrorKind, NomError, NomResult}; use crate::derivation::{write, CAHash, Derivation, Output}; -use crate::store_path::{self, StorePath, StorePathRef}; +use crate::store_path::{self, StorePath}; use crate::{aterm, nixhash}; #[derive(Debug, thiserror::Error)] @@ -101,7 +101,7 @@ fn parse_output(i: &[u8]) -> NomResult<&[u8], (String, Output)> { path: if output_path.is_empty() { None } else { - Some(string_to_store_path(i, output_path)?) + Some(string_to_store_path(i, &output_path)?) }, ca_hash: hash_with_mode, }, @@ -131,12 +131,12 @@ fn parse_outputs(i: &[u8]) -> NomResult<&[u8], BTreeMap<String, Output>> { match res { Ok((rst, outputs_lst)) => { - let mut outputs: BTreeMap<String, Output> = BTreeMap::default(); + let mut outputs = BTreeMap::default(); for (output_name, output) in outputs_lst.into_iter() { if outputs.contains_key(&output_name) { return Err(nom::Err::Failure(NomError { input: i, - code: ErrorKind::DuplicateMapKey(output_name), + code: ErrorKind::DuplicateMapKey(output_name.to_string()), })); } outputs.insert(output_name, output); @@ -148,11 +148,13 @@ fn parse_outputs(i: &[u8]) -> NomResult<&[u8], BTreeMap<String, Output>> { } } -fn parse_input_derivations(i: &[u8]) -> NomResult<&[u8], BTreeMap<StorePath, BTreeSet<String>>> { +fn parse_input_derivations( + i: &[u8], +) -> NomResult<&[u8], BTreeMap<StorePath<String>, BTreeSet<String>>> { let (i, input_derivations_list) = parse_kv(aterm::parse_string_list)(i)?; // This is a HashMap of drv paths to a list of output names. 
- let mut input_derivations: BTreeMap<StorePath, BTreeSet<_>> = BTreeMap::new(); + let mut input_derivations: BTreeMap<StorePath<String>, BTreeSet<_>> = BTreeMap::new(); for (input_derivation, output_names) in input_derivations_list { let mut new_output_names = BTreeSet::new(); @@ -169,7 +171,7 @@ fn parse_input_derivations(i: &[u8]) -> NomResult<&[u8], BTreeMap<StorePath, BTr new_output_names.insert(output_name); } - let input_derivation: StorePath = string_to_store_path(i, input_derivation)?; + let input_derivation = string_to_store_path(i, input_derivation.as_str())?; input_derivations.insert(input_derivation, new_output_names); } @@ -177,16 +179,16 @@ fn parse_input_derivations(i: &[u8]) -> NomResult<&[u8], BTreeMap<StorePath, BTr Ok((i, input_derivations)) } -fn parse_input_sources(i: &[u8]) -> NomResult<&[u8], BTreeSet<StorePath>> { +fn parse_input_sources(i: &[u8]) -> NomResult<&[u8], BTreeSet<StorePath<String>>> { let (i, input_sources_lst) = aterm::parse_string_list(i).map_err(into_nomerror)?; let mut input_sources: BTreeSet<_> = BTreeSet::new(); for input_source in input_sources_lst.into_iter() { - let input_source: StorePath = string_to_store_path(i, input_source)?; + let input_source = string_to_store_path(i, input_source.as_str())?; if input_sources.contains(&input_source) { return Err(nom::Err::Failure(NomError { input: i, - code: ErrorKind::DuplicateInputSource(input_source), + code: ErrorKind::DuplicateInputSource(input_source.to_owned()), })); } else { input_sources.insert(input_source); @@ -196,24 +198,27 @@ fn parse_input_sources(i: &[u8]) -> NomResult<&[u8], BTreeSet<StorePath>> { Ok((i, input_sources)) } -fn string_to_store_path( - i: &[u8], - path_str: String, -) -> Result<StorePath, nom::Err<NomError<&[u8]>>> { - #[cfg(debug_assertions)] - let path_str2 = path_str.clone(); - - let path: StorePath = StorePathRef::from_absolute_path(path_str.as_bytes()) - .map_err(|e: store_path::Error| { +fn string_to_store_path<'a, 'i, S>( + i: &'i [u8], + path_str: &'a str, +) -> Result<StorePath<S>, nom::Err<NomError<&'i [u8]>>> +where + S: std::cmp::Eq + + std::fmt::Display + + std::clone::Clone + + std::ops::Deref<Target = str> + + std::convert::From<&'a str>, +{ + let path = + StorePath::from_absolute_path(path_str.as_bytes()).map_err(|e: store_path::Error| { nom::Err::Failure(NomError { input: i, code: e.into(), }) - })? 
- .to_owned(); + })?; #[cfg(debug_assertions)] - assert_eq!(path_str2, path.to_absolute_path()); + assert_eq!(path_str, path.to_absolute_path()); Ok(path) } @@ -375,11 +380,11 @@ mod tests { }; static ref EXP_AB_MAP: BTreeMap<String, BString> = { let mut b = BTreeMap::new(); - b.insert("a".to_string(), b"1".as_bstr().to_owned()); - b.insert("b".to_string(), b"2".as_bstr().to_owned()); + b.insert("a".to_string(), b"1".into()); + b.insert("b".to_string(), b"2".into()); b }; - static ref EXP_INPUT_DERIVATIONS_SIMPLE: BTreeMap<StorePath, BTreeSet<String>> = { + static ref EXP_INPUT_DERIVATIONS_SIMPLE: BTreeMap<StorePath<String>, BTreeSet<String>> = { let mut b = BTreeMap::new(); b.insert( StorePath::from_bytes(b"8bjm87p310sb7r2r0sg4xrynlvg86j8k-hello-2.12.1.tar.gz.drv") @@ -452,7 +457,7 @@ mod tests { #[case::simple(EXP_INPUT_DERIVATIONS_SIMPLE_ATERM.as_bytes(), &EXP_INPUT_DERIVATIONS_SIMPLE)] fn parse_input_derivations( #[case] input: &'static [u8], - #[case] expected: &BTreeMap<StorePath, BTreeSet<String>>, + #[case] expected: &BTreeMap<StorePath<String>, BTreeSet<String>>, ) { let (rest, parsed) = super::parse_input_derivations(input).expect("must parse"); diff --git a/tvix/nix-compat/src/derivation/write.rs b/tvix/nix-compat/src/derivation/write.rs index 2ff68b6edba8..42dadcd76064 100644 --- a/tvix/nix-compat/src/derivation/write.rs +++ b/tvix/nix-compat/src/derivation/write.rs @@ -6,7 +6,7 @@ use crate::aterm::escape_bytes; use crate::derivation::{ca_kind_prefix, output::Output}; use crate::nixbase32; -use crate::store_path::{StorePath, StorePathRef, STORE_DIR_WITH_SLASH}; +use crate::store_path::{StorePath, STORE_DIR_WITH_SLASH}; use bstr::BString; use data_encoding::HEXLOWER; @@ -34,7 +34,10 @@ pub(crate) trait AtermWriteable { fn aterm_write(&self, writer: &mut impl Write) -> std::io::Result<()>; } -impl AtermWriteable for StorePathRef<'_> { +impl<S> AtermWriteable for StorePath<S> +where + S: std::cmp::Eq + std::ops::Deref<Target = str>, +{ fn aterm_write(&self, writer: &mut impl Write) -> std::io::Result<()> { write_char(writer, QUOTE)?; writer.write_all(STORE_DIR_WITH_SLASH.as_bytes())?; @@ -46,13 +49,6 @@ impl AtermWriteable for StorePathRef<'_> { } } -impl AtermWriteable for StorePath { - fn aterm_write(&self, writer: &mut impl Write) -> std::io::Result<()> { - let r: StorePathRef = self.into(); - r.aterm_write(writer) - } -} - impl AtermWriteable for String { fn aterm_write(&self, writer: &mut impl Write) -> std::io::Result<()> { write_field(writer, self, true) @@ -179,7 +175,7 @@ pub(crate) fn write_input_derivations( pub(crate) fn write_input_sources( writer: &mut impl Write, - input_sources: &BTreeSet<StorePath>, + input_sources: &BTreeSet<StorePath<String>>, ) -> Result<(), io::Error> { write_char(writer, BRACKET_OPEN)?; write_array_elements( diff --git a/tvix/nix-compat/src/store_path/mod.rs b/tvix/nix-compat/src/store_path/mod.rs index 707c41a92d74..177cc96ce20f 100644 --- a/tvix/nix-compat/src/store_path/mod.rs +++ b/tvix/nix-compat/src/store_path/mod.rs @@ -2,14 +2,15 @@ use crate::nixbase32; use data_encoding::{DecodeError, BASE64}; use serde::{Deserialize, Serialize}; use std::{ - fmt, - path::PathBuf, + fmt::{self, Display}, + ops::Deref, + path::Path, str::{self, FromStr}, }; use thiserror; #[cfg(target_family = "unix")] -use std::os::unix::ffi::OsStringExt; +use std::os::unix::ffi::OsStrExt; mod utils; @@ -54,17 +55,26 @@ pub enum Error { /// A [StorePath] does not encode any additional subpath "inside" the store /// path. 
#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct StorePath { +pub struct StorePath<S> +where + S: std::cmp::Eq + std::cmp::PartialEq, +{ digest: [u8; DIGEST_SIZE], - name: Box<str>, + name: S, } +/// Like [StorePath], but without a heap allocation for the name. +/// Used by [StorePath] for parsing. +pub type StorePathRef<'a> = StorePath<&'a str>; -impl StorePath { +impl<S> StorePath<S> +where + S: std::cmp::Eq + Deref<Target = str>, +{ pub fn digest(&self) -> &[u8; DIGEST_SIZE] { &self.digest } - pub fn name(&self) -> &str { + pub fn name(&self) -> &S { &self.name } @@ -74,60 +84,101 @@ impl StorePath { name: &self.name, } } -} -impl PartialOrd for StorePath { - fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { - Some(self.cmp(other)) + pub fn to_owned(&self) -> StorePath<String> { + StorePath { + digest: self.digest, + name: self.name.to_string(), + } } -} -/// `StorePath`s are sorted by their reverse digest to match the sorting order -/// of the nixbase32-encoded string. -impl Ord for StorePath { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.as_ref().cmp(&other.as_ref()) + /// Construct a [StorePath] by passing the `$digest-$name` string + /// that comes after [STORE_DIR_WITH_SLASH]. + pub fn from_bytes<'a>(s: &'a [u8]) -> Result<Self, Error> + where + S: From<&'a str>, + { + // the whole string needs to be at least: + // + // - 32 characters (encoded hash) + // - 1 dash + // - 1 character for the name + if s.len() < ENCODED_DIGEST_SIZE + 2 { + Err(Error::InvalidLength)? + } + + let digest = nixbase32::decode_fixed(&s[..ENCODED_DIGEST_SIZE])?; + + if s[ENCODED_DIGEST_SIZE] != b'-' { + return Err(Error::MissingDash); + } + + Ok(StorePath { + digest, + name: validate_name(&s[ENCODED_DIGEST_SIZE + 1..])?.into(), + }) } -} -impl FromStr for StorePath { - type Err = Error; + /// Construct a [StorePathRef] from a name and digest. + /// The name is validated, and the digest checked for size. + pub fn from_name_and_digest<'a>(name: &'a str, digest: &[u8]) -> Result<Self, Error> + where + S: From<&'a str>, + { + let digest_fixed = digest.try_into().map_err(|_| Error::InvalidLength)?; + Self::from_name_and_digest_fixed(name, digest_fixed) + } - /// Construct a [StorePath] by passing the `$digest-$name` string - /// that comes after [STORE_DIR_WITH_SLASH]. - fn from_str(s: &str) -> Result<Self, Self::Err> { - Self::from_bytes(s.as_bytes()) + /// Construct a [StorePathRef] from a name and digest of correct length. + /// The name is validated. + pub fn from_name_and_digest_fixed<'a>( + name: &'a str, + digest: [u8; DIGEST_SIZE], + ) -> Result<Self, Error> + where + S: From<&'a str>, + { + Ok(Self { + name: validate_name(name.as_bytes())?.into(), + digest, + }) } -} -impl StorePath { - /// Construct a [StorePath] by passing the `$digest-$name` string - /// that comes after [STORE_DIR_WITH_SLASH]. - pub fn from_bytes(s: &[u8]) -> Result<StorePath, Error> { - Ok(StorePathRef::from_bytes(s)?.to_owned()) + /// Construct a [StorePathRef] from an absolute store path string. + /// This is equivalent to calling [StorePathRef::from_bytes], but stripping + /// the [STORE_DIR_WITH_SLASH] prefix before. + pub fn from_absolute_path<'a>(s: &'a [u8]) -> Result<Self, Error> + where + S: From<&'a str>, + { + match s.strip_prefix(STORE_DIR_WITH_SLASH.as_bytes()) { + Some(s_stripped) => Self::from_bytes(s_stripped), + None => Err(Error::MissingStoreDir), + } } /// Decompose a string into a [StorePath] and a [PathBuf] containing the /// rest of the path, or an error. 
#[cfg(target_family = "unix")] - pub fn from_absolute_path_full(s: &str) -> Result<(StorePath, PathBuf), Error> { + pub fn from_absolute_path_full<'a>(s: &'a str) -> Result<(Self, &'a Path), Error> + where + S: From<&'a str>, + { // strip [STORE_DIR_WITH_SLASH] from s + match s.strip_prefix(STORE_DIR_WITH_SLASH) { None => Err(Error::MissingStoreDir), Some(rest) => { - // put rest in a PathBuf - let mut p = PathBuf::new(); - p.push(rest); - - let mut it = p.components(); + let mut it = Path::new(rest).components(); // The first component of the rest must be parse-able as a [StorePath] if let Some(first_component) = it.next() { // convert first component to StorePath - let first_component_bytes = first_component.as_os_str().to_owned().into_vec(); - let store_path = StorePath::from_bytes(&first_component_bytes)?; + let store_path = StorePath::from_bytes(first_component.as_os_str().as_bytes())?; + // collect rest - let rest_buf: PathBuf = it.collect(); + let rest_buf = it.as_path(); + Ok((store_path, rest_buf)) } else { Err(Error::InvalidLength) // Well, or missing "/"? @@ -139,139 +190,48 @@ impl StorePath { /// Returns an absolute store path string. /// That is just the string representation, prefixed with the store prefix /// ([STORE_DIR_WITH_SLASH]), - pub fn to_absolute_path(&self) -> String { - let sp_ref: StorePathRef = self.into(); - sp_ref.to_absolute_path() - } -} - -impl<'de> Deserialize<'de> for StorePath { - fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> - where - D: serde::Deserializer<'de>, - { - let r = <StorePathRef<'de> as Deserialize<'de>>::deserialize(deserializer)?; - Ok(r.to_owned()) - } -} - -impl Serialize for StorePath { - fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + pub fn to_absolute_path(&self) -> String where - S: serde::Serializer, + S: Display, { - let r: StorePathRef = self.into(); - r.serialize(serializer) - } -} - -/// Like [StorePath], but without a heap allocation for the name. -/// Used by [StorePath] for parsing. -/// -#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)] -pub struct StorePathRef<'a> { - digest: [u8; DIGEST_SIZE], - name: &'a str, -} - -impl<'a> From<&'a StorePath> for StorePathRef<'a> { - fn from(&StorePath { digest, ref name }: &'a StorePath) -> Self { - StorePathRef { digest, name } + format!("{}{}", STORE_DIR_WITH_SLASH, self) } } -impl<'a> PartialOrd for StorePathRef<'a> { +impl<S> PartialOrd for StorePath<S> +where + S: std::cmp::PartialEq + std::cmp::Eq, +{ fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) } } -/// `StorePathRef`s are sorted by their reverse digest to match the sorting order +/// `StorePath`s are sorted by their reverse digest to match the sorting order /// of the nixbase32-encoded string. -impl<'a> Ord for StorePathRef<'a> { +impl<S> Ord for StorePath<S> +where + S: std::cmp::PartialEq + std::cmp::Eq, +{ fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.digest.iter().rev().cmp(other.digest.iter().rev()) } } -impl<'a> StorePathRef<'a> { - pub fn digest(&self) -> &[u8; DIGEST_SIZE] { - &self.digest - } - - pub fn name(&self) -> &'a str { - self.name - } - - pub fn to_owned(&self) -> StorePath { - StorePath { - digest: self.digest, - name: self.name.into(), - } - } - - /// Construct a [StorePathRef] from a name and digest. - /// The name is validated, and the digest checked for size. 
- pub fn from_name_and_digest(name: &'a str, digest: &[u8]) -> Result<Self, Error> { - let digest_fixed = digest.try_into().map_err(|_| Error::InvalidLength)?; - Self::from_name_and_digest_fixed(name, digest_fixed) - } - - /// Construct a [StorePathRef] from a name and digest of correct length. - /// The name is validated. - pub fn from_name_and_digest_fixed( - name: &'a str, - digest: [u8; DIGEST_SIZE], - ) -> Result<Self, Error> { - Ok(Self { - name: validate_name(name.as_bytes())?, - digest, - }) - } - - /// Construct a [StorePathRef] from an absolute store path string. - /// This is equivalent to calling [StorePathRef::from_bytes], but stripping - /// the [STORE_DIR_WITH_SLASH] prefix before. - pub fn from_absolute_path(s: &'a [u8]) -> Result<Self, Error> { - match s.strip_prefix(STORE_DIR_WITH_SLASH.as_bytes()) { - Some(s_stripped) => Self::from_bytes(s_stripped), - None => Err(Error::MissingStoreDir), - } - } +impl<'a, 'b: 'a> FromStr for StorePath<String> { + type Err = Error; - /// Construct a [StorePathRef] by passing the `$digest-$name` string + /// Construct a [StorePath] by passing the `$digest-$name` string /// that comes after [STORE_DIR_WITH_SLASH]. - pub fn from_bytes(s: &'a [u8]) -> Result<Self, Error> { - // the whole string needs to be at least: - // - // - 32 characters (encoded hash) - // - 1 dash - // - 1 character for the name - if s.len() < ENCODED_DIGEST_SIZE + 2 { - Err(Error::InvalidLength)? - } - - let digest = nixbase32::decode_fixed(&s[..ENCODED_DIGEST_SIZE])?; - - if s[ENCODED_DIGEST_SIZE] != b'-' { - return Err(Error::MissingDash); - } - - Ok(StorePathRef { - digest, - name: validate_name(&s[ENCODED_DIGEST_SIZE + 1..])?, - }) - } - - /// Returns an absolute store path string. - /// That is just the string representation, prefixed with the store prefix - /// ([STORE_DIR_WITH_SLASH]), - pub fn to_absolute_path(&self) -> String { - format!("{}{}", STORE_DIR_WITH_SLASH, self) + fn from_str(s: &str) -> Result<Self, Self::Err> { + StorePath::<String>::from_bytes(s.as_bytes()) } } -impl<'de: 'a, 'a> Deserialize<'de> for StorePathRef<'a> { +impl<'a, 'de: 'a, S> Deserialize<'de> for StorePath<S> +where + S: std::cmp::Eq + Deref<Target = str> + From<&'a str>, +{ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, @@ -284,16 +244,19 @@ impl<'de: 'a, 'a> Deserialize<'de> for StorePathRef<'a> { &"store path prefix", ) })?; - StorePathRef::from_bytes(stripped.as_bytes()).map_err(|_| { + StorePath::from_bytes(stripped.as_bytes()).map_err(|_| { serde::de::Error::invalid_value(serde::de::Unexpected::Str(string), &"StorePath") }) } } -impl Serialize for StorePathRef<'_> { - fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> +impl<S> Serialize for StorePath<S> +where + S: std::cmp::Eq + Deref<Target = str> + Display, +{ + fn serialize<SR>(&self, serializer: SR) -> Result<SR::Ok, SR::Error> where - S: serde::Serializer, + SR: serde::Serializer, { let string: String = self.to_absolute_path(); string.serialize(serializer) @@ -347,13 +310,10 @@ pub(crate) fn validate_name(s: &(impl AsRef<[u8]> + ?Sized)) -> Result<&str, Err Ok(unsafe { str::from_utf8_unchecked(s) }) } -impl fmt::Display for StorePath { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - StorePathRef::from(self).fmt(f) - } -} - -impl fmt::Display for StorePathRef<'_> { +impl<S> fmt::Display for StorePath<S> +where + S: fmt::Display + std::cmp::Eq, +{ /// The string representation of a store path starts with a digest (20 /// bytes), 
[crate::nixbase32]-encoded, followed by a `-`, /// and ends with the name. @@ -386,12 +346,12 @@ mod tests { fn happy_path() { let example_nix_path_str = "00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432"; - let nixpath = StorePath::from_bytes(example_nix_path_str.as_bytes()) + let nixpath = StorePathRef::from_bytes(example_nix_path_str.as_bytes()) .expect("Error parsing example string"); let expected_digest: [u8; DIGEST_SIZE] = hex!("8a12321522fd91efbd60ebb2481af88580f61600"); - assert_eq!("net-tools-1.60_p20170221182432", nixpath.name()); + assert_eq!("net-tools-1.60_p20170221182432", *nixpath.name()); assert_eq!(nixpath.digest, expected_digest); assert_eq!(example_nix_path_str, nixpath.to_string()) @@ -426,8 +386,8 @@ mod tests { if w.len() < 2 { continue; } - let (pa, _) = StorePath::from_absolute_path_full(w[0]).expect("parseable"); - let (pb, _) = StorePath::from_absolute_path_full(w[1]).expect("parseable"); + let (pa, _) = StorePathRef::from_absolute_path_full(w[0]).expect("parseable"); + let (pb, _) = StorePathRef::from_absolute_path_full(w[1]).expect("parseable"); assert_eq!( Ordering::Less, pa.cmp(&pb), @@ -448,36 +408,38 @@ mod tests { /// https://github.com/NixOS/nix/pull/9867 (revert-of-revert) #[test] fn starts_with_dot() { - StorePath::from_bytes(b"fli4bwscgna7lpm7v5xgnjxrxh0yc7ra-.gitignore") + StorePathRef::from_bytes(b"fli4bwscgna7lpm7v5xgnjxrxh0yc7ra-.gitignore") .expect("must succeed"); } #[test] fn empty_name() { - StorePath::from_bytes(b"00bgd045z0d4icpbc2yy-").expect_err("must fail"); + StorePathRef::from_bytes(b"00bgd045z0d4icpbc2yy-").expect_err("must fail"); } #[test] fn excessive_length() { - StorePath::from_bytes(b"00bgd045z0d4icpbc2yy-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") + StorePathRef::from_bytes(b"00bgd045z0d4icpbc2yy-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") .expect_err("must fail"); } #[test] fn invalid_hash_length() { - StorePath::from_bytes(b"00bgd045z0d4icpbc2yy-net-tools-1.60_p20170221182432") + StorePathRef::from_bytes(b"00bgd045z0d4icpbc2yy-net-tools-1.60_p20170221182432") .expect_err("must fail"); } #[test] fn invalid_encoding_hash() { - StorePath::from_bytes(b"00bgd045z0d4icpbc2yyz4gx48aku4la-net-tools-1.60_p20170221182432") - .expect_err("must fail"); + StorePathRef::from_bytes( + b"00bgd045z0d4icpbc2yyz4gx48aku4la-net-tools-1.60_p20170221182432", + ) + .expect_err("must fail"); } #[test] fn more_than_just_the_bare_nix_store_path() { - StorePath::from_bytes( + StorePathRef::from_bytes( b"00bgd045z0d4icpbc2yyz4gx48aku4la-net-tools-1.60_p20170221182432/bin/arp", ) .expect_err("must fail"); @@ -485,7 +447,7 @@ mod tests { #[test] fn no_dash_between_hash_and_name() { - StorePath::from_bytes(b"00bgd045z0d4icpbc2yyz4gx48ak44lanet-tools-1.60_p20170221182432") + StorePathRef::from_bytes(b"00bgd045z0d4icpbc2yyz4gx48ak44lanet-tools-1.60_p20170221182432") .expect_err("must fail"); } @@ -534,7 +496,7 @@ mod tests { #[test] fn serialize_owned() { - let nixpath_actual = StorePath::from_bytes( + let nixpath_actual = StorePathRef::from_bytes( b"00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432", ) .expect("can parse"); @@ -578,7 +540,8 @@ mod tests { let store_path_str_json = 
"\"/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432\""; - let store_path: StorePath = serde_json::from_str(store_path_str_json).expect("valid json"); + let store_path: StorePath<String> = + serde_json::from_str(store_path_str_json).expect("valid json"); assert_eq!( "/nix/store/00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432", @@ -601,7 +564,7 @@ mod tests { StorePath::from_bytes(b"00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432").unwrap(), PathBuf::from("bin/arp/"))] fn from_absolute_path_full( #[case] s: &str, - #[case] exp_store_path: StorePath, + #[case] exp_store_path: StorePath<&str>, #[case] exp_path: PathBuf, ) { let (actual_store_path, actual_path) = @@ -615,15 +578,15 @@ mod tests { fn from_absolute_path_errors() { assert_eq!( Error::InvalidLength, - StorePath::from_absolute_path_full("/nix/store/").expect_err("must fail") + StorePathRef::from_absolute_path_full("/nix/store/").expect_err("must fail") ); assert_eq!( Error::InvalidLength, - StorePath::from_absolute_path_full("/nix/store/foo").expect_err("must fail") + StorePathRef::from_absolute_path_full("/nix/store/foo").expect_err("must fail") ); assert_eq!( Error::MissingStoreDir, - StorePath::from_absolute_path_full( + StorePathRef::from_absolute_path_full( "00bgd045z0d4icpbc2yyz4gx48ak44la-net-tools-1.60_p20170221182432" ) .expect_err("must fail") diff --git a/tvix/nix-compat/src/store_path/utils.rs b/tvix/nix-compat/src/store_path/utils.rs index d6f390db85c2..4bfbb72fcdde 100644 --- a/tvix/nix-compat/src/store_path/utils.rs +++ b/tvix/nix-compat/src/store_path/utils.rs @@ -1,6 +1,6 @@ use crate::nixbase32; use crate::nixhash::{CAHash, NixHash}; -use crate::store_path::{Error, StorePathRef, STORE_DIR}; +use crate::store_path::{Error, StorePath, StorePathRef, STORE_DIR}; use data_encoding::HEXLOWER; use sha2::{Digest, Sha256}; use thiserror; @@ -43,11 +43,17 @@ pub fn compress_hash<const OUTPUT_SIZE: usize>(input: &[u8]) -> [u8; OUTPUT_SIZE /// derivation or a literal text file that may contain references. /// If you don't want to have to pass the entire contents, you might want to use /// [build_ca_path] instead. -pub fn build_text_path<S: AsRef<str>, I: IntoIterator<Item = S>, C: AsRef<[u8]>>( - name: &str, +pub fn build_text_path<'a, S, SP, I, C>( + name: &'a str, content: C, references: I, -) -> Result<StorePathRef<'_>, BuildStorePathError> { +) -> Result<StorePath<SP>, BuildStorePathError> +where + S: AsRef<str>, + SP: std::cmp::Eq + std::ops::Deref<Target = str> + std::convert::From<&'a str>, + I: IntoIterator<Item = S>, + C: AsRef<[u8]>, +{ // produce the sha256 digest of the contents let content_digest = Sha256::new_with_prefix(content).finalize().into(); @@ -55,12 +61,17 @@ pub fn build_text_path<S: AsRef<str>, I: IntoIterator<Item = S>, C: AsRef<[u8]>> } /// This builds a store path from a [CAHash] and a list of references. -pub fn build_ca_path<'a, S: AsRef<str>, I: IntoIterator<Item = S>>( +pub fn build_ca_path<'a, S, SP, I>( name: &'a str, ca_hash: &CAHash, references: I, self_reference: bool, -) -> Result<StorePathRef<'a>, BuildStorePathError> { +) -> Result<StorePath<SP>, BuildStorePathError> +where + S: AsRef<str>, + SP: std::cmp::Eq + std::ops::Deref<Target = str> + std::convert::From<&'a str>, + I: IntoIterator<Item = S>, +{ // self references are only allowed for CAHash::Nar(NixHash::Sha256(_)). 
if self_reference && matches!(ca_hash, CAHash::Nar(NixHash::Sha256(_))) { return Err(BuildStorePathError::InvalidReference()); @@ -145,17 +156,20 @@ pub fn build_output_path<'a>( /// bytes. /// Inside a StorePath, that digest is printed nixbase32-encoded /// (32 characters). -fn build_store_path_from_fingerprint_parts<'a>( +fn build_store_path_from_fingerprint_parts<'a, S>( ty: &str, inner_digest: &[u8; 32], name: &'a str, -) -> Result<StorePathRef<'a>, Error> { +) -> Result<StorePath<S>, Error> +where + S: std::cmp::Eq + std::ops::Deref<Target = str> + std::convert::From<&'a str>, +{ let fingerprint = format!( "{ty}:sha256:{}:{STORE_DIR}:{name}", HEXLOWER.encode(inner_digest) ); // name validation happens in here. - StorePathRef::from_name_and_digest_fixed( + StorePath::from_name_and_digest_fixed( name, compress_hash(&Sha256::new_with_prefix(fingerprint).finalize()), ) @@ -216,7 +230,7 @@ mod test { // nix-repl> builtins.toFile "foo" "bar" // "/nix/store/vxjiwkjkn7x4079qvh1jkl5pn05j2aw0-foo" - let store_path = build_text_path("foo", "bar", Vec::<String>::new()) + let store_path: StorePathRef = build_text_path("foo", "bar", Vec::<String>::new()) .expect("build_store_path() should succeed"); assert_eq!( @@ -232,11 +246,11 @@ mod test { // nix-repl> builtins.toFile "baz" "${builtins.toFile "foo" "bar"}" // "/nix/store/5xd714cbfnkz02h2vbsj4fm03x3f15nf-baz" - let inner = build_text_path("foo", "bar", Vec::<String>::new()) + let inner: StorePathRef = build_text_path("foo", "bar", Vec::<String>::new()) .expect("path_with_references() should succeed"); let inner_path = inner.to_absolute_path(); - let outer = build_text_path("baz", &inner_path, vec![inner_path.as_str()]) + let outer: StorePathRef = build_text_path("baz", &inner_path, vec![inner_path.as_str()]) .expect("path_with_references() should succeed"); assert_eq!( @@ -247,7 +261,7 @@ mod test { #[test] fn build_sha1_path() { - let outer = build_ca_path( + let outer: StorePathRef = build_ca_path( "bar", &CAHash::Nar(NixHash::Sha1(hex!( "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33" @@ -272,7 +286,7 @@ mod test { // // $ nix store make-content-addressed /nix/store/5xd714cbfnkz02h2vbsj4fm03x3f15nf-baz // rewrote '/nix/store/5xd714cbfnkz02h2vbsj4fm03x3f15nf-baz' to '/nix/store/s89y431zzhmdn3k8r96rvakryddkpv2v-baz' - let outer = build_ca_path( + let outer: StorePathRef = build_ca_path( "baz", &CAHash::Nar(NixHash::Sha256( nixbase32::decode(b"1xqkzcb3909fp07qngljr4wcdnrh1gdam1m2n29i6hhrxlmkgkv1") diff --git a/tvix/store/src/import.rs b/tvix/store/src/import.rs index 5a21b375b575..1719669a4285 100644 --- a/tvix/store/src/import.rs +++ b/tvix/store/src/import.rs @@ -8,7 +8,7 @@ use tvix_castore::{ use nix_compat::{ nixhash::{CAHash, NixHash}, - store_path::{self, StorePath}, + store_path::{self, StorePathRef}, }; use crate::{ @@ -115,7 +115,7 @@ pub async fn import_path_as_nar_ca<BS, DS, PS, NS, P>( directory_service: DS, path_info_service: PS, nar_calculation_service: NS, -) -> Result<StorePath, std::io::Error> +) -> Result<StorePathRef, std::io::Error> where P: AsRef<Path> + std::fmt::Debug, BS: BlobService + Clone, @@ -161,7 +161,7 @@ where // callers don't really need it. 
let _path_info = path_info_service.as_ref().put(path_info).await?; - Ok(output_path.to_owned()) + Ok(output_path) } #[cfg(test)] diff --git a/tvix/store/src/pathinfoservice/fs/mod.rs b/tvix/store/src/pathinfoservice/fs/mod.rs index d97032b6658e..1f7fa8a8afce 100644 --- a/tvix/store/src/pathinfoservice/fs/mod.rs +++ b/tvix/store/src/pathinfoservice/fs/mod.rs @@ -1,5 +1,6 @@ use futures::stream::BoxStream; use futures::StreamExt; +use nix_compat::store_path::StorePathRef; use tonic::async_trait; use tvix_castore::fs::{RootNodes, TvixStoreFs}; use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService}; @@ -48,7 +49,7 @@ where T: AsRef<dyn PathInfoService> + Send + Sync, { async fn get_by_basename(&self, name: &PathComponent) -> Result<Option<Node>, Error> { - let Ok(store_path) = nix_compat::store_path::StorePath::from_bytes(name.as_ref()) else { + let Ok(store_path) = StorePathRef::from_bytes(name.as_ref()) else { return Ok(None); }; diff --git a/tvix/store/src/proto/mod.rs b/tvix/store/src/proto/mod.rs index e6b411664037..f3ea4b196946 100644 --- a/tvix/store/src/proto/mod.rs +++ b/tvix/store/src/proto/mod.rs @@ -87,7 +87,7 @@ impl PathInfo { /// validate performs some checks on the PathInfo struct, /// Returning either a [store_path::StorePath] of the root node, or a /// [ValidatePathInfoError]. - pub fn validate(&self) -> Result<store_path::StorePath, ValidatePathInfoError> { + pub fn validate(&self) -> Result<store_path::StorePath<String>, ValidatePathInfoError> { // ensure the references have the right number of bytes. for (i, reference) in self.references.iter().enumerate() { if reference.len() != store_path::DIGEST_SIZE { @@ -118,7 +118,7 @@ impl PathInfo { // parse references in reference_names. for (i, reference_name_str) in narinfo.reference_names.iter().enumerate() { // ensure thy parse as (non-absolute) store path - let reference_names_store_path = store_path::StorePath::from_bytes( + let reference_names_store_path = store_path::StorePathRef::from_bytes( reference_name_str.as_bytes(), ) .map_err(|_| { @@ -160,6 +160,10 @@ impl PathInfo { let root_nix_path = match &self.node { None => Err(ValidatePathInfoError::NoNodePresent)?, Some(node) => { + // NOTE: We could have some PathComponent not allocating here, + // so this can return StorePathRef. + // However, as this will get refactored away to stricter types + // soon anyways, there's no point. 
let (name, _node) = node .clone() .into_name_and_node() @@ -356,7 +360,7 @@ impl From<&nix_compat::narinfo::NarInfo<'_>> for NarInfo { signatures, reference_names: value.references.iter().map(|r| r.to_string()).collect(), deriver: value.deriver.as_ref().map(|sp| StorePath { - name: sp.name().to_owned(), + name: (*sp.name()).to_owned(), digest: Bytes::copy_from_slice(sp.digest()), }), ca: value.ca.as_ref().map(|ca| ca.into()), diff --git a/tvix/store/src/proto/tests/pathinfo.rs b/tvix/store/src/proto/tests/pathinfo.rs index 253cafad01f4..eb47e592be8c 100644 --- a/tvix/store/src/proto/tests/pathinfo.rs +++ b/tvix/store/src/proto/tests/pathinfo.rs @@ -14,7 +14,7 @@ use tvix_castore::{DirectoryError, ValidateNodeError}; fn validate_pathinfo( #[case] node: Option<castorepb::Node>, - #[case] exp_result: Result<StorePath, ValidatePathInfoError>, + #[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>, ) { // construct the PathInfo object let p = PathInfo { @@ -46,7 +46,7 @@ fn validate_pathinfo( )))] fn validate_directory( #[case] directory_node: castorepb::DirectoryNode, - #[case] exp_result: Result<StorePath, ValidatePathInfoError>, + #[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>, ) { // construct the PathInfo object let p = PathInfo { @@ -89,7 +89,7 @@ fn validate_directory( )] fn validate_file( #[case] file_node: castorepb::FileNode, - #[case] exp_result: Result<StorePath, ValidatePathInfoError>, + #[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>, ) { // construct the PathInfo object let p = PathInfo { @@ -121,7 +121,7 @@ fn validate_file( )] fn validate_symlink( #[case] symlink_node: castorepb::SymlinkNode, - #[case] exp_result: Result<StorePath, ValidatePathInfoError>, + #[case] exp_result: Result<StorePath<String>, ValidatePathInfoError>, ) { // construct the PathInfo object let p = PathInfo { |