Diffstat (limited to 'tvix/store/src')
-rw-r--r--  tvix/store/src/bin/tvix-store.rs                        |  8
-rw-r--r--  tvix/store/src/directoryservice/traverse.rs             | 10
-rw-r--r--  tvix/store/src/fuse/inode_tracker.rs                    | 13
-rw-r--r--  tvix/store/src/fuse/inodes.rs                           |  2
-rw-r--r--  tvix/store/src/fuse/mod.rs                              | 11
-rw-r--r--  tvix/store/src/fuse/tests.rs                            | 20
-rw-r--r--  tvix/store/src/import.rs                                | 34
-rw-r--r--  tvix/store/src/nar/mod.rs                               | 12
-rw-r--r--  tvix/store/src/proto/grpc_directoryservice_wrapper.rs   |  4
-rw-r--r--  tvix/store/src/proto/mod.rs                             | 83
-rw-r--r--  tvix/store/src/proto/tests/directory.rs                 | 60
-rw-r--r--  tvix/store/src/proto/tests/directory_nodes_iterator.rs  | 34
-rw-r--r--  tvix/store/src/proto/tests/grpc_directoryservice.rs     |  6
-rw-r--r--  tvix/store/src/proto/tests/grpc_pathinfoservice.rs      |  4
-rw-r--r--  tvix/store/src/proto/tests/pathinfo.rs                  | 24
-rw-r--r--  tvix/store/src/store_io.rs                              | 30
-rw-r--r--  tvix/store/src/tests/fixtures.rs                        | 16
-rw-r--r--  tvix/store/src/tests/import.rs                          | 17
-rw-r--r--  tvix/store/src/tests/nar_renderer.rs                    | 16
19 files changed, 197 insertions, 207 deletions
diff --git a/tvix/store/src/bin/tvix-store.rs b/tvix/store/src/bin/tvix-store.rs
index 054b4f957206..fa5b9bfca9f0 100644
--- a/tvix/store/src/bin/tvix-store.rs
+++ b/tvix/store/src/bin/tvix-store.rs
@@ -234,7 +234,7 @@ fn print_node(node: &Node, path: &Path) {
         Node::Directory(directory_node) => {
             info!(
                 path = ?path,
-                name = directory_node.name,
+                name = ?directory_node.name,
                 digest = BASE64.encode(&directory_node.digest),
                 "import successful",
             )
@@ -242,7 +242,7 @@ fn print_node(node: &Node, path: &Path) {
         Node::File(file_node) => {
             info!(
                 path = ?path,
-                name = file_node.name,
+                name = ?file_node.name,
                 digest = BASE64.encode(&file_node.digest),
                 "import successful"
             )
@@ -250,8 +250,8 @@ fn print_node(node: &Node, path: &Path) {
         Node::Symlink(symlink_node) => {
             info!(
                 path = ?path,
-                name = symlink_node.name,
-                target = symlink_node.target,
+                name = ?symlink_node.name,
+                target = ?symlink_node.target,
                 "import successful"
             )
         }
diff --git a/tvix/store/src/directoryservice/traverse.rs b/tvix/store/src/directoryservice/traverse.rs
index 8dfccd4ffbfa..17f709f40d1d 100644
--- a/tvix/store/src/directoryservice/traverse.rs
+++ b/tvix/store/src/directoryservice/traverse.rs
@@ -1,6 +1,6 @@
 use super::DirectoryService;
 use crate::{proto::NamedNode, B3Digest, Error};
-use std::sync::Arc;
+use std::{os::unix::ffi::OsStrExt, sync::Arc};
 use tracing::{instrument, warn};
 
 /// This traverses from a (root) node to the given (sub)path, returning the Node
@@ -58,9 +58,9 @@ pub fn traverse_to(
                 // look for first_component in the [Directory].
                 // FUTUREWORK: as the nodes() iterator returns in a sorted fashion, we
                 // could stop as soon as e.name is larger than the search string.
-                let child_node = directory.nodes().find(|n| {
-                    n.get_name() == first_component.as_os_str().to_str().unwrap()
-                });
+                let child_node = directory
+                    .nodes()
+                    .find(|n| n.get_name() == first_component.as_os_str().as_bytes());
 
                 match child_node {
                     // child node not found means there's no such element inside the directory.
@@ -105,7 +105,7 @@ mod tests {
         // construct the node for DIRECTORY_COMPLICATED
         let node_directory_complicated =
             crate::proto::node::Node::Directory(crate::proto::DirectoryNode {
-                name: "doesntmatter".to_string(),
+                name: "doesntmatter".into(),
                 digest: DIRECTORY_COMPLICATED.digest().to_vec(),
                 size: DIRECTORY_COMPLICATED.size(),
             });
diff --git a/tvix/store/src/fuse/inode_tracker.rs b/tvix/store/src/fuse/inode_tracker.rs
index 06434d25b37d..8d91564712d0 100644
--- a/tvix/store/src/fuse/inode_tracker.rs
+++ b/tvix/store/src/fuse/inode_tracker.rs
@@ -13,7 +13,7 @@ pub struct InodeTracker {
     blob_digest_to_inode: HashMap<B3Digest, u64>,
 
     // lookup table for symlinks by their target
-    symlink_target_to_inode: HashMap<String, u64>,
+    symlink_target_to_inode: HashMap<Vec<u8>, u64>,
 
     // lookup table for directories by their B3Digest.
     // Note the corresponding directory may not be present in data yet.
@@ -171,7 +171,7 @@ impl InodeTracker {
                 self.blob_digest_to_inode.insert(digest.clone(), ino);
             }
             InodeData::Symlink(ref target) => {
-                self.symlink_target_to_inode.insert(target.to_string(), ino);
+                self.symlink_target_to_inode.insert(target.to_vec(), ino);
             }
             InodeData::Directory(DirectoryInodeData::Sparse(ref digest, _size)) => {
                 self.directory_digest_to_inode.insert(digest.clone(), ino);
@@ -251,7 +251,7 @@ mod tests {
     #[test]
     fn put_symlink() {
         let mut inode_tracker = InodeTracker::default();
-        let f = InodeData::Symlink("target".to_string());
+        let f = InodeData::Symlink("target".into());
 
         // put it in
         let ino = inode_tracker.put(f.clone());
@@ -260,7 +260,7 @@ mod tests {
         let data = inode_tracker.get(ino).expect("must be some");
         match *data {
             InodeData::Symlink(ref target) => {
-                assert_eq!("target", target);
+                assert_eq!(b"target".to_vec(), *target);
             }
             InodeData::Regular(..) | InodeData::Directory(..) => panic!("wrong type"),
         }
@@ -269,10 +269,7 @@ mod tests {
         assert_eq!(ino, inode_tracker.put(f));
 
         // inserting another file should return a different ino
-        assert_ne!(
-            ino,
-            inode_tracker.put(InodeData::Symlink("target2".to_string()))
-        );
+        assert_ne!(ino, inode_tracker.put(InodeData::Symlink("target2".into())));
     }
 
     // TODO: put sparse directory
diff --git a/tvix/store/src/fuse/inodes.rs b/tvix/store/src/fuse/inodes.rs
index 78756472be20..bf883cb22ff8 100644
--- a/tvix/store/src/fuse/inodes.rs
+++ b/tvix/store/src/fuse/inodes.rs
@@ -5,7 +5,7 @@ use crate::{proto, B3Digest};
 #[derive(Clone, Debug)]
 pub enum InodeData {
     Regular(B3Digest, u32, bool),  // digest, size, executable
-    Symlink(String),               // target
+    Symlink(Vec<u8>),              // target
     Directory(DirectoryInodeData), // either [DirectoryInodeData:Sparse] or [DirectoryInodeData:Populated]
 }
 
diff --git a/tvix/store/src/fuse/mod.rs b/tvix/store/src/fuse/mod.rs
index dd2c479b3de5..077c6ce048bf 100644
--- a/tvix/store/src/fuse/mod.rs
+++ b/tvix/store/src/fuse/mod.rs
@@ -19,6 +19,7 @@ use crate::{
 use fuser::{FileAttr, ReplyAttr, Request};
 use nix_compat::store_path::StorePath;
 use std::io::{self, Read, Seek};
+use std::os::unix::ffi::OsStrExt;
 use std::str::FromStr;
 use std::sync::Arc;
 use std::{collections::HashMap, time::Duration};
@@ -142,7 +143,7 @@ impl FUSE {
        let root_node = path_info.node.unwrap().node.unwrap();
 
        // The name must match what's passed in the lookup, otherwise we return nothing.
-       if root_node.get_name() != store_path.to_string() {
+       if root_node.get_name() != store_path.to_string().as_bytes() {
            return Ok(None);
        }
 
@@ -279,7 +280,7 @@ impl fuser::Filesystem for FUSE {
             let _enter = span.enter();
 
             // in the children, find the one with the desired name.
-            if let Some((child_ino, _)) = children.iter().find(|e| e.1.get_name() == name) {
+            if let Some((child_ino, _)) =
+                children.iter().find(|e| e.1.get_name() == name.as_bytes())
+            {
                 // lookup the child [InodeData] in [self.inode_tracker].
                 // We know the inodes for children have already been allocated.
                 let child_inode_data = self.inode_tracker.get(*child_ino).unwrap();
@@ -353,7 +356,7 @@ impl fuser::Filesystem for FUSE {
                     Node::File(_) => fuser::FileType::RegularFile,
                     Node::Symlink(_) => fuser::FileType::Symlink,
                 },
-                child_node.get_name(),
+                std::ffi::OsStr::from_bytes(child_node.get_name()),
             );
             if full {
                 break;
@@ -480,7 +483,7 @@ impl fuser::Filesystem for FUSE {
                 InodeData::Directory(..) | InodeData::Regular(..) => {
                     reply.error(libc::EINVAL);
                 }
-                InodeData::Symlink(ref target) => reply.data(target.as_bytes()),
+                InodeData::Symlink(ref target) => reply.data(target),
             }
         }
     }
diff --git a/tvix/store/src/fuse/tests.rs b/tvix/store/src/fuse/tests.rs
index 1a53b9f5ddd1..8577e062e928 100644
--- a/tvix/store/src/fuse/tests.rs
+++ b/tvix/store/src/fuse/tests.rs
@@ -57,7 +57,7 @@ fn populate_blob_a(
     let path_info = PathInfo {
         node: Some(proto::Node {
             node: Some(proto::node::Node::File(FileNode {
-                name: BLOB_A_NAME.to_string(),
+                name: BLOB_A_NAME.into(),
                 digest: fixtures::BLOB_A_DIGEST.to_vec(),
                 size: fixtures::BLOB_A.len() as u32,
                 executable: false,
@@ -83,7 +83,7 @@ fn populate_blob_b(
     let path_info = PathInfo {
         node: Some(proto::Node {
             node: Some(proto::node::Node::File(FileNode {
-                name: BLOB_B_NAME.to_string(),
+                name: BLOB_B_NAME.into(),
                 digest: fixtures::BLOB_B_DIGEST.to_vec(),
                 size: fixtures::BLOB_B.len() as u32,
                 executable: false,
@@ -103,8 +103,8 @@ fn populate_symlink(
     let path_info = PathInfo {
         node: Some(proto::Node {
             node: Some(proto::node::Node::Symlink(proto::SymlinkNode {
-                name: SYMLINK_NAME.to_string(),
-                target: BLOB_A_NAME.to_string(),
+                name: SYMLINK_NAME.into(),
+                target: BLOB_A_NAME.into(),
             })),
         }),
         ..Default::default()
@@ -123,8 +123,8 @@ fn populate_symlink2(
     let path_info = PathInfo {
         node: Some(proto::Node {
             node: Some(proto::node::Node::Symlink(proto::SymlinkNode {
-                name: SYMLINK_NAME2.to_string(),
-                target: "/nix/store/somewhereelse".to_string(),
+                name: SYMLINK_NAME2.into(),
+                target: "/nix/store/somewhereelse".into(),
             })),
         }),
         ..Default::default()
@@ -153,7 +153,7 @@ fn populate_directory_with_keep(
     let path_info = PathInfo {
         node: Some(proto::Node {
             node: Some(proto::node::Node::Directory(DirectoryNode {
-                name: DIRECTORY_WITH_KEEP_NAME.to_string(),
+                name: DIRECTORY_WITH_KEEP_NAME.into(),
                 digest: fixtures::DIRECTORY_WITH_KEEP.digest().to_vec(),
                 size: fixtures::DIRECTORY_WITH_KEEP.size(),
             })),
@@ -174,7 +174,7 @@ fn populate_pathinfo_without_directory(
     let path_info = PathInfo {
         node: Some(proto::Node {
             node: Some(proto::node::Node::Directory(DirectoryNode {
-                name: DIRECTORY_WITH_KEEP_NAME.to_string(),
+                name: DIRECTORY_WITH_KEEP_NAME.into(),
                 digest: fixtures::DIRECTORY_WITH_KEEP.digest().to_vec(),
                 size: fixtures::DIRECTORY_WITH_KEEP.size(),
             })),
@@ -194,7 +194,7 @@ fn populate_blob_a_without_blob(
     let path_info = PathInfo {
         node: Some(proto::Node {
             node: Some(proto::node::Node::File(FileNode {
-                name: BLOB_A_NAME.to_string(),
+                name: BLOB_A_NAME.into(),
                 digest: fixtures::BLOB_A_DIGEST.to_vec(),
                 size: fixtures::BLOB_A.len() as u32,
                 executable: false,
@@ -231,7 +231,7 @@ fn populate_directory_complicated(
     let path_info = PathInfo {
         node: Some(proto::Node {
             node: Some(proto::node::Node::Directory(DirectoryNode {
-                name: DIRECTORY_COMPLICATED_NAME.to_string(),
+                name: DIRECTORY_COMPLICATED_NAME.into(),
                 digest: fixtures::DIRECTORY_COMPLICATED.digest().to_vec(),
                 size: fixtures::DIRECTORY_COMPLICATED.size(),
             })),
diff --git a/tvix/store/src/import.rs b/tvix/store/src/import.rs
index dd366aef9576..74c45c7a7da8 100644
--- a/tvix/store/src/import.rs
+++ b/tvix/store/src/import.rs
@@ -1,6 +1,7 @@
 use crate::blobservice::BlobService;
 use crate::directoryservice::DirectoryService;
 use crate::{directoryservice::DirectoryPutter, proto};
+use std::os::unix::ffi::OsStrExt;
 use std::sync::Arc;
 use std::{
     collections::HashMap,
@@ -79,11 +80,7 @@ fn process_entry(
             .map_err(|e| Error::UploadDirectoryError(entry.path().to_path_buf(), e))?;
 
         return Ok(proto::node::Node::Directory(proto::DirectoryNode {
-            name: entry
-                .file_name()
-                .to_str()
-                .map(|s| Ok(s.to_owned()))
-                .unwrap_or(Err(Error::InvalidEncoding(entry.path().to_path_buf())))?,
+            name: entry.file_name().as_bytes().to_vec(),
             digest: directory_digest.to_vec(),
             size: directory_size,
         }));
@@ -94,15 +91,8 @@ fn process_entry(
             .map_err(|e| Error::UnableToStat(entry_path.clone(), e))?;
 
         return Ok(proto::node::Node::Symlink(proto::SymlinkNode {
-            name: entry
-                .file_name()
-                .to_str()
-                .map(|s| Ok(s.to_owned()))
-                .unwrap_or(Err(Error::InvalidEncoding(entry.path().to_path_buf())))?,
-            target: target
-                .to_str()
-                .map(|s| Ok(s.to_owned()))
-                .unwrap_or(Err(Error::InvalidEncoding(entry.path().to_path_buf())))?,
+            name: entry.file_name().as_bytes().to_vec(),
+            target: target.as_os_str().as_bytes().to_vec(),
         }));
     }
 
@@ -123,11 +113,7 @@ fn process_entry(
         let digest = writer.close()?;
 
         return Ok(proto::node::Node::File(proto::FileNode {
-            name: entry
-                .file_name()
-                .to_str()
-                .map(|s| Ok(s.to_owned()))
-                .unwrap_or(Err(Error::InvalidEncoding(entry.path().to_path_buf())))?,
+            name: entry.file_name().as_bytes().to_vec(),
             digest: digest.to_vec(),
             size: metadata.len() as u32,
             // If it's executable by the user, it'll become executable.
@@ -163,13 +149,9 @@ pub fn ingest_path<P: AsRef<Path> + Debug>(
                 .as_ref()
                 .file_name()
                 .unwrap_or_default()
-                .to_str()
-                .map(|s| Ok(s.to_owned()))
-                .unwrap_or(Err(Error::InvalidEncoding(p.as_ref().to_path_buf())))?,
+                .as_bytes()
+                .to_vec(),
-            target: target
-                .to_str()
-                .map(|s| Ok(s.to_owned()))
-                .unwrap_or(Err(Error::InvalidEncoding(p.as_ref().to_path_buf())))?,
+            target: target.as_os_str().as_bytes().to_vec(),
         }));
     }
 
diff --git a/tvix/store/src/nar/mod.rs b/tvix/store/src/nar/mod.rs
index 13e4f7bd93f6..84a48ba5f6b5 100644
--- a/tvix/store/src/nar/mod.rs
+++ b/tvix/store/src/nar/mod.rs
@@ -12,14 +12,14 @@ pub enum RenderError {
     #[error("failure talking to a backing store client: {0}")]
     StoreError(crate::Error),
 
-    #[error("unable to find directory {}, referred from {}", .0, .1)]
-    DirectoryNotFound(B3Digest, String),
+    #[error("unable to find directory {}, referred from {:?}", .0, .1)]
+    DirectoryNotFound(B3Digest, Vec<u8>),
 
-    #[error("unable to find blob {}, referred from {}", BASE64.encode(.0), .1)]
-    BlobNotFound([u8; 32], String),
+    #[error("unable to find blob {}, referred from {:?}", BASE64.encode(.0), .1)]
+    BlobNotFound([u8; 32], Vec<u8>),
 
-    #[error("unexpected size in metadata for blob {}, referred from {} returned, expected {}, got {}", BASE64.encode(.0), .1, .2, .3)]
-    UnexpectedBlobMeta([u8; 32], String, u32, u32),
+    #[error("unexpected size in metadata for blob {}, referred from {:?} returned, expected {}, got {}", BASE64.encode(.0), .1, .2, .3)]
+    UnexpectedBlobMeta([u8; 32], Vec<u8>, u32, u32),
 
     #[error("failure using the NAR writer: {0}")]
     NARWriterError(std::io::Error),
diff --git a/tvix/store/src/proto/grpc_directoryservice_wrapper.rs b/tvix/store/src/proto/grpc_directoryservice_wrapper.rs
index 434d660f3d41..ec9e3cb123eb 100644
--- a/tvix/store/src/proto/grpc_directoryservice_wrapper.rs
+++ b/tvix/store/src/proto/grpc_directoryservice_wrapper.rs
@@ -116,7 +116,7 @@ impl proto::directory_service_server::DirectoryService for GRPCDirectoryServiceW
                         match seen_directories_sizes.get(&child_directory_digest) {
                             None => {
                                 return Err(Status::invalid_argument(format!(
-                                    "child directory '{}' ({}) in directory '{}' not seen yet",
+                                    "child directory '{:?}' ({}) in directory '{}' not seen yet",
                                     child_directory.name,
                                     &child_directory_digest,
                                     &directory.digest(),
@@ -125,7 +125,7 @@ impl proto::directory_service_server::DirectoryService for GRPCDirectoryServiceW
                             Some(seen_child_directory_size) => {
                                 if seen_child_directory_size != &child_directory.size {
                                     return Err(Status::invalid_argument(format!(
-                                        "child directory '{}' ({}) in directory '{}' referred with wrong size, expected {}, actual {}",
+                                        "child directory '{:?}' ({}) in directory '{}' referred with wrong size, expected {}, actual {}",
                                         child_directory.name,
                                         &child_directory_digest,
                                         &directory.digest(),
diff --git a/tvix/store/src/proto/mod.rs b/tvix/store/src/proto/mod.rs
index 7e69726632ce..7e81efc51782 100644
--- a/tvix/store/src/proto/mod.rs
+++ b/tvix/store/src/proto/mod.rs
@@ -1,6 +1,5 @@
 #![allow(clippy::derive_partial_eq_without_eq)]
 // https://github.com/hyperium/tonic/issues/1056
-use std::str::FromStr;
 use std::{collections::HashSet, iter::Peekable};
 
 use thiserror::Error;
@@ -35,14 +34,14 @@ mod tests;
 #[derive(Debug, PartialEq, Eq, Error)]
 pub enum ValidateDirectoryError {
     /// Elements are not in sorted order
-    #[error("{0} is not sorted")]
-    WrongSorting(String),
+    #[error("{0:?} is not sorted")]
+    WrongSorting(Vec<u8>),
     /// Multiple elements with the same name encountered
-    #[error("{0} is a duplicate name")]
-    DuplicateName(String),
+    #[error("{0:?} is a duplicate name")]
+    DuplicateName(Vec<u8>),
     /// Invalid name encountered
-    #[error("Invalid name in {0}")]
-    InvalidName(String),
+    #[error("Invalid name in {0:?}")]
+    InvalidName(Vec<u8>),
     /// Invalid digest length encountered
     #[error("Invalid Digest length: {0}")]
     InvalidDigestLen(usize),
@@ -56,8 +55,8 @@ pub enum ValidatePathInfoError {
     NoNodePresent(),
 
     /// Invalid node name encountered.
-    #[error("Failed to parse {0} as StorePath: {1}")]
-    InvalidNodeName(String, store_path::Error),
+    #[error("Failed to parse {0:?} as StorePath: {1}")]
+    InvalidNodeName(Vec<u8>, store_path::Error),
 
     /// The digest the (root) node refers to has invalid length.
     #[error("Invalid Digest length: {0}")]
@@ -73,10 +72,14 @@ pub enum ValidatePathInfoError {
 /// error that's generated from the supplied constructor.
 ///
 /// We disallow slashes, null bytes, '.', '..' and the empty string.
-fn validate_node_name<E>(name: &str, err: fn(String) -> E) -> Result<(), E> {
-    if name.is_empty() || name == ".." || name == "." || name.contains('\x00') || name.contains('/')
+fn validate_node_name<E>(name: &[u8], err: fn(Vec<u8>) -> E) -> Result<(), E> {
+    if name.is_empty()
+        || name == b".."
+        || name == b"."
+        || name.contains(&0x00)
+        || name.contains(&b'/')
     {
-        return Err(err(name.to_string()));
+        return Err(err(name.to_vec()));
     }
     Ok(())
 }
@@ -95,12 +98,12 @@ fn validate_digest<E>(digest: &Vec<u8>, err: fn(usize) -> E) -> Result<(), E> {
 /// On success, this returns the parsed [StorePath].
 /// On error, it returns an error generated from the supplied constructor.
 fn parse_node_name_root<E>(
-    name: &str,
-    err: fn(String, store_path::Error) -> E,
+    name: &[u8],
+    err: fn(Vec<u8>, store_path::Error) -> E,
 ) -> Result<StorePath, E> {
-    match StorePath::from_str(name) {
+    match StorePath::from_bytes(name) {
         Ok(np) => Ok(np),
-        Err(e) => Err(err(name.to_string(), e)),
+        Err(e) => Err(err(name.to_vec(), e)),
     }
 }
 
@@ -169,29 +172,29 @@ impl PathInfo {
 /// NamedNode is implemented for [FileNode], [DirectoryNode] and [SymlinkNode]
 /// and [node::Node], so we can ask all of them for the name easily.
 pub trait NamedNode {
-    fn get_name(&self) -> &str;
+    fn get_name(&self) -> &[u8];
 }
 
 impl NamedNode for &FileNode {
-    fn get_name(&self) -> &str {
-        self.name.as_str()
+    fn get_name(&self) -> &[u8] {
+        &self.name
     }
 }
 
 impl NamedNode for &DirectoryNode {
-    fn get_name(&self) -> &str {
-        self.name.as_str()
+    fn get_name(&self) -> &[u8] {
+        &self.name
    }
 }
 
 impl NamedNode for &SymlinkNode {
-    fn get_name(&self) -> &str {
-        self.name.as_str()
+    fn get_name(&self) -> &[u8] {
+        &self.name
     }
 }
 
 impl NamedNode for node::Node {
-    fn get_name(&self) -> &str {
+    fn get_name(&self) -> &[u8] {
         match self {
             node::Node::File(node_file) => &node_file.name,
             node::Node::Directory(node_directory) => &node_directory.name,
@@ -204,11 +207,11 @@ impl NamedNode for node::Node {
 /// If the passed name is larger than the previous one, the reference is updated.
 /// If it's not, an error is returned.
 fn update_if_lt_prev<'n>(
-    prev_name: &mut &'n str,
-    name: &'n str,
+    prev_name: &mut &'n [u8],
+    name: &'n [u8],
 ) -> Result<(), ValidateDirectoryError> {
     if *name < **prev_name {
-        return Err(ValidateDirectoryError::WrongSorting(name.to_string()));
+        return Err(ValidateDirectoryError::WrongSorting(name.to_vec()));
     }
     *prev_name = name;
     Ok(())
@@ -217,11 +220,11 @@ fn update_if_lt_prev<'n>(
 /// Inserts the given name into a HashSet if it's not already in there.
 /// If it is, an error is returned.
 fn insert_once<'n>(
-    seen_names: &mut HashSet<&'n str>,
-    name: &'n str,
+    seen_names: &mut HashSet<&'n [u8]>,
+    name: &'n [u8],
 ) -> Result<(), ValidateDirectoryError> {
     if seen_names.get(name).is_some() {
-        return Err(ValidateDirectoryError::DuplicateName(name.to_string()));
+        return Err(ValidateDirectoryError::DuplicateName(name.to_vec()));
     }
     seen_names.insert(name);
     Ok(())
@@ -258,11 +261,11 @@ impl Directory {
     /// - not properly sorted lists
     /// - duplicate names in the three lists
     pub fn validate(&self) -> Result<(), ValidateDirectoryError> {
-        let mut seen_names: HashSet<&str> = HashSet::new();
+        let mut seen_names: HashSet<&[u8]> = HashSet::new();
 
-        let mut last_directory_name: &str = "";
-        let mut last_file_name: &str = "";
-        let mut last_symlink_name: &str = "";
+        let mut last_directory_name: &[u8] = b"";
+        let mut last_file_name: &[u8] = b"";
+        let mut last_symlink_name: &[u8] = b"";
 
         // check directories
         for directory_node in &self.directories {
@@ -272,8 +275,8 @@ impl Directory {
                 ValidateDirectoryError::InvalidDigestLen,
             )?;
 
-            update_if_lt_prev(&mut last_directory_name, directory_node.name.as_str())?;
-            insert_once(&mut seen_names, directory_node.name.as_str())?;
+            update_if_lt_prev(&mut last_directory_name, &directory_node.name)?;
+            insert_once(&mut seen_names, &directory_node.name)?;
         }
 
         // check files
@@ -281,16 +284,16 @@ impl Directory {
             validate_node_name(&file_node.name, ValidateDirectoryError::InvalidName)?;
             validate_digest(&file_node.digest, ValidateDirectoryError::InvalidDigestLen)?;
 
-            update_if_lt_prev(&mut last_file_name, file_node.name.as_str())?;
-            insert_once(&mut seen_names, file_node.name.as_str())?;
+            update_if_lt_prev(&mut last_file_name, &file_node.name)?;
+            insert_once(&mut seen_names, &file_node.name)?;
         }
 
         // check symlinks
         for symlink_node in &self.symlinks {
             validate_node_name(&symlink_node.name, ValidateDirectoryError::InvalidName)?;
 
-            update_if_lt_prev(&mut last_symlink_name, symlink_node.name.as_str())?;
-            insert_once(&mut seen_names, symlink_node.name.as_str())?;
+            update_if_lt_prev(&mut last_symlink_name, &symlink_node.name)?;
+            insert_once(&mut seen_names, &symlink_node.name)?;
         }
 
         Ok(())
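For readers skimming the `proto/mod.rs` hunks above: node names are now validated as raw bytes rather than `&str`. The standalone sketch below is my own illustration, not part of the commit; the helper name `is_valid_node_name` and the `main` harness are invented for the example. It mirrors the checks in the new byte-based `validate_node_name` to show that names which are not valid UTF-8 are now representable, while slashes, NUL bytes, `.`, `..` and the empty name are still rejected.

```rust
// Sketch only: mirrors the byte-oriented checks of validate_node_name above,
// without the error-constructor parameter used in the real function.
fn is_valid_node_name(name: &[u8]) -> bool {
    !(name.is_empty()
        || name == b".."
        || name == b"."
        || name.contains(&0x00)
        || name.contains(&b'/'))
}

fn main() {
    // Non-UTF-8 names (e.g. latin-1 encoded file names) are representable now.
    assert!(is_valid_node_name(b"caf\xe9"));
    // The structural restrictions still hold.
    assert!(!is_valid_node_name(b"foo/bar"));
    assert!(!is_valid_node_name(b".."));
    assert!(!is_valid_node_name(b"\x00"));
    assert!(!is_valid_node_name(b""));
    println!("all checks passed");
}
```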
diff --git a/tvix/store/src/proto/tests/directory.rs b/tvix/store/src/proto/tests/directory.rs
index 8d6ca7241d7a..48eeaa7b5fb4 100644
--- a/tvix/store/src/proto/tests/directory.rs
+++ b/tvix/store/src/proto/tests/directory.rs
@@ -20,7 +20,7 @@ fn size() {
     {
         let d = Directory {
             directories: vec![DirectoryNode {
-                name: String::from("foo"),
+                name: "foo".into(),
                 digest: DUMMY_DIGEST.to_vec(),
                 size: 0,
             }],
@@ -31,7 +31,7 @@ fn size() {
     {
         let d = Directory {
             directories: vec![DirectoryNode {
-                name: String::from("foo"),
+                name: "foo".into(),
                 digest: DUMMY_DIGEST.to_vec(),
                 size: 4,
             }],
@@ -42,7 +42,7 @@ fn size() {
     {
         let d = Directory {
             files: vec![FileNode {
-                name: String::from("foo"),
+                name: "foo".into(),
                 digest: DUMMY_DIGEST.to_vec(),
                 size: 42,
                 executable: false,
@@ -54,8 +54,8 @@ fn size() {
     {
         let d = Directory {
             symlinks: vec![SymlinkNode {
-                name: String::from("foo"),
-                target: String::from("bar"),
+                name: "foo".into(),
+                target: "bar".into(),
             }],
             ..Default::default()
         };
@@ -89,7 +89,7 @@ fn validate_invalid_names() {
     {
         let d = Directory {
             directories: vec![DirectoryNode {
-                name: "".to_string(),
+                name: "".into(),
                 digest: DUMMY_DIGEST.to_vec(),
                 size: 42,
             }],
@@ -97,7 +97,7 @@ fn validate_invalid_names() {
         };
         match d.validate().expect_err("must fail") {
             ValidateDirectoryError::InvalidName(n) => {
-                assert_eq!(n, "")
+                assert_eq!(n, b"")
             }
             _ => panic!("unexpected error"),
         };
@@ -106,7 +106,7 @@ fn validate_invalid_names() {
     {
         let d = Directory {
             directories: vec![DirectoryNode {
-                name: ".".to_string(),
+                name: ".".into(),
                 digest: DUMMY_DIGEST.to_vec(),
                 size: 42,
             }],
@@ -114,7 +114,7 @@ fn validate_invalid_names() {
         };
         match d.validate().expect_err("must fail") {
             ValidateDirectoryError::InvalidName(n) => {
-                assert_eq!(n, ".")
+                assert_eq!(n, b".")
             }
             _ => panic!("unexpected error"),
         };
@@ -123,7 +123,7 @@ fn validate_invalid_names() {
     {
         let d = Directory {
             files: vec![FileNode {
-                name: "..".to_string(),
+                name: "..".into(),
                 digest: DUMMY_DIGEST.to_vec(),
                 size: 42,
                 executable: false,
@@ -132,7 +132,7 @@ fn validate_invalid_names() {
         };
         match d.validate().expect_err("must fail") {
             ValidateDirectoryError::InvalidName(n) => {
-                assert_eq!(n, "..")
+                assert_eq!(n, b"..")
             }
             _ => panic!("unexpected error"),
         };
@@ -141,14 +141,14 @@ fn validate_invalid_names() {
     {
         let d = Directory {
             symlinks: vec![SymlinkNode {
-                name: "\x00".to_string(),
-                target: "foo".to_string(),
+                name: "\x00".into(),
+                target: "foo".into(),
             }],
             ..Default::default()
         };
         match d.validate().expect_err("must fail") {
             ValidateDirectoryError::InvalidName(n) => {
-                assert_eq!(n, "\x00")
+                assert_eq!(n, b"\x00")
             }
             _ => panic!("unexpected error"),
         };
@@ -157,14 +157,14 @@ fn validate_invalid_names() {
     {
         let d = Directory {
             symlinks: vec![SymlinkNode {
-                name: "foo/bar".to_string(),
-                target: "foo".to_string(),
+                name: "foo/bar".into(),
+                target: "foo".into(),
             }],
             ..Default::default()
         };
         match d.validate().expect_err("must fail") {
             ValidateDirectoryError::InvalidName(n) => {
-                assert_eq!(n, "foo/bar")
+                assert_eq!(n, b"foo/bar")
             }
             _ => panic!("unexpected error"),
         };
@@ -175,7 +175,7 @@ fn validate_invalid_digest() {
     let d = Directory {
         directories: vec![DirectoryNode {
-            name: "foo".to_string(),
+            name: "foo".into(),
             digest: vec![0x00, 0x42], // invalid length
             size: 42,
         }],
@@ -196,12 +196,12 @@ fn validate_sorting() {
         let d = Directory {
             directories: vec![
                 DirectoryNode {
-                    name: "b".to_string(),
+                    name: "b".into(),
                     digest: DUMMY_DIGEST.to_vec(),
                     size: 42,
                 },
                 DirectoryNode {
-                    name: "a".to_string(),
+                    name: "a".into(),
                     digest: DUMMY_DIGEST.to_vec(),
                     size: 42,
                 },
@@ -210,7 +210,7 @@ fn validate_sorting() {
         };
         match d.validate().expect_err("must fail") {
             ValidateDirectoryError::WrongSorting(s) => {
-                assert_eq!(s, "a".to_string());
+                assert_eq!(s, b"a");
             }
             _ => panic!("unexpected error"),
         }
@@ -221,12 +221,12 @@ fn validate_sorting() {
         let d = Directory {
             directories: vec![
                 DirectoryNode {
-                    name: "a".to_string(),
+                    name: "a".into(),
                     digest: DUMMY_DIGEST.to_vec(),
                     size: 42,
                 },
                 DirectoryNode {
-                    name: "a".to_string(),
+                    name: "a".into(),
                     digest: DUMMY_DIGEST.to_vec(),
                     size: 42,
                 },
@@ -235,7 +235,7 @@ fn validate_sorting() {
         };
         match d.validate().expect_err("must fail") {
             ValidateDirectoryError::DuplicateName(s) => {
-                assert_eq!(s, "a".to_string());
+                assert_eq!(s, b"a");
             }
             _ => panic!("unexpected error"),
         }
@@ -246,12 +246,12 @@ fn validate_sorting() {
         let d = Directory {
             directories: vec![
                 DirectoryNode {
-                    name: "a".to_string(),
+                    name: "a".into(),
                     digest: DUMMY_DIGEST.to_vec(),
                     size: 42,
                 },
                 DirectoryNode {
-                    name: "b".to_string(),
+                    name: "b".into(),
                     digest: DUMMY_DIGEST.to_vec(),
                     size: 42,
                 },
@@ -267,19 +267,19 @@ fn validate_sorting() {
         let d = Directory {
             directories: vec![
                 DirectoryNode {
-                    name: "b".to_string(),
+                    name: "b".into(),
                     digest: DUMMY_DIGEST.to_vec(),
                     size: 42,
                 },
                 DirectoryNode {
-                    name: "c".to_string(),
+                    name: "c".into(),
                     digest: DUMMY_DIGEST.to_vec(),
                     size: 42,
                 },
             ],
             symlinks: vec![SymlinkNode {
-                name: "a".to_string(),
-                target: "foo".to_string(),
+                name: "a".into(),
+                target: "foo".into(),
             }],
             ..Default::default()
         };
diff --git a/tvix/store/src/proto/tests/directory_nodes_iterator.rs b/tvix/store/src/proto/tests/directory_nodes_iterator.rs
index 9a283f72bd45..68f147a33210 100644
--- a/tvix/store/src/proto/tests/directory_nodes_iterator.rs
+++ b/tvix/store/src/proto/tests/directory_nodes_iterator.rs
@@ -1,7 +1,7 @@
-use crate::proto::node::Node;
 use crate::proto::Directory;
 use crate::proto::DirectoryNode;
 use crate::proto::FileNode;
+use crate::proto::NamedNode;
 use crate::proto::SymlinkNode;
 
 #[test]
@@ -9,68 +9,66 @@ fn iterator() {
     let d = Directory {
         directories: vec![
             DirectoryNode {
-                name: "c".to_string(),
+                name: "c".into(),
                 ..DirectoryNode::default()
             },
             DirectoryNode {
-                name: "d".to_string(),
+                name: "d".into(),
                 ..DirectoryNode::default()
             },
             DirectoryNode {
-                name: "h".to_string(),
+                name: "h".into(),
                 ..DirectoryNode::default()
             },
             DirectoryNode {
-                name: "l".to_string(),
+                name: "l".into(),
                 ..DirectoryNode::default()
             },
         ],
         files: vec![
             FileNode {
-                name: "b".to_string(),
+                name: "b".into(),
                 ..FileNode::default()
             },
             FileNode {
-                name: "e".to_string(),
+                name: "e".into(),
                 ..FileNode::default()
             },
             FileNode {
-                name: "g".to_string(),
+                name: "g".into(),
                 ..FileNode::default()
             },
             FileNode {
-                name: "j".to_string(),
+                name: "j".into(),
                 ..FileNode::default()
             },
         ],
         symlinks: vec![
             SymlinkNode {
-                name: "a".to_string(),
+                name: "a".into(),
                 ..SymlinkNode::default()
             },
             SymlinkNode {
-                name: "f".to_string(),
+                name: "f".into(),
                 ..SymlinkNode::default()
             },
             SymlinkNode {
-                name: "i".to_string(),
+                name: "i".into(),
                 ..SymlinkNode::default()
             },
             SymlinkNode {
-                name: "k".to_string(),
+                name: "k".into(),
                 ..SymlinkNode::default()
             },
         ],
     };
 
+    // We keep this strings here and convert to string to make the comparison
+    // less messy.
     let mut node_names: Vec<String> = vec![];
 
     for node in d.nodes() {
-        match node {
-            Node::Directory(n) => node_names.push(n.name),
-            Node::File(n) => node_names.push(n.name),
-            Node::Symlink(n) => node_names.push(n.name),
-        };
+        node_names.push(String::from_utf8(node.get_name().to_vec()).unwrap());
     }
 
     assert_eq!(
diff --git a/tvix/store/src/proto/tests/grpc_directoryservice.rs b/tvix/store/src/proto/tests/grpc_directoryservice.rs
index a1058706d521..73ce0082d3ca 100644
--- a/tvix/store/src/proto/tests/grpc_directoryservice.rs
+++ b/tvix/store/src/proto/tests/grpc_directoryservice.rs
@@ -190,8 +190,8 @@ async fn put_reject_failed_validation() {
     // construct a broken Directory message that fails validation
     let broken_directory = Directory {
         symlinks: vec![SymlinkNode {
-            name: "".to_string(),
-            target: "doesntmatter".to_string(),
+            name: "".into(),
+            target: "doesntmatter".into(),
         }],
         ..Default::default()
     };
@@ -214,7 +214,7 @@ async fn put_reject_wrong_size() {
     // Construct a directory referring to DIRECTORY_A, but with wrong size.
     let broken_parent_directory = Directory {
         directories: vec![DirectoryNode {
-            name: "foo".to_string(),
+            name: "foo".into(),
             digest: DIRECTORY_A.digest().to_vec(),
             size: 42,
         }],
diff --git a/tvix/store/src/proto/tests/grpc_pathinfoservice.rs b/tvix/store/src/proto/tests/grpc_pathinfoservice.rs
index 186461d16528..57c88c2863b1 100644
--- a/tvix/store/src/proto/tests/grpc_pathinfoservice.rs
+++ b/tvix/store/src/proto/tests/grpc_pathinfoservice.rs
@@ -48,8 +48,8 @@ async fn put_get() {
     let path_info = PathInfo {
         node: Some(Node {
             node: Some(Symlink(SymlinkNode {
-                name: "00000000000000000000000000000000-foo".to_string(),
-                target: "doesntmatter".to_string(),
+                name: "00000000000000000000000000000000-foo".into(),
+                target: "doesntmatter".into(),
             })),
         }),
         ..Default::default()
diff --git a/tvix/store/src/proto/tests/pathinfo.rs b/tvix/store/src/proto/tests/pathinfo.rs
index bccec539f9f3..a14554ee4fd3 100644
--- a/tvix/store/src/proto/tests/pathinfo.rs
+++ b/tvix/store/src/proto/tests/pathinfo.rs
@@ -43,7 +43,7 @@ fn validate_no_node(
 
 #[test_case(
     proto::DirectoryNode {
-        name: DUMMY_NAME.to_string(),
+        name: DUMMY_NAME.into(),
         digest: DUMMY_DIGEST.to_vec(),
         size: 0,
     },
@@ -52,7 +52,7 @@ fn validate_no_node(
 )]
 #[test_case(
     proto::DirectoryNode {
-        name: DUMMY_NAME.to_string(),
+        name: DUMMY_NAME.into(),
         digest: vec![],
         size: 0,
     },
@@ -61,12 +61,12 @@ fn validate_no_node(
 )]
 #[test_case(
     proto::DirectoryNode {
-        name: "invalid".to_string(),
+        name: "invalid".into(),
         digest: DUMMY_DIGEST.to_vec(),
         size: 0,
     },
     Err(ValidatePathInfoError::InvalidNodeName(
-        "invalid".to_string(),
+        "invalid".into(),
         store_path::Error::InvalidLength()
     ));
     "invalid node name"
@@ -87,7 +87,7 @@ fn validate_directory(
 
 #[test_case(
     proto::FileNode {
-        name: DUMMY_NAME.to_string(),
+        name: DUMMY_NAME.into(),
         digest: DUMMY_DIGEST.to_vec(),
         size: 0,
         executable: false,
@@ -97,7 +97,7 @@ fn validate_directory(
 )]
 #[test_case(
     proto::FileNode {
-        name: DUMMY_NAME.to_string(),
+        name: DUMMY_NAME.into(),
         digest: vec![],
         ..Default::default()
     },
@@ -106,12 +106,12 @@ fn validate_directory(
 )]
 #[test_case(
     proto::FileNode {
-        name: "invalid".to_string(),
+        name: "invalid".into(),
         digest: DUMMY_DIGEST.to_vec(),
         ..Default::default()
     },
     Err(ValidatePathInfoError::InvalidNodeName(
-        "invalid".to_string(),
+        "invalid".into(),
         store_path::Error::InvalidLength()
     ));
     "invalid node name"
@@ -129,7 +129,7 @@ fn validate_file(t_file_node: proto::FileNode, t_result: Result<StorePath, Valid
 
 #[test_case(
     proto::SymlinkNode {
-        name: DUMMY_NAME.to_string(),
+        name: DUMMY_NAME.into(),
         ..Default::default()
     },
     Ok(StorePath::from_str(DUMMY_NAME).expect("must succeed"));
@@ -137,11 +137,11 @@ fn validate_file(t_file_node: proto::FileNode, t_result: Result<StorePath, Valid
 )]
 #[test_case(
     proto::SymlinkNode {
-        name: "invalid".to_string(),
+        name: "invalid".into(),
         ..Default::default()
     },
     Err(ValidatePathInfoError::InvalidNodeName(
-        "invalid".to_string(),
+        "invalid".into(),
         store_path::Error::InvalidLength()
     ));
     "invalid node name"
@@ -166,7 +166,7 @@ fn validate_references() {
     let path_info = PathInfo {
         node: Some(Node {
             node: Some(proto::node::Node::Directory(proto::DirectoryNode {
-                name: DUMMY_NAME.to_string(),
+                name: DUMMY_NAME.into(),
                 digest: DUMMY_DIGEST.to_vec(),
                 size: 0,
             })),
diff --git a/tvix/store/src/store_io.rs b/tvix/store/src/store_io.rs
index fcbc5842a3e2..701b52f667ac 100644
--- a/tvix/store/src/store_io.rs
+++ b/tvix/store/src/store_io.rs
@@ -7,7 +7,6 @@ use nix_compat::{
     nixhash::{HashAlgo, NixHash, NixHashWithMode},
     store_path::{build_regular_ca_path, StorePath},
 };
-use smol_str::SmolStr;
 use std::{io, path::Path, path::PathBuf, sync::Arc};
 use tracing::{error, instrument, warn};
 use tvix_eval::{EvalIO, FileType, StdIO};
@@ -130,7 +129,7 @@ impl TvixStoreIO {
         // assemble a new root_node with a name that is derived from the nar hash.
         let renamed_root_node = {
-            let name = output_path.to_string();
+            let name = output_path.to_string().into_bytes();
 
             match root_node {
                 crate::proto::node::Node::Directory(n) => {
@@ -265,7 +264,7 @@ impl EvalIO for TvixStoreIO {
     }
 
     #[instrument(skip(self), ret, err)]
-    fn read_dir(&self, path: &Path) -> Result<Vec<(SmolStr, FileType)>, io::Error> {
+    fn read_dir(&self, path: &Path) -> Result<Vec<(Vec<u8>, FileType)>, io::Error> {
         if let Ok((store_path, sub_path)) =
             StorePath::from_absolute_path_full(&path.to_string_lossy())
         {
@@ -285,17 +284,17 @@ impl EvalIO for TvixStoreIO {
                 })?;
 
                 if let Some(directory) = self.directory_service.get(&digest)? {
-                    let mut children: Vec<(SmolStr, FileType)> = Vec::new();
+                    let mut children: Vec<(Vec<u8>, FileType)> = Vec::new();
                     for node in directory.nodes() {
                         children.push(match node {
                             crate::proto::node::Node::Directory(e) => {
-                                (e.name.into(), FileType::Directory)
+                                (e.name, FileType::Directory)
                             }
                             crate::proto::node::Node::File(e) => {
-                                (e.name.into(), FileType::Regular)
+                                (e.name, FileType::Regular)
                             }
                             crate::proto::node::Node::Symlink(e) => {
-                                (e.name.into(), FileType::Symlink)
+                                (e.name, FileType::Symlink)
                             }
                         })
                    }
@@ -338,11 +337,20 @@ impl EvalIO for TvixStoreIO {
         let path_info = self.import_path_with_pathinfo(path)?;
 
         // from the [PathInfo], extract the store path (as string).
-        let mut path = PathBuf::from(nix_compat::store_path::STORE_DIR_WITH_SLASH);
-        path.push(path_info.node.unwrap().node.unwrap().get_name());
+        Ok({
+            let mut path = PathBuf::from(nix_compat::store_path::STORE_DIR_WITH_SLASH);
 
-        // and return it
-        Ok(path)
+            let root_node_name = path_info.node.unwrap().node.unwrap().get_name().to_vec();
+
+            // This must be a string, otherwise it would have failed validation.
+            let root_node_name = String::from_utf8(root_node_name).unwrap();
+
+            // append to the PathBuf
+            path.push(root_node_name);
+
+            // and return it
+            path
+        })
     }
 
     #[instrument(skip(self), ret)]
diff --git a/tvix/store/src/tests/fixtures.rs b/tvix/store/src/tests/fixtures.rs
index 934d9e4c5302..a1df729f1c5c 100644
--- a/tvix/store/src/tests/fixtures.rs
+++ b/tvix/store/src/tests/fixtures.rs
@@ -33,7 +33,7 @@ lazy_static! {
     pub static ref DIRECTORY_WITH_KEEP: proto::Directory = proto::Directory {
         directories: vec![],
         files: vec![FileNode {
-            name: ".keep".to_string(),
+            name: b".keep".to_vec(),
             digest: EMPTY_BLOB_DIGEST.to_vec(),
             size: 0,
             executable: false,
@@ -42,25 +42,25 @@ lazy_static! {
     };
     pub static ref DIRECTORY_COMPLICATED: proto::Directory = proto::Directory {
         directories: vec![DirectoryNode {
-            name: "keep".to_string(),
+            name: b"keep".to_vec(),
             digest: DIRECTORY_WITH_KEEP.digest().to_vec(),
             size: DIRECTORY_WITH_KEEP.size(),
         }],
         files: vec![FileNode {
-            name: ".keep".to_string(),
+            name: b".keep".to_vec(),
             digest: EMPTY_BLOB_DIGEST.to_vec(),
             size: 0,
             executable: false,
         }],
         symlinks: vec![SymlinkNode {
-            name: "aa".to_string(),
-            target: "/nix/store/somewhereelse".to_string(),
+            name: b"aa".to_vec(),
+            target: b"/nix/store/somewhereelse".to_vec(),
         }],
     };
     pub static ref DIRECTORY_A: Directory = Directory::default();
     pub static ref DIRECTORY_B: Directory = Directory {
         directories: vec![DirectoryNode {
-            name: "a".to_string(),
+            name: b"a".to_vec(),
             digest: DIRECTORY_A.digest().to_vec(),
             size: DIRECTORY_A.size(),
         }],
@@ -69,12 +69,12 @@ lazy_static! {
     pub static ref DIRECTORY_C: Directory = Directory {
         directories: vec![
             DirectoryNode {
-                name: "a".to_string(),
+                name: b"a".to_vec(),
                 digest: DIRECTORY_A.digest().to_vec(),
                 size: DIRECTORY_A.size(),
             },
             DirectoryNode {
-                name: "a'".to_string(),
+                name: b"a'".to_vec(),
                 digest: DIRECTORY_A.digest().to_vec(),
                 size: DIRECTORY_A.size(),
             }
diff --git a/tvix/store/src/tests/import.rs b/tvix/store/src/tests/import.rs
index ab6557421947..291501f72768 100644
--- a/tvix/store/src/tests/import.rs
+++ b/tvix/store/src/tests/import.rs
@@ -6,6 +6,9 @@ use crate::tests::fixtures::*;
 use tempfile::TempDir;
 
 #[cfg(target_family = "unix")]
+use std::os::unix::ffi::OsStrExt;
+
+#[cfg(target_family = "unix")]
 #[test]
 fn symlink() {
     let tmpdir = TempDir::new().unwrap();
@@ -26,8 +29,8 @@ fn symlink() {
 
     assert_eq!(
         crate::proto::node::Node::Symlink(proto::SymlinkNode {
-            name: "doesntmatter".to_string(),
-            target: "/nix/store/somewhereelse".to_string(),
+            name: "doesntmatter".into(),
+            target: "/nix/store/somewhereelse".into(),
         }),
         root_node,
     )
@@ -50,7 +53,7 @@ fn single_file() {
 
     assert_eq!(
         crate::proto::node::Node::File(proto::FileNode {
-            name: "root".to_string(),
+            name: "root".into(),
             digest: HELLOWORLD_BLOB_DIGEST.to_vec(),
             size: HELLOWORLD_BLOB_CONTENTS.len() as u32,
             executable: false,
@@ -62,6 +65,7 @@ fn single_file() {
     assert!(blob_service.has(&HELLOWORLD_BLOB_DIGEST).unwrap());
 }
 
+#[cfg(target_family = "unix")]
 #[test]
 fn complicated() {
     let tmpdir = TempDir::new().unwrap();
@@ -88,12 +92,7 @@ fn complicated() {
     // ensure root_node matched expectations
     assert_eq!(
         crate::proto::node::Node::Directory(proto::DirectoryNode {
-            name: tmpdir
-                .path()
-                .file_name()
-                .unwrap()
-                .to_string_lossy()
-                .to_string(),
+            name: tmpdir.path().file_name().unwrap().as_bytes().to_vec(),
             digest: DIRECTORY_COMPLICATED.digest().to_vec(),
             size: DIRECTORY_COMPLICATED.size(),
         }),
diff --git a/tvix/store/src/tests/nar_renderer.rs b/tvix/store/src/tests/nar_renderer.rs
index b92fdc087b28..055538376b72 100644
--- a/tvix/store/src/tests/nar_renderer.rs
+++ b/tvix/store/src/tests/nar_renderer.rs
@@ -15,8 +15,8 @@ fn single_symlink() {
     write_nar(
         &mut buf,
         &crate::proto::node::Node::Symlink(SymlinkNode {
-            name: "doesntmatter".to_string(),
-            target: "/nix/store/somewhereelse".to_string(),
+            name: "doesntmatter".into(),
+            target: "/nix/store/somewhereelse".into(),
         }),
        // don't put anything in the stores, as we don't actually do any requests.
        gen_blob_service(),
@@ -35,7 +35,7 @@ fn single_file_missing_blob() {
     let e = write_nar(
         &mut buf,
         &crate::proto::node::Node::File(FileNode {
-            name: "doesntmatter".to_string(),
+            name: "doesntmatter".into(),
             digest: HELLOWORLD_BLOB_DIGEST.to_vec(),
             size: HELLOWORLD_BLOB_CONTENTS.len() as u32,
             executable: false,
@@ -76,7 +76,7 @@ fn single_file_wrong_blob_size() {
     let e = write_nar(
         &mut buf,
         &crate::proto::node::Node::File(FileNode {
-            name: "doesntmatter".to_string(),
+            name: "doesntmatter".into(),
             digest: HELLOWORLD_BLOB_DIGEST.to_vec(),
             size: 42, // <- note the wrong size here!
             executable: false,
@@ -101,7 +101,7 @@ fn single_file_wrong_blob_size() {
     let e = write_nar(
         &mut buf,
         &crate::proto::node::Node::File(FileNode {
-            name: "doesntmatter".to_string(),
+            name: "doesntmatter".into(),
             digest: HELLOWORLD_BLOB_DIGEST.to_vec(),
             size: 2, // <- note the wrong size here!
             executable: false,
@@ -138,7 +138,7 @@ fn single_file() {
     write_nar(
         &mut buf,
         &crate::proto::node::Node::File(FileNode {
-            name: "doesntmatter".to_string(),
+            name: "doesntmatter".into(),
             digest: HELLOWORLD_BLOB_DIGEST.to_vec(),
             size: HELLOWORLD_BLOB_CONTENTS.len() as u32,
             executable: false,
@@ -176,7 +176,7 @@ fn test_complicated() {
     write_nar(
         &mut buf,
         &crate::proto::node::Node::Directory(DirectoryNode {
-            name: "doesntmatter".to_string(),
+            name: "doesntmatter".into(),
             digest: DIRECTORY_COMPLICATED.digest().to_vec(),
             size: DIRECTORY_COMPLICATED.size(),
         }),
@@ -190,7 +190,7 @@ fn test_complicated() {
     // ensure calculate_nar does return the correct sha256 digest and sum.
     let (nar_size, nar_digest) = calculate_size_and_sha256(
         &crate::proto::node::Node::Directory(DirectoryNode {
-            name: "doesntmatter".to_string(),
+            name: "doesntmatter".into(),
             digest: DIRECTORY_COMPLICATED.digest().to_vec(),
             size: DIRECTORY_COMPLICATED.size(),
         }),