| author | Florian Klink <flokli@flokli.de> | 2023-09-21T19·32+0300 |
|---|---|---|
| committer | clbot <clbot@tvl.fyi> | 2023-09-22T12·51+0000 |
| commit | 32f41458c0a0f62bf906021ef096c465ccc45581 (patch) | |
| tree | 3aaab8c453871f39c46fb43f8278aa933b24519d /tvix/store/src/proto/tests | |
| parent | d8ef0cfb4a859af7e33828b013356412d02532da (diff) | |
refactor(tvix): move castore into tvix-castore crate r/6629
This splits the pure content-addressed layers from tvix-store into a `castore` crate, and only leaves PathInfo-related things, as well as the CLI entrypoint, in the tvix-store crate.

Notable changes:

- `fixtures` and `utils` had to be moved out of the `test` cfg, so they can be imported from tvix-store.
- Some ad-hoc fixtures in the tests were moved to proper fixtures in the same step.
- The protos are now created by a (more static) recipe in the protos/ directory.

The (now two) golang targets are commented out, as it's not possible to update them properly in the same CL. This will be done by a follow-up CL once this is merged (and whitby is deployed).

Bug: https://b.tvl.fyi/issues/301
Change-Id: I8d675d4bf1fb697eb7d479747c1b1e3635718107
Reviewed-on: https://cl.tvl.fyi/c/depot/+/9370
Reviewed-by: tazjin <tazjin@tvl.su>
Reviewed-by: flokli <flokli@flokli.de>
Autosubmit: flokli <flokli@flokli.de>
Tested-by: BuildkiteCI
Reviewed-by: Connor Brewster <cbrewster@hey.com>
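To make the shape of the split concrete: the hunks further down swap `crate::proto` node imports for the new `tvix_castore::proto` module (aliased as `castorepb`), while `PathInfo` stays in tvix-store. The following is a minimal sketch of that pattern as seen by a downstream consumer; the public `tvix_store::proto` path and the exact dependency names are assumptions for illustration, not something this CL pins down.

```rust
// Sketch only: assumes `tvix-store` and `tvix-castore` as dependencies and a
// public `tvix_store::proto` module; it mirrors the updated put_get test below.
use tvix_castore::proto as castorepb; // node/directory types now live here
use tvix_store::proto::PathInfo; // PathInfo (and NarInfo) stay in tvix-store

fn main() {
    // A PathInfo whose root node is a symlink, built from the castore node types.
    let path_info = PathInfo {
        node: Some(castorepb::Node {
            node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode {
                name: "00000000000000000000000000000000-foo".into(),
                target: "doesntmatter".into(),
            })),
        }),
        ..Default::default()
    };

    println!("{:?}", path_info);
}
```

Inside the tvix-store crate itself the same change reads as `use crate::proto::PathInfo;` plus `use tvix_castore::proto as castorepb;`, which is exactly what the grpc_pathinfoservice.rs and pathinfo.rs hunks below do.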
Diffstat (limited to 'tvix/store/src/proto/tests')
-rw-r--r-- | tvix/store/src/proto/tests/directory.rs | 287
-rw-r--r-- | tvix/store/src/proto/tests/directory_nodes_iterator.rs | 78
-rw-r--r-- | tvix/store/src/proto/tests/grpc_blobservice.rs | 100
-rw-r--r-- | tvix/store/src/proto/tests/grpc_directoryservice.rs | 239
-rw-r--r-- | tvix/store/src/proto/tests/grpc_pathinfoservice.rs | 8
-rw-r--r-- | tvix/store/src/proto/tests/mod.rs | 4
-rw-r--r-- | tvix/store/src/proto/tests/pathinfo.rs | 76
7 files changed, 33 insertions(+), 759 deletions(-)
diff --git a/tvix/store/src/proto/tests/directory.rs b/tvix/store/src/proto/tests/directory.rs deleted file mode 100644 index eed49b2b593c..000000000000 --- a/tvix/store/src/proto/tests/directory.rs +++ /dev/null @@ -1,287 +0,0 @@ -use crate::proto::{Directory, DirectoryNode, FileNode, SymlinkNode, ValidateDirectoryError}; -use lazy_static::lazy_static; - -lazy_static! { - static ref DUMMY_DIGEST: [u8; 32] = [ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, - ]; -} -#[test] -fn size() { - { - let d = Directory::default(); - assert_eq!(d.size(), 0); - } - { - let d = Directory { - directories: vec![DirectoryNode { - name: "foo".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 0, - }], - ..Default::default() - }; - assert_eq!(d.size(), 1); - } - { - let d = Directory { - directories: vec![DirectoryNode { - name: "foo".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 4, - }], - ..Default::default() - }; - assert_eq!(d.size(), 5); - } - { - let d = Directory { - files: vec![FileNode { - name: "foo".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 42, - executable: false, - }], - ..Default::default() - }; - assert_eq!(d.size(), 1); - } - { - let d = Directory { - symlinks: vec![SymlinkNode { - name: "foo".into(), - target: "bar".into(), - }], - ..Default::default() - }; - assert_eq!(d.size(), 1); - } -} - -#[test] -fn digest() { - let d = Directory::default(); - - assert_eq!( - d.digest(), - vec![ - 0xaf, 0x13, 0x49, 0xb9, 0xf5, 0xf9, 0xa1, 0xa6, 0xa0, 0x40, 0x4d, 0xea, 0x36, 0xdc, - 0xc9, 0x49, 0x9b, 0xcb, 0x25, 0xc9, 0xad, 0xc1, 0x12, 0xb7, 0xcc, 0x9a, 0x93, 0xca, - 0xe4, 0x1f, 0x32, 0x62 - ] - .try_into() - .unwrap() - ) -} - -#[test] -fn validate_empty() { - let d = Directory::default(); - assert_eq!(d.validate(), Ok(())); -} - -#[test] -fn validate_invalid_names() { - { - let d = Directory { - directories: vec![DirectoryNode { - name: "".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 42, - }], - ..Default::default() - }; - match d.validate().expect_err("must fail") { - ValidateDirectoryError::InvalidName(n) => { - assert_eq!(n, b"") - } - _ => panic!("unexpected error"), - }; - } - - { - let d = Directory { - directories: vec![DirectoryNode { - name: ".".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 42, - }], - ..Default::default() - }; - match d.validate().expect_err("must fail") { - ValidateDirectoryError::InvalidName(n) => { - assert_eq!(n, b".") - } - _ => panic!("unexpected error"), - }; - } - - { - let d = Directory { - files: vec![FileNode { - name: "..".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 42, - executable: false, - }], - ..Default::default() - }; - match d.validate().expect_err("must fail") { - ValidateDirectoryError::InvalidName(n) => { - assert_eq!(n, b"..") - } - _ => panic!("unexpected error"), - }; - } - - { - let d = Directory { - symlinks: vec![SymlinkNode { - name: "\x00".into(), - target: "foo".into(), - }], - ..Default::default() - }; - match d.validate().expect_err("must fail") { - ValidateDirectoryError::InvalidName(n) => { - assert_eq!(n, b"\x00") - } - _ => panic!("unexpected error"), - }; - } - - { - let d = Directory { - symlinks: vec![SymlinkNode { - name: "foo/bar".into(), - target: "foo".into(), - }], - ..Default::default() - }; - match d.validate().expect_err("must fail") { - ValidateDirectoryError::InvalidName(n) => { - assert_eq!(n, b"foo/bar") - } - _ 
=> panic!("unexpected error"), - }; - } -} - -#[test] -fn validate_invalid_digest() { - let d = Directory { - directories: vec![DirectoryNode { - name: "foo".into(), - digest: vec![0x00, 0x42].into(), // invalid length - size: 42, - }], - ..Default::default() - }; - match d.validate().expect_err("must fail") { - ValidateDirectoryError::InvalidDigestLen(n) => { - assert_eq!(n, 2) - } - _ => panic!("unexpected error"), - } -} - -#[test] -fn validate_sorting() { - // "b" comes before "a", bad. - { - let d = Directory { - directories: vec![ - DirectoryNode { - name: "b".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 42, - }, - DirectoryNode { - name: "a".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 42, - }, - ], - ..Default::default() - }; - match d.validate().expect_err("must fail") { - ValidateDirectoryError::WrongSorting(s) => { - assert_eq!(s, b"a"); - } - _ => panic!("unexpected error"), - } - } - - // "a" exists twice, bad. - { - let d = Directory { - directories: vec![ - DirectoryNode { - name: "a".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 42, - }, - DirectoryNode { - name: "a".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 42, - }, - ], - ..Default::default() - }; - match d.validate().expect_err("must fail") { - ValidateDirectoryError::DuplicateName(s) => { - assert_eq!(s, b"a"); - } - _ => panic!("unexpected error"), - } - } - - // "a" comes before "b", all good. - { - let d = Directory { - directories: vec![ - DirectoryNode { - name: "a".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 42, - }, - DirectoryNode { - name: "b".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 42, - }, - ], - ..Default::default() - }; - - d.validate().expect("validate shouldn't error"); - } - - // [b, c] and [a] are both properly sorted. 
- { - let d = Directory { - directories: vec![ - DirectoryNode { - name: "b".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 42, - }, - DirectoryNode { - name: "c".into(), - digest: DUMMY_DIGEST.to_vec().into(), - size: 42, - }, - ], - symlinks: vec![SymlinkNode { - name: "a".into(), - target: "foo".into(), - }], - ..Default::default() - }; - - d.validate().expect("validate shouldn't error"); - } -} diff --git a/tvix/store/src/proto/tests/directory_nodes_iterator.rs b/tvix/store/src/proto/tests/directory_nodes_iterator.rs deleted file mode 100644 index 68f147a33210..000000000000 --- a/tvix/store/src/proto/tests/directory_nodes_iterator.rs +++ /dev/null @@ -1,78 +0,0 @@ -use crate::proto::Directory; -use crate::proto::DirectoryNode; -use crate::proto::FileNode; -use crate::proto::NamedNode; -use crate::proto::SymlinkNode; - -#[test] -fn iterator() { - let d = Directory { - directories: vec![ - DirectoryNode { - name: "c".into(), - ..DirectoryNode::default() - }, - DirectoryNode { - name: "d".into(), - ..DirectoryNode::default() - }, - DirectoryNode { - name: "h".into(), - ..DirectoryNode::default() - }, - DirectoryNode { - name: "l".into(), - ..DirectoryNode::default() - }, - ], - files: vec![ - FileNode { - name: "b".into(), - ..FileNode::default() - }, - FileNode { - name: "e".into(), - ..FileNode::default() - }, - FileNode { - name: "g".into(), - ..FileNode::default() - }, - FileNode { - name: "j".into(), - ..FileNode::default() - }, - ], - symlinks: vec![ - SymlinkNode { - name: "a".into(), - ..SymlinkNode::default() - }, - SymlinkNode { - name: "f".into(), - ..SymlinkNode::default() - }, - SymlinkNode { - name: "i".into(), - ..SymlinkNode::default() - }, - SymlinkNode { - name: "k".into(), - ..SymlinkNode::default() - }, - ], - }; - - // We keep this strings here and convert to string to make the comparison - // less messy. - let mut node_names: Vec<String> = vec![]; - - for node in d.nodes() { - node_names.push(String::from_utf8(node.get_name().to_vec()).unwrap()); - } - - assert_eq!( - vec!["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l"], - node_names - ); -} diff --git a/tvix/store/src/proto/tests/grpc_blobservice.rs b/tvix/store/src/proto/tests/grpc_blobservice.rs deleted file mode 100644 index 497893f03dd7..000000000000 --- a/tvix/store/src/proto/tests/grpc_blobservice.rs +++ /dev/null @@ -1,100 +0,0 @@ -use crate::proto::blob_service_server::BlobService as GRPCBlobService; -use crate::proto::{BlobChunk, GRPCBlobServiceWrapper, ReadBlobRequest, StatBlobRequest}; -use crate::tests::fixtures::{BLOB_A, BLOB_A_DIGEST}; -use crate::tests::utils::gen_blob_service; -use tokio_stream::StreamExt; - -fn gen_grpc_blob_service() -> GRPCBlobServiceWrapper { - let blob_service = gen_blob_service(); - GRPCBlobServiceWrapper::from(blob_service) -} - -/// Trying to read a non-existent blob should return a not found error. -#[tokio::test] -async fn not_found_read() { - let service = gen_grpc_blob_service(); - - let resp = service - .read(tonic::Request::new(ReadBlobRequest { - digest: BLOB_A_DIGEST.clone().into(), - })) - .await; - - // We can't use unwrap_err here, because the Ok value doesn't implement - // debug. - if let Err(e) = resp { - assert_eq!(e.code(), tonic::Code::NotFound); - } else { - panic!("resp is not err") - } -} - -/// Trying to stat a non-existent blob should return a not found error. 
-#[tokio::test] -async fn not_found_stat() { - let service = gen_grpc_blob_service(); - - let resp = service - .stat(tonic::Request::new(StatBlobRequest { - digest: BLOB_A_DIGEST.clone().into(), - ..Default::default() - })) - .await - .expect_err("must fail"); - - // The resp should be a status with Code::NotFound - assert_eq!(resp.code(), tonic::Code::NotFound); -} - -/// Put a blob in the store, get it back. -#[tokio::test] -async fn put_read_stat() { - let service = gen_grpc_blob_service(); - - // Send blob A. - let put_resp = service - .put(tonic_mock::streaming_request(vec![BlobChunk { - data: BLOB_A.clone(), - }])) - .await - .expect("must succeed") - .into_inner(); - - assert_eq!(BLOB_A_DIGEST.to_vec(), put_resp.digest); - - // Stat for the digest of A. - // We currently don't ask for more granular chunking data, as we don't - // expose it yet. - let _resp = service - .stat(tonic::Request::new(StatBlobRequest { - digest: BLOB_A_DIGEST.clone().into(), - ..Default::default() - })) - .await - .expect("must succeed") - .into_inner(); - - // Read the blob. It should return the same data. - let resp = service - .read(tonic::Request::new(ReadBlobRequest { - digest: BLOB_A_DIGEST.clone().into(), - })) - .await; - - let mut rx = resp.ok().unwrap().into_inner(); - - // the stream should contain one element, a BlobChunk with the same contents as BLOB_A. - let item = rx - .next() - .await - .expect("must be some") - .expect("must succeed"); - - assert_eq!(BLOB_A.clone(), item.data); - - // … and no more elements - assert!(rx.next().await.is_none()); - - // TODO: we rely here on the blob being small enough to not get broken up into multiple chunks. - // Test with some bigger blob too -} diff --git a/tvix/store/src/proto/tests/grpc_directoryservice.rs b/tvix/store/src/proto/tests/grpc_directoryservice.rs deleted file mode 100644 index a5300039fb9f..000000000000 --- a/tvix/store/src/proto/tests/grpc_directoryservice.rs +++ /dev/null @@ -1,239 +0,0 @@ -use crate::proto::directory_service_server::DirectoryService as GRPCDirectoryService; -use crate::proto::get_directory_request::ByWhat; -use crate::proto::{Directory, DirectoryNode, SymlinkNode}; -use crate::proto::{GRPCDirectoryServiceWrapper, GetDirectoryRequest}; -use crate::tests::fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C}; -use crate::tests::utils::gen_directory_service; -use tokio_stream::StreamExt; -use tonic::Status; - -fn gen_grpc_service() -> GRPCDirectoryServiceWrapper { - let directory_service = gen_directory_service(); - GRPCDirectoryServiceWrapper::from(directory_service) -} - -/// Send the specified GetDirectoryRequest. -/// Returns an error in the case of an error response, or an error in one of -// the items in the stream, or a Vec<Directory> in the case of a successful -/// request. -async fn get_directories<S: GRPCDirectoryService>( - svc: &S, - get_directory_request: GetDirectoryRequest, -) -> Result<Vec<Directory>, Status> { - let resp = svc.get(tonic::Request::new(get_directory_request)).await; - - // if the response is an error itself, return the error, otherwise unpack - let stream = match resp { - Ok(resp) => resp, - Err(status) => return Err(status), - } - .into_inner(); - - let directory_results: Vec<Result<Directory, Status>> = stream.collect().await; - - // turn Vec<Result<Directory, Status> into Result<Vec<Directory>,Status> - directory_results.into_iter().collect() -} - -/// Trying to get a non-existent Directory should return a not found error. 
-#[tokio::test] -async fn not_found() { - let service = gen_grpc_service(); - - let resp = service - .get(tonic::Request::new(GetDirectoryRequest { - by_what: Some(ByWhat::Digest(DIRECTORY_A.digest().into())), - ..Default::default() - })) - .await; - - let mut rx = resp.expect("must succeed").into_inner().into_inner(); - - // The stream should contain one element, an error with Code::NotFound. - let item = rx - .recv() - .await - .expect("must be some") - .expect_err("must be err"); - assert_eq!(item.code(), tonic::Code::NotFound); - - // … and nothing else - assert!(rx.recv().await.is_none()); -} - -/// Put a Directory into the store, get it back. -#[tokio::test] -async fn put_get() { - let service = gen_grpc_service(); - - let streaming_request = tonic_mock::streaming_request(vec![DIRECTORY_A.clone()]); - let put_resp = service - .put(streaming_request) - .await - .expect("must succeed") - .into_inner(); - - // the sent root_digest should match the calculated digest - assert_eq!(put_resp.root_digest, DIRECTORY_A.digest().to_vec()); - - // get it back - let items = get_directories( - &service, - GetDirectoryRequest { - by_what: Some(ByWhat::Digest(DIRECTORY_A.digest().into())), - ..Default::default() - }, - ) - .await - .expect("must not error"); - - assert_eq!(vec![DIRECTORY_A.clone()], items); -} - -/// Put multiple Directories into the store, and get them back -#[tokio::test] -async fn put_get_multiple() { - let service = gen_grpc_service(); - - // sending "b" (which refers to "a") without sending "a" first should fail. - let put_resp = service - .put(tonic_mock::streaming_request(vec![DIRECTORY_B.clone()])) - .await - .expect_err("must fail"); - - assert_eq!(tonic::Code::InvalidArgument, put_resp.code()); - - // sending "a", then "b" should succeed, and the response should contain the digest of b. - let put_resp = service - .put(tonic_mock::streaming_request(vec![ - DIRECTORY_A.clone(), - DIRECTORY_B.clone(), - ])) - .await - .expect("must succeed"); - - assert_eq!( - DIRECTORY_B.digest().to_vec(), - put_resp.into_inner().root_digest - ); - - // now, request b, first in non-recursive mode. - let items = get_directories( - &service, - GetDirectoryRequest { - recursive: false, - by_what: Some(ByWhat::Digest(DIRECTORY_B.digest().into())), - }, - ) - .await - .expect("must not error"); - - // We expect to only get b. - assert_eq!(vec![DIRECTORY_B.clone()], items); - - // now, request b, but in recursive mode. - let items = get_directories( - &service, - GetDirectoryRequest { - recursive: true, - by_what: Some(ByWhat::Digest(DIRECTORY_B.digest().into())), - }, - ) - .await - .expect("must not error"); - - // We expect to get b, and then a, because that's how we traverse down. - assert_eq!(vec![DIRECTORY_B.clone(), DIRECTORY_A.clone()], items); -} - -/// Put multiple Directories into the store, and omit duplicates. -#[tokio::test] -async fn put_get_dedup() { - let service = gen_grpc_service(); - - // Send "A", then "C", which refers to "A" two times - // Pretend we're a dumb client sending A twice. - let put_resp = service - .put(tonic_mock::streaming_request(vec![ - DIRECTORY_A.clone(), - DIRECTORY_A.clone(), - DIRECTORY_C.clone(), - ])) - .await - .expect("must succeed"); - - assert_eq!( - DIRECTORY_C.digest().to_vec(), - put_resp.into_inner().root_digest - ); - - // Ask for "C" recursively. We expect to only get "A" once, as there's no point sending it twice. 
- let items = get_directories( - &service, - GetDirectoryRequest { - recursive: true, - by_what: Some(ByWhat::Digest(DIRECTORY_C.digest().into())), - }, - ) - .await - .expect("must not error"); - - // We expect to get C, and then A (once, as the second A has been deduplicated). - assert_eq!(vec![DIRECTORY_C.clone(), DIRECTORY_A.clone()], items); -} - -/// Trying to upload a Directory failing validation should fail. -#[tokio::test] -async fn put_reject_failed_validation() { - let service = gen_grpc_service(); - - // construct a broken Directory message that fails validation - let broken_directory = Directory { - symlinks: vec![SymlinkNode { - name: "".into(), - target: "doesntmatter".into(), - }], - ..Default::default() - }; - assert!(broken_directory.validate().is_err()); - - // send it over, it must fail - let put_resp = service - .put(tonic_mock::streaming_request(vec![broken_directory])) - .await - .expect_err("must fail"); - - assert_eq!(put_resp.code(), tonic::Code::InvalidArgument); -} - -/// Trying to upload a Directory with wrong size should fail. -#[tokio::test] -async fn put_reject_wrong_size() { - let service = gen_grpc_service(); - - // Construct a directory referring to DIRECTORY_A, but with wrong size. - let broken_parent_directory = Directory { - directories: vec![DirectoryNode { - name: "foo".into(), - digest: DIRECTORY_A.digest().into(), - size: 42, - }], - ..Default::default() - }; - // Make sure we got the size wrong. - assert_ne!( - broken_parent_directory.directories[0].size, - DIRECTORY_A.size() - ); - - // now upload both (first A, then the broken parent). This must fail. - let put_resp = service - .put(tonic_mock::streaming_request(vec![ - DIRECTORY_A.clone(), - broken_parent_directory, - ])) - .await - .expect_err("must fail"); - - assert_eq!(put_resp.code(), tonic::Code::InvalidArgument); -} diff --git a/tvix/store/src/proto/tests/grpc_pathinfoservice.rs b/tvix/store/src/proto/tests/grpc_pathinfoservice.rs index 114e89cacc10..c0b953d0f2e9 100644 --- a/tvix/store/src/proto/tests/grpc_pathinfoservice.rs +++ b/tvix/store/src/proto/tests/grpc_pathinfoservice.rs @@ -1,9 +1,8 @@ use crate::proto::get_path_info_request::ByWhat::ByOutputHash; -use crate::proto::node::Node::Symlink; use crate::proto::path_info_service_server::PathInfoService as GRPCPathInfoService; use crate::proto::GRPCPathInfoServiceWrapper; +use crate::proto::GetPathInfoRequest; use crate::proto::PathInfo; -use crate::proto::{GetPathInfoRequest, Node, SymlinkNode}; use crate::tests::fixtures::DUMMY_OUTPUT_HASH; use crate::tests::utils::gen_blob_service; use crate::tests::utils::gen_directory_service; @@ -11,6 +10,7 @@ use crate::tests::utils::gen_pathinfo_service; use std::sync::Arc; use tokio_stream::wrappers::ReceiverStream; use tonic::Request; +use tvix_castore::proto as castorepb; /// generates a GRPCPathInfoService out of blob, directory and pathinfo services. 
/// @@ -48,8 +48,8 @@ async fn put_get() { let service = gen_grpc_service(); let path_info = PathInfo { - node: Some(Node { - node: Some(Symlink(SymlinkNode { + node: Some(castorepb::Node { + node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode { name: "00000000000000000000000000000000-foo".into(), target: "doesntmatter".into(), })), diff --git a/tvix/store/src/proto/tests/mod.rs b/tvix/store/src/proto/tests/mod.rs index 0a96ea3a0d59..bff885624380 100644 --- a/tvix/store/src/proto/tests/mod.rs +++ b/tvix/store/src/proto/tests/mod.rs @@ -1,6 +1,2 @@ -mod directory; -mod directory_nodes_iterator; -mod grpc_blobservice; -mod grpc_directoryservice; mod grpc_pathinfoservice; mod pathinfo; diff --git a/tvix/store/src/proto/tests/pathinfo.rs b/tvix/store/src/proto/tests/pathinfo.rs index 779b46ed168e..dfbeb831d7d2 100644 --- a/tvix/store/src/proto/tests/pathinfo.rs +++ b/tvix/store/src/proto/tests/pathinfo.rs @@ -1,31 +1,10 @@ -use crate::proto::{self, Node, PathInfo, ValidatePathInfoError}; -use crate::B3Digest; +use crate::proto::{NarInfo, PathInfo, ValidatePathInfoError}; +use crate::tests::fixtures::*; use bytes::Bytes; -use lazy_static::lazy_static; use nix_compat::store_path::{self, StorePath}; use std::str::FromStr; use test_case::test_case; - -lazy_static! { - static ref DUMMY_DIGEST: B3Digest = { - let u: &[u8; 32] = &[ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, - ]; - u.into() - }; - static ref DUMMY_DIGEST_2: B3Digest = { - let u: &[u8; 32] = &[ - 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, - ]; - u.into() - }; -} - -const DUMMY_NAME: &str = "00000000000000000000000000000000-dummy"; +use tvix_castore::proto as castorepb; #[test_case( None, @@ -33,12 +12,12 @@ const DUMMY_NAME: &str = "00000000000000000000000000000000-dummy"; "No node" )] #[test_case( - Some(Node { node: None }), + Some(castorepb::Node { node: None }), Err(ValidatePathInfoError::NoNodePresent()); "No node 2" )] fn validate_no_node( - t_node: Option<proto::Node>, + t_node: Option<castorepb::Node>, t_result: Result<StorePath, ValidatePathInfoError>, ) { // construct the PathInfo object @@ -50,7 +29,7 @@ fn validate_no_node( } #[test_case( - proto::DirectoryNode { + castorepb::DirectoryNode { name: DUMMY_NAME.into(), digest: DUMMY_DIGEST.clone().into(), size: 0, @@ -59,7 +38,7 @@ fn validate_no_node( "ok" )] #[test_case( - proto::DirectoryNode { + castorepb::DirectoryNode { name: DUMMY_NAME.into(), digest: Bytes::new(), size: 0, @@ -68,7 +47,7 @@ fn validate_no_node( "invalid digest length" )] #[test_case( - proto::DirectoryNode { + castorepb::DirectoryNode { name: "invalid".into(), digest: DUMMY_DIGEST.clone().into(), size: 0, @@ -80,13 +59,13 @@ fn validate_no_node( "invalid node name" )] fn validate_directory( - t_directory_node: proto::DirectoryNode, + t_directory_node: castorepb::DirectoryNode, t_result: Result<StorePath, ValidatePathInfoError>, ) { // construct the PathInfo object let p = PathInfo { - node: Some(Node { - node: Some(proto::node::Node::Directory(t_directory_node)), + node: Some(castorepb::Node { + node: Some(castorepb::node::Node::Directory(t_directory_node)), }), ..Default::default() }; @@ -94,7 +73,7 @@ fn validate_directory( } #[test_case( - proto::FileNode { + castorepb::FileNode { 
name: DUMMY_NAME.into(), digest: DUMMY_DIGEST.clone().into(), size: 0, @@ -104,7 +83,7 @@ fn validate_directory( "ok" )] #[test_case( - proto::FileNode { + castorepb::FileNode { name: DUMMY_NAME.into(), digest: Bytes::new(), ..Default::default() @@ -113,7 +92,7 @@ fn validate_directory( "invalid digest length" )] #[test_case( - proto::FileNode { + castorepb::FileNode { name: "invalid".into(), digest: DUMMY_DIGEST.clone().into(), ..Default::default() @@ -124,11 +103,14 @@ fn validate_directory( )); "invalid node name" )] -fn validate_file(t_file_node: proto::FileNode, t_result: Result<StorePath, ValidatePathInfoError>) { +fn validate_file( + t_file_node: castorepb::FileNode, + t_result: Result<StorePath, ValidatePathInfoError>, +) { // construct the PathInfo object let p = PathInfo { - node: Some(Node { - node: Some(proto::node::Node::File(t_file_node)), + node: Some(castorepb::Node { + node: Some(castorepb::node::Node::File(t_file_node)), }), ..Default::default() }; @@ -136,7 +118,7 @@ fn validate_file(t_file_node: proto::FileNode, t_result: Result<StorePath, Valid } #[test_case( - proto::SymlinkNode { + castorepb::SymlinkNode { name: DUMMY_NAME.into(), ..Default::default() }, @@ -144,7 +126,7 @@ fn validate_file(t_file_node: proto::FileNode, t_result: Result<StorePath, Valid "ok" )] #[test_case( - proto::SymlinkNode { + castorepb::SymlinkNode { name: "invalid".into(), ..Default::default() }, @@ -155,13 +137,13 @@ fn validate_file(t_file_node: proto::FileNode, t_result: Result<StorePath, Valid "invalid node name" )] fn validate_symlink( - t_symlink_node: proto::SymlinkNode, + t_symlink_node: castorepb::SymlinkNode, t_result: Result<StorePath, ValidatePathInfoError>, ) { // construct the PathInfo object let p = PathInfo { - node: Some(Node { - node: Some(proto::node::Node::Symlink(t_symlink_node)), + node: Some(castorepb::Node { + node: Some(castorepb::node::Node::Symlink(t_symlink_node)), }), ..Default::default() }; @@ -172,8 +154,8 @@ fn validate_symlink( fn validate_references() { // create a PathInfo without narinfo field. let path_info = PathInfo { - node: Some(Node { - node: Some(proto::node::Node::Directory(proto::DirectoryNode { + node: Some(castorepb::Node { + node: Some(castorepb::node::Node::Directory(castorepb::DirectoryNode { name: DUMMY_NAME.into(), digest: DUMMY_DIGEST.clone().into(), size: 0, @@ -186,7 +168,7 @@ fn validate_references() { // create a PathInfo with a narinfo field, but an inconsistent set of references let path_info_with_narinfo_missing_refs = PathInfo { - narinfo: Some(proto::NarInfo { + narinfo: Some(NarInfo { nar_size: 0, nar_sha256: DUMMY_DIGEST.clone().into(), signatures: vec![], @@ -204,7 +186,7 @@ fn validate_references() { // create a pathinfo with the correct number of references, should suceed let path_info_with_narinfo = PathInfo { - narinfo: Some(proto::NarInfo { + narinfo: Some(NarInfo { nar_size: 0, nar_sha256: DUMMY_DIGEST.clone().into(), signatures: vec![], |