author     Florian Klink <flokli@flokli.de>   2024-06-10T18·15+0300
committer  clbot <clbot@tvl.fyi>              2024-06-12T10·30+0000
commit     e1d3fa240a2adda5db004e32a72baa30b5d4d05a (patch)
tree       01ee292dccb1ffb60a672b94342314dd6abce6c1
parent     a7bf5b975f3980677458c8cc5302393343d05fda (diff)
refactor(tvix/glue/fetchers): use named field for structs r/8250
This allows giving more self-explanatory names, as well as documenting each
field individually.

Change-Id: Ide164d684b7f819aac279cc8e657c02fc24d093f
Reviewed-on: https://cl.tvl.fyi/c/depot/+/11786
Tested-by: BuildkiteCI
Autosubmit: flokli <flokli@flokli.de>
Reviewed-by: Connor Brewster <cbrewster@hey.com>
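For context, this is the pattern applied throughout the change, sketched with hypothetical stand-in types (not the actual tvix definitions): a tuple enum variant becomes a struct variant, so every field carries a name and can be documented on its own.

// Illustration only; the enum, field, and type names here are assumptions,
// not the actual tvix code.

// Before: positional fields, readable only by their order at the call site.
enum FetchBefore {
    Url(String, Option<[u8; 32]>),
}

// After: named fields, each individually documentable.
enum FetchAfter {
    Url {
        /// The URL to fetch from.
        url: String,
        /// The expected hash of the file, if any.
        exp_hash: Option<[u8; 32]>,
    },
}

fn main() {
    let fetch = FetchAfter::Url {
        url: "https://example.com/file.txt".to_string(),
        exp_hash: None,
    };
    // Match sites can name the fields they need and skip the rest with `..`.
    if let FetchAfter::Url { url, .. } = &fetch {
        println!("fetching {url}");
    }
    // Keep the old shape used once so the example compiles without dead-code warnings.
    let _ = FetchBefore::Url("https://example.com/file.txt".to_string(), None);
}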
-rw-r--r--  tvix/glue/src/builtins/fetchers.rs | 16
-rw-r--r--  tvix/glue/src/fetchers/mod.rs      | 74
-rw-r--r--  tvix/glue/src/known_paths.rs       | 16

3 files changed, 76 insertions(+), 30 deletions(-)
diff --git a/tvix/glue/src/builtins/fetchers.rs b/tvix/glue/src/builtins/fetchers.rs
index c7602c03e81f..08a882586e9b 100644
--- a/tvix/glue/src/builtins/fetchers.rs
+++ b/tvix/glue/src/builtins/fetchers.rs
@@ -6,7 +6,6 @@ use crate::{
     tvix_store_io::TvixStoreIO,
 };
 use nix_compat::nixhash;
-use nix_compat::nixhash::NixHash;
 use std::rc::Rc;
 use tracing::info;
 use tvix_eval::builtin_macros::builtins;
@@ -80,6 +79,7 @@ async fn extract_fetch_args(
 #[builtins(state = "Rc<TvixStoreIO>")]
 pub(crate) mod fetcher_builtins {
     use crate::builtins::FetcherError;
+    use nix_compat::nixhash::NixHash;
     use url::Url;
 
     use super::*;
@@ -147,7 +147,10 @@ pub(crate) mod fetcher_builtins {
         fetch_lazy(
             state,
             name,
-            Fetch::URL(url, args.sha256.map(NixHash::Sha256)),
+            Fetch::URL {
+                url,
+                exp_hash: args.sha256.map(NixHash::Sha256),
+            },
         )
     }
 
@@ -172,7 +175,14 @@ pub(crate) mod fetcher_builtins {
         let url = Url::parse(&args.url_str)
            .map_err(|e| ErrorKind::TvixError(Rc::new(FetcherError::InvalidUrl(e))))?;
 
-        fetch_lazy(state, name, Fetch::Tarball(url, args.sha256))
+        fetch_lazy(
+            state,
+            name,
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256: args.sha256,
+            },
+        )
     }
 
     #[builtin("fetchGit")]
diff --git a/tvix/glue/src/fetchers/mod.rs b/tvix/glue/src/fetchers/mod.rs
index 1b2e1ee20cc0..9ebed01c4f28 100644
--- a/tvix/glue/src/fetchers/mod.rs
+++ b/tvix/glue/src/fetchers/mod.rs
@@ -25,10 +25,14 @@ use decompression::DecompressedReader;
 /// Representing options for doing a fetch.
 #[derive(Clone, Eq, PartialEq)]
 pub enum Fetch {
-    /// Fetch a literal file from the given URL, with an optional expected
-    /// NixHash of it.
-    /// TODO: check if this is *always* sha256, and if so, make it [u8; 32].
-    URL(Url, Option<NixHash>),
+    /// Fetch a literal file from the given URL,
+    /// with an optional expected hash.
+    URL {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the file.
+        exp_hash: Option<NixHash>,
+    },
 
     /// Fetch a tarball from the given URL and unpack.
     /// The file must be a tape archive (.tar), optionally compressed with gzip,
@@ -37,7 +41,12 @@ pub enum Fetch {
     /// so it is best if the tarball contains a single directory at top level.
     /// Optionally, a sha256 digest can be provided to verify the unpacked
     /// contents against.
-    Tarball(Url, Option<[u8; 32]>),
+    Tarball {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the contents, as NAR.
+        exp_nar_sha256: Option<[u8; 32]>,
+    },
 
     /// TODO
     Git(),
@@ -60,7 +69,7 @@ fn redact_url(url: &Url) -> Url {
 impl std::fmt::Debug for Fetch {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Fetch::URL(url, exp_hash) => {
+            Fetch::URL { url, exp_hash } => {
                 let url = redact_url(url);
                 if let Some(exp_hash) = exp_hash {
                     write!(f, "URL [url: {}, exp_hash: Some({})]", &url, exp_hash)
@@ -68,14 +77,17 @@ impl std::fmt::Debug for Fetch {
                     write!(f, "URL [url: {}, exp_hash: None]", &url)
                 }
             }
-            Fetch::Tarball(url, exp_digest) => {
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256,
+            } => {
                 let url = redact_url(url);
-                if let Some(exp_digest) = exp_digest {
+                if let Some(exp_nar_sha256) = exp_nar_sha256 {
                     write!(
                         f,
-                        "Tarball [url: {}, exp_hash: Some({})]",
+                        "Tarball [url: {}, exp_nar_sha256: Some({})]",
                         url,
-                        NixHash::Sha256(*exp_digest)
+                        NixHash::Sha256(*exp_nar_sha256)
                     )
                 } else {
                     write!(f, "Tarball [url: {}, exp_hash: None]", url)
@@ -95,9 +107,24 @@ impl Fetch {
         name: &'a str,
     ) -> Result<Option<StorePathRef<'a>>, BuildStorePathError> {
         let ca_hash = match self {
-            Fetch::URL(_, Some(nixhash)) => CAHash::Flat(nixhash.clone()),
-            Fetch::Tarball(_, Some(nar_sha256)) => CAHash::Nar(NixHash::Sha256(*nar_sha256)),
-            _ => return Ok(None),
+            Fetch::URL {
+                exp_hash: Some(exp_hash),
+                ..
+            } => CAHash::Flat(exp_hash.clone()),
+
+            Fetch::Tarball {
+                exp_nar_sha256: Some(exp_nar_sha256),
+                ..
+            } => CAHash::Nar(NixHash::Sha256(*exp_nar_sha256)),
+
+            Fetch::Git() => todo!(),
+
+            // everything else
+            Fetch::URL { exp_hash: None, .. }
+            | Fetch::Tarball {
+                exp_nar_sha256: None,
+                ..
+            } => return Ok(None),
         };
 
         // calculate the store path of this fetch
@@ -190,7 +217,7 @@ where
     /// didn't match the previously communicated hash contained inside the FetchArgs.
    pub async fn ingest(&self, fetch: Fetch) -> Result<(Node, CAHash, u64), FetcherError> {
         match fetch {
-            Fetch::URL(url, exp_hash) => {
+            Fetch::URL { url, exp_hash } => {
                 // Construct a AsyncRead reading from the data as its downloaded.
                 let mut r = self.download(url.clone()).await?;
 
@@ -199,7 +226,7 @@ where
 
                 // Copy the contents from the download reader to the blob writer.
                 // Calculate the digest of the file received, depending on the
-                // communicated expected hash (or sha256 if none provided).
+                // communicated expected hash algo (or sha256 if none provided).
                 let (actual_hash, blob_size) = match exp_hash
                     .as_ref()
                     .map(NixHash::algo)
@@ -243,7 +270,10 @@ where
                     blob_size,
                 ))
             }
-            Fetch::Tarball(url, exp_nar_sha256) => {
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256,
+            } => {
                 // Construct a AsyncRead reading from the data as its downloaded.
                 let r = self.download(url.clone()).await?;
 
@@ -396,7 +426,10 @@ mod tests {
             .unwrap(),
         );
 
-        let fetch = Fetch::URL(url, Some(exp_hash));
+        let fetch = Fetch::URL {
+            url,
+            exp_hash: Some(exp_hash),
+        };
         assert_eq!(
             "06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch",
             &fetch
@@ -410,10 +443,13 @@ mod tests {
     #[test]
     fn fetch_tarball_store_path() {
         let url = Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap();
-        let exp_nixbase32 =
+        let exp_sha256 =
             nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm")
                 .unwrap();
-        let fetch = Fetch::Tarball(url, Some(exp_nixbase32));
+        let fetch = Fetch::Tarball {
+            url,
+            exp_nar_sha256: Some(exp_sha256),
+        };
 
         assert_eq!(
             "7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source",
diff --git a/tvix/glue/src/known_paths.rs b/tvix/glue/src/known_paths.rs
index 290c9d5b6988..049dd96ca422 100644
--- a/tvix/glue/src/known_paths.rs
+++ b/tvix/glue/src/known_paths.rs
@@ -160,16 +160,16 @@ mod tests {
         static ref FOO_OUT_PATH: StorePath =
             StorePath::from_bytes(b"fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo").expect("must parse");
 
-        static ref FETCH_URL : Fetch = Fetch::URL(
-            Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
-            Some(NixHash::Sha256(nixbase32::decode_fixed("0nawkl04sj7psw6ikzay7kydj3dhd0fkwghcsf5rzaw4bmp4kbax").unwrap()))
-        );
+        static ref FETCH_URL : Fetch = Fetch::URL{
+            url: Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
+            exp_hash: Some(NixHash::Sha256(nixbase32::decode_fixed("0nawkl04sj7psw6ikzay7kydj3dhd0fkwghcsf5rzaw4bmp4kbax").unwrap()))
+        };
         static ref FETCH_URL_OUT_PATH: StorePath =
             StorePath::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap();
-        static ref FETCH_TARBALL : Fetch = Fetch::Tarball(
-            Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap(),
-            Some(nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm").unwrap())
-        );
+        static ref FETCH_TARBALL : Fetch = Fetch::Tarball{
+            url: Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap(),
+            exp_nar_sha256: Some(nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm").unwrap())
+        };
         static ref FETCH_TARBALL_OUT_PATH: StorePath =
             StorePath::from_bytes(b"7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source").unwrap();
     }