Diffstat (limited to 'tvix/store/src/blobservice/memory.rs')
-rw-r--r--  tvix/store/src/blobservice/memory.rs | 24 ++++++++++--------------
1 file changed, 10 insertions(+), 14 deletions(-)
diff --git a/tvix/store/src/blobservice/memory.rs b/tvix/store/src/blobservice/memory.rs
index 9a796ca2c0c8..1ee59d108743 100644
--- a/tvix/store/src/blobservice/memory.rs
+++ b/tvix/store/src/blobservice/memory.rs
@@ -1,4 +1,3 @@
-use data_encoding::BASE64;
 use std::io::Cursor;
 use std::{
     collections::HashMap,
@@ -7,27 +6,24 @@ use std::{
 use tracing::{instrument, warn};
 
 use super::{BlobService, BlobWriter};
-use crate::Error;
-
-// type B3Digest = [u8; 32];
-// struct B3Digest ([u8; 32]);
+use crate::{B3Digest, Error};
 
 #[derive(Clone, Default)]
 pub struct MemoryBlobService {
-    db: Arc<RwLock<HashMap<[u8; 32], Vec<u8>>>>,
+    db: Arc<RwLock<HashMap<B3Digest, Vec<u8>>>>,
 }
 
 impl BlobService for MemoryBlobService {
     type BlobReader = Cursor<Vec<u8>>;
     type BlobWriter = MemoryBlobWriter;
 
-    #[instrument(skip(self, digest), fields(blob.digest=BASE64.encode(digest)))]
-    fn has(&self, digest: &[u8; 32]) -> Result<bool, Error> {
+    #[instrument(skip(self, digest), fields(blob.digest=%digest))]
+    fn has(&self, digest: &B3Digest) -> Result<bool, Error> {
         let db = self.db.read().unwrap();
         Ok(db.contains_key(digest))
     }
 
-    fn open_read(&self, digest: &[u8; 32]) -> Result<Option<Self::BlobReader>, Error> {
+    fn open_read(&self, digest: &B3Digest) -> Result<Option<Self::BlobReader>, Error> {
         let db = self.db.read().unwrap();
 
         Ok(db.get(digest).map(|x| Cursor::new(x.clone())))
@@ -40,13 +36,13 @@ impl BlobService for MemoryBlobService {
 }
 
 pub struct MemoryBlobWriter {
-    db: Arc<RwLock<HashMap<[u8; 32], Vec<u8>>>>,
+    db: Arc<RwLock<HashMap<B3Digest, Vec<u8>>>>,
 
     buf: Vec<u8>,
 }
 
 impl MemoryBlobWriter {
-    fn new(db: Arc<RwLock<HashMap<[u8; 32], Vec<u8>>>>) -> Self {
+    fn new(db: Arc<RwLock<HashMap<B3Digest, Vec<u8>>>>) -> Self {
         Self {
             buf: Vec::new(),
             db,
@@ -64,16 +60,16 @@ impl std::io::Write for MemoryBlobWriter {
 }
 
 impl BlobWriter for MemoryBlobWriter {
-    fn close(self) -> Result<[u8; 32], Error> {
+    fn close(self) -> Result<B3Digest, Error> {
         // in this memory implementation, we don't actually bother hashing
         // incrementally while writing, but do it at the end.
         let mut hasher = blake3::Hasher::new();
         hasher.update(&self.buf);
-        let digest: [u8; 32] = hasher.finalize().into();
+        let digest = B3Digest::from_vec(hasher.finalize().as_bytes().to_vec()).unwrap();
 
         // open the database for writing.
         let mut db = self.db.write()?;
-        db.insert(digest, self.buf);
+        db.insert(digest.clone(), self.buf);
 
         Ok(digest)
     }
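
The change replaces raw [u8; 32] keys with a B3Digest newtype from crate::B3Digest. From how the diff uses it, that type must be usable as a HashMap key (Eq + Hash), be Clone, implement Display (for the blob.digest=%digest tracing field), and offer a fallible from_vec constructor. The sketch below shows one minimal shape that would satisfy those call sites; it is an illustration under those assumptions, not the actual definition in tvix/store, and the InvalidDigestLen error type and hex-based Display are hypothetical choices.

    use std::fmt;

    /// Sketch of a 32-byte BLAKE3 digest newtype, usable as a HashMap key.
    #[derive(Clone, PartialEq, Eq, Hash)]
    pub struct B3Digest(Vec<u8>);

    /// Hypothetical error for inputs that are not exactly 32 bytes.
    #[derive(Debug)]
    pub struct InvalidDigestLen(usize);

    impl B3Digest {
        /// Construct from a Vec<u8>, rejecting anything that is not 32 bytes,
        /// matching the `B3Digest::from_vec(...)` call in `close()` above.
        pub fn from_vec(value: Vec<u8>) -> Result<Self, InvalidDigestLen> {
            if value.len() != 32 {
                Err(InvalidDigestLen(value.len()))
            } else {
                Ok(Self(value))
            }
        }
    }

    // Display renders the digest (here as lowercase hex) so the
    // `fields(blob.digest=%digest)` instrument attribute produces a readable span field.
    impl fmt::Display for B3Digest {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            for b in &self.0 {
                write!(f, "{:02x}", b)?;
            }
            Ok(())
        }
    }

With such a type in place, the `%digest` formatting in the #[instrument] attribute replaces the previous explicit BASE64.encode(digest) call, which is why the data_encoding import is dropped in the first hunk.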