path: root/tvix/store/src/chunkservice/memory.rs

use data_encoding::BASE64;
use std::{
    collections::HashMap,
    sync::{Arc, RwLock},
};
use tracing::instrument;

use crate::Error;

use super::ChunkService;

/// A [ChunkService] keeping all chunks in an in-memory HashMap, keyed by
/// their BLAKE3 digest. The map is wrapped in an `Arc<RwLock<_>>` so cloned
/// instances share the same underlying store.
#[derive(Clone)]
pub struct MemoryChunkService {
    db: Arc<RwLock<HashMap<Vec<u8>, Vec<u8>>>>,
}

impl MemoryChunkService {
    pub fn new() -> Self {
        let db = Arc::new(RwLock::new(HashMap::default()));

        Self { db }
    }
}

impl ChunkService for MemoryChunkService {
    #[instrument(skip(self, digest), fields(chunk.digest=BASE64.encode(digest)))]
    fn has(&self, digest: &[u8]) -> Result<bool, Error> {
        let db = self.db.read().unwrap();
        Ok(db.contains_key(digest))
    }

    #[instrument(skip(self), fields(chunk.digest=BASE64.encode(digest)))]
    fn get(&self, digest: &[u8]) -> Result<Option<Vec<u8>>, Error> {
        let db = self.db.read().unwrap();
        match db.get(digest) {
            None => Ok(None),
            Some(data) => {
                // calculate the hash to verify this is really what we expect
                let actual_digest = blake3::hash(&data).as_bytes().to_vec();
                if actual_digest != digest {
                    return Err(Error::StorageError(format!(
                        "invalid hash encountered when reading chunk, expected {}, got {}",
                        BASE64.encode(digest),
                        BASE64.encode(&actual_digest),
                    )));
                }
                Ok(Some(data.clone()))
            }
        }
    }

    #[instrument(skip(self, data))]
    fn put(&self, data: Vec<u8>) -> Result<Vec<u8>, Error> {
        // the BLAKE3 digest of the data is used as the key in the store.
        let digest = blake3::hash(&data).as_bytes().to_vec();

        let mut db = self.db.write().unwrap();
        db.insert(digest.clone(), data);

        Ok(digest)
    }
}
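
// The tests below are an illustrative sketch, not part of the upstream file:
// they exercise the put/has/get round-trip of the in-memory store, assuming
// that `super::*` brings both MemoryChunkService and ChunkService into scope
// and that crate::Error implements Debug.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn put_has_get_roundtrip() {
        let svc = MemoryChunkService::new();
        let data = b"some chunk contents".to_vec();

        // put returns the BLAKE3 digest of the stored data.
        let digest = svc.put(data.clone()).expect("put must succeed");
        assert_eq!(blake3::hash(&data).as_bytes().to_vec(), digest);

        // the chunk is now visible through both has and get.
        assert!(svc.has(&digest).expect("has must succeed"));
        assert_eq!(Some(data), svc.get(&digest).expect("get must succeed"));
    }

    #[test]
    fn get_unknown_digest_returns_none() {
        let svc = MemoryChunkService::new();

        // a digest that was never stored yields Ok(None), not an error.
        let unknown = blake3::hash(b"never stored").as_bytes().to_vec();
        assert_eq!(None, svc.get(&unknown).expect("get must succeed"));
    }
}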