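//! An in-memory [BlobService] implementation: blob contents live in a
//! [HashMap] keyed by their BLAKE3 digest, guarded by an [RwLock].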
use parking_lot::RwLock;
use std::io::{self, Cursor, Write};
use std::task::Poll;
use std::{collections::HashMap, sync::Arc};
use tonic::async_trait;
use tracing::instrument;

use super::{BlobReader, BlobService, BlobWriter};
use crate::composition::{CompositionContext, ServiceBuilder};
use crate::B3Digest;
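
/// An in-memory [BlobService], storing blob contents in a [HashMap] keyed by
/// their BLAKE3 digest. Everything is kept in process memory, which makes this
/// mainly useful for tests and other short-lived stores.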
#[derive(Clone, Default)]
pub struct MemoryBlobService {
    db: Arc<RwLock<HashMap<B3Digest, Vec<u8>>>>,
}

#[async_trait]
impl BlobService for MemoryBlobService {
    #[instrument(skip_all, ret, err, fields(blob.digest=%digest))]
    async fn has(&self, digest: &B3Digest) -> io::Result<bool> {
        let db = self.db.read();
        Ok(db.contains_key(digest))
    }

    #[instrument(skip_all, err, fields(blob.digest=%digest))]
    async fn open_read(&self, digest: &B3Digest) -> io::Result<Option<Box<dyn BlobReader>>> {
        let db = self.db.read();

        match db.get(digest).map(|x| Cursor::new(x.clone())) {
            Some(result) => Ok(Some(Box::new(result))),
            None => Ok(None),
        }
    }

    #[instrument(skip_all)]
    async fn open_write(&self) -> Box<dyn BlobWriter> {
        Box::new(MemoryBlobWriter::new(self.db.clone()))
    }
}
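
/// Configuration for [MemoryBlobService]. It accepts no options; unknown
/// fields are rejected during deserialization.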
#[derive(serde::Deserialize, Debug)]
#[serde(deny_unknown_fields)]
pub struct MemoryBlobServiceConfig {}

#[async_trait]
impl ServiceBuilder for MemoryBlobServiceConfig {
    type Output = dyn BlobService;

    async fn build<'a>(
        &'a self,
        _instance_name: &str,
        _context: &CompositionContext<dyn BlobService>,
    ) -> Result<Arc<dyn BlobService>, Box<dyn std::error::Error + Send + Sync + 'static>> {
        Ok(Arc::new(MemoryBlobService::default()))
    }
}
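
/// A [BlobWriter] that buffers written bytes in memory and hashes them with
/// BLAKE3 as they arrive. On close, the buffer is inserted into the shared
/// [HashMap] under the resulting digest.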
pub struct MemoryBlobWriter {
    db: Arc<RwLock<HashMap<B3Digest, Vec<u8>>>>,

    /// Contains the buffer Vec and hasher, or None if already closed
    writers: Option<(Vec<u8>, blake3::Hasher)>,

    /// The digest that has been returned, if we successfully closed.
    digest: Option<B3Digest>,
}

impl MemoryBlobWriter {
    fn new(db: Arc<RwLock<HashMap<B3Digest, Vec<u8>>>>) -> Self {
        Self {
            db,
            writers: Some((Vec::new(), blake3::Hasher::new())),
            digest: None,
        }
    }
}

impl tokio::io::AsyncWrite for MemoryBlobWriter {
    fn poll_write(
        mut self: std::pin::Pin<&mut Self>,
        _cx: &mut std::task::Context<'_>,
        b: &[u8],
    ) -> std::task::Poll<Result<usize, io::Error>> {
        Poll::Ready(match &mut self.writers {
            None => Err(io::Error::new(
                io::ErrorKind::NotConnected,
                "already closed",
            )),
            Some((ref mut buf, ref mut hasher)) => {
                // Write into the buffer first, then feed exactly the accepted
                // bytes to the hasher, so buffer and digest stay in sync.
                let bytes_written = buf.write(b)?;
                hasher.write(&b[..bytes_written])
            }
        })
    }

    fn poll_flush(
        self: std::pin::Pin<&mut Self>,
        _cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Result<(), io::Error>> {
        Poll::Ready(match self.writers {
            None => Err(io::Error::new(
                io::ErrorKind::NotConnected,
                "already closed",
            )),
            Some(_) => Ok(()),
        })
    }

    fn poll_shutdown(
        self: std::pin::Pin<&mut Self>,
        _cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Result<(), io::Error>> {
        // shutdown is "instantaneous", we only write to memory.
        Poll::Ready(Ok(()))
    }
}

#[async_trait]
impl BlobWriter for MemoryBlobWriter {
    async fn close(&mut self) -> io::Result<B3Digest> {
        if self.writers.is_none() {
            // Already closed: return the stored digest, or an error if the
            // writer never produced one.
            match &self.digest {
                Some(digest) => Ok(digest.clone()),
                None => Err(io::Error::new(io::ErrorKind::BrokenPipe, "already closed")),
            }
        } else {
            let (buf, hasher) = self.writers.take().unwrap();
            let digest: B3Digest = hasher.finalize().as_bytes().into();

            // Only insert if the blob doesn't already exist.
            // Take an upgradable read lock, and upgrade to a write lock only
            // if we actually need to insert.
            let mut db = self.db.upgradable_read();
            if !db.contains_key(&digest) {
                // open the database for writing.
                db.with_upgraded(|db| {
                    // and put buf in there. This will move buf out.
                    db.insert(digest.clone(), buf);
                });
            }

            self.digest = Some(digest.clone());
            Ok(digest)
        }
    }
}
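
// A minimal round-trip sketch (not part of the original file): it assumes
// `BlobWriter` and `BlobReader` expose `tokio::io::AsyncWrite` / `AsyncRead`
// (plus `Unpin`), as the implementations above suggest; adjust the bounds to
// whatever the parent module actually defines.
#[cfg(test)]
mod tests {
    use super::*;
    use tokio::io::{AsyncReadExt, AsyncWriteExt};

    #[tokio::test]
    async fn write_then_read_roundtrip() {
        let svc = MemoryBlobService::default();

        // Write some bytes, then close to obtain the blob's digest.
        let mut writer = svc.open_write().await;
        writer.write_all(b"hello memory blobs").await.unwrap();
        let digest = writer.close().await.unwrap();

        // The blob is now visible through has() and open_read().
        assert!(svc.has(&digest).await.unwrap());
        let mut reader = svc
            .open_read(&digest)
            .await
            .unwrap()
            .expect("blob should exist");

        let mut contents = Vec::new();
        reader.read_to_end(&mut contents).await.unwrap();
        assert_eq!(b"hello memory blobs".as_slice(), contents.as_slice());
    }
}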