1use crate::format::builder::{
6 FormatCreateBuilder, FormatCreateBuilderBase, FormatDriverBuilder, FormatDriverBuilderBase,
7};
8use crate::format::drivers::FormatDriverInstance;
9use crate::format::gate::ImplicitOpenGate;
10use crate::format::{Format, PreallocateMode};
11use crate::{
12 storage, DenyImplicitOpenGate, ShallowMapping, Storage, StorageExt, StorageOpenOptions,
13};
14use async_trait::async_trait;
15use std::fmt::{self, Display, Formatter};
16use std::io;
17use std::path::{Path, PathBuf};
18use std::sync::atomic::{AtomicU64, Ordering};
19
/// Raw disk image format driver.
///
/// The virtual disk content is the byte-for-byte content of the underlying
/// storage object, with no format metadata.
#[derive(Debug)]
pub struct Raw<S: Storage + 'static> {
    // Underlying storage object holding the image data.
    inner: S,

    // Whether this image was opened writable.
    writable: bool,

    // Cached disk size in bytes; read by `size()` and updated atomically by
    // `resize_grow`/`resize_shrink`.
    size: AtomicU64,
}
32
33impl<S: Storage + 'static> Raw<S> {
34 pub fn builder(image: S) -> RawOpenBuilder<S> {
36 RawOpenBuilder::new(image)
37 }
38
39 pub fn builder_path<P: AsRef<Path>>(image_path: P) -> RawOpenBuilder<S> {
41 RawOpenBuilder::new_path(image_path)
42 }
43
44 pub fn create_builder(image: S) -> RawCreateBuilder<S> {
46 RawCreateBuilder::new(image)
47 }
48
49 pub async fn open_image(inner: S, writable: bool) -> io::Result<Self> {
51 let size = inner.size()?;
52 Ok(Raw {
53 inner,
54 writable,
55 size: size.into(),
56 })
57 }
58
59 pub async fn open_path<P: AsRef<Path>>(path: P, writable: bool) -> io::Result<Self> {
61 let storage_opts = StorageOpenOptions::new().write(writable).filename(path);
62 let inner = S::open(storage_opts).await?;
63 Self::open_image(inner, writable).await
64 }
65
66 #[cfg(feature = "sync-wrappers")]
68 pub fn open_image_sync(inner: S, writable: bool) -> io::Result<Self> {
69 let size = inner.size()?;
70 Ok(Raw {
71 inner,
72 writable,
73 size: size.into(),
74 })
75 }
76
77 #[cfg(feature = "sync-wrappers")]
78 pub fn open_path_sync<P: AsRef<Path>>(path: P, writable: bool) -> io::Result<Self> {
80 tokio::runtime::Builder::new_current_thread()
81 .build()?
82 .block_on(Self::open_path(path, writable))
83 }
84}
85
86#[async_trait(?Send)]
87impl<S: Storage + 'static> FormatDriverInstance for Raw<S> {
88 type Storage = S;
89
90 fn format(&self) -> Format {
91 Format::Raw
92 }
93
94 async unsafe fn probe(_storage: &S) -> io::Result<bool>
95 where
96 Self: Sized,
97 {
98 Ok(true)
99 }
100
101 fn size(&self) -> u64 {
102 self.size.load(Ordering::Relaxed)
103 }
104
105 fn zero_granularity(&self) -> Option<u64> {
106 None
107 }
108
109 fn collect_storage_dependencies(&self) -> Vec<&S> {
110 vec![&self.inner]
111 }
112
113 fn writable(&self) -> bool {
114 self.writable
115 }
116
117 async fn get_mapping<'a>(
118 &'a self,
119 offset: u64,
120 max_length: u64,
121 ) -> io::Result<(ShallowMapping<'a, S>, u64)> {
122 let remaining = match self.size().checked_sub(offset) {
123 None | Some(0) => return Ok((ShallowMapping::Eof {}, 0)),
124 Some(remaining) => remaining,
125 };
126
127 Ok((
128 ShallowMapping::Raw {
129 storage: &self.inner,
130 offset,
131 writable: true,
132 },
133 std::cmp::min(max_length, remaining),
134 ))
135 }
136
137 async fn ensure_data_mapping<'a>(
138 &'a self,
139 offset: u64,
140 length: u64,
141 _overwrite: bool,
142 ) -> io::Result<(&'a S, u64, u64)> {
143 let Some(remaining) = self.size().checked_sub(offset) else {
144 return Err(io::Error::other("Cannot allocate past the end of file"));
145 };
146 if length > remaining {
147 return Err(io::Error::other("Cannot allocate past the end of file"));
148 }
149
150 Ok((&self.inner, offset, length))
151 }
152
153 async fn ensure_zero_mapping(&self, offset: u64, length: u64) -> io::Result<(u64, u64)> {
154 let zero_align = self.inner.zero_align();
155 assert!(zero_align.is_power_of_two());
156
157 let zero_align_mask = zero_align as u64 - 1;
158
159 let aligned_end = (offset + length) & !zero_align_mask;
160 let aligned_offset = (offset + zero_align_mask) & !zero_align_mask;
161 let aligned_length = aligned_end.saturating_sub(aligned_offset);
162 if aligned_length == 0 {
163 return Ok((aligned_offset, 0));
164 }
165
166 self.inner
168 .write_zeroes(aligned_offset, aligned_length)
169 .await?;
170 Ok((aligned_offset, aligned_length))
171 }
172
173 async fn discard_to_zero(&mut self, offset: u64, length: u64) -> io::Result<(u64, u64)> {
174 self.ensure_zero_mapping(offset, length).await
175 }
176
177 async fn discard_to_any(&mut self, offset: u64, length: u64) -> io::Result<(u64, u64)> {
178 let discard_align = self.inner.discard_align();
179 assert!(discard_align.is_power_of_two());
180
181 let discard_align_mask = discard_align as u64 - 1;
182
183 let aligned_end = (offset + length) & !discard_align_mask;
184 let aligned_offset = (offset + discard_align_mask) & !discard_align_mask;
185 let aligned_length = aligned_end.saturating_sub(aligned_offset);
186 if aligned_length == 0 {
187 return Ok((aligned_offset, 0));
188 }
189
190 self.inner.discard(aligned_offset, aligned_length).await?;
191 Ok((aligned_offset, aligned_length))
192 }
193
194 async fn discard_to_backing(&mut self, offset: u64, length: u64) -> io::Result<(u64, u64)> {
195 self.discard_to_zero(offset, length).await
196 }
197
198 async fn flush(&self) -> io::Result<()> {
199 self.inner.flush().await
201 }
202
203 async fn sync(&self) -> io::Result<()> {
204 self.inner.sync().await
205 }
206
207 async unsafe fn invalidate_cache(&self) -> io::Result<()> {
208 unsafe { self.inner.invalidate_cache() }.await
211 }
212
213 async fn resize_grow(
214 &self,
215 new_size: u64,
216 format_prealloc_mode: PreallocateMode,
217 ) -> io::Result<()> {
218 if self
219 .size
220 .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |old| {
221 (new_size > old).then_some(new_size)
222 })
223 .is_err()
224 {
225 return Ok(()); }
227
228 let storage_prealloc_mode = match format_prealloc_mode {
229 PreallocateMode::None => storage::PreallocateMode::None,
230 PreallocateMode::Zero | PreallocateMode::FormatAllocate => {
231 storage::PreallocateMode::Zero
232 }
233 PreallocateMode::FullAllocate => storage::PreallocateMode::Allocate,
234 PreallocateMode::WriteData => storage::PreallocateMode::WriteData,
235 };
236 self.inner.resize(new_size, storage_prealloc_mode).await
237 }
238
239 async fn resize_shrink(&mut self, new_size: u64) -> io::Result<()> {
240 if self
241 .size
242 .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |old| {
243 (new_size < old).then_some(new_size)
244 })
245 .is_err()
246 {
247 return Ok(()); }
249
250 self.inner
251 .resize(new_size, storage::PreallocateMode::None)
252 .await
253 }
254}
255
256impl<S: Storage + 'static> Display for Raw<S> {
257 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
258 write!(f, "raw[{}]", self.inner)
259 }
260}
261
/// Builder for opening an existing raw image; thin wrapper around the
/// generic format-driver builder base.
pub struct RawOpenBuilder<S: Storage + 'static>(FormatDriverBuilderBase<S>);
264
265impl<S: Storage + 'static> FormatDriverBuilder<S> for RawOpenBuilder<S> {
266 type Format = Raw<S>;
267 const FORMAT: Format = Format::Raw;
268
269 fn new(image: S) -> Self {
270 RawOpenBuilder(FormatDriverBuilderBase::new(image))
271 }
272
273 fn new_path<P: AsRef<Path>>(path: P) -> Self {
274 RawOpenBuilder(FormatDriverBuilderBase::new_path(path))
275 }
276
277 fn write(mut self, writable: bool) -> Self {
278 self.0.set_write(writable);
279 self
280 }
281
282 fn storage_open_options(mut self, options: StorageOpenOptions) -> Self {
283 self.0.set_storage_open_options(options);
284 self
285 }
286
287 async fn open<G: ImplicitOpenGate<S>>(self, mut gate: G) -> io::Result<Self::Format> {
288 let writable = self.0.get_writable();
289 let file = self.0.open_image(&mut gate).await?;
290 Raw::open_image(file, writable).await
291 }
292
293 fn get_image_path(&self) -> Option<PathBuf> {
294 self.0.get_image_path()
295 }
296
297 fn get_writable(&self) -> bool {
298 self.0.get_writable()
299 }
300
301 fn get_storage_open_options(&self) -> Option<&StorageOpenOptions> {
302 self.0.get_storage_opts()
303 }
304}
305
/// Builder for creating (formatting) a new raw image; thin wrapper around
/// the generic format-create builder base.
pub struct RawCreateBuilder<S: Storage + 'static>(FormatCreateBuilderBase<S>);
308
309impl<S: Storage + 'static> FormatCreateBuilder<S> for RawCreateBuilder<S> {
310 const FORMAT: Format = Format::Raw;
311 type DriverBuilder = RawOpenBuilder<S>;
312
313 fn new(image: S) -> Self {
314 RawCreateBuilder(FormatCreateBuilderBase::new(image))
315 }
316
317 fn size(mut self, size: u64) -> Self {
318 self.0.set_size(size);
319 self
320 }
321
322 fn preallocate(mut self, prealloc_mode: PreallocateMode) -> Self {
323 self.0.set_preallocate(prealloc_mode);
324 self
325 }
326
327 fn get_size(&self) -> u64 {
328 self.0.get_size()
329 }
330
331 fn get_preallocate(&self) -> PreallocateMode {
332 self.0.get_preallocate()
333 }
334
335 async fn create(self) -> io::Result<()> {
336 self.create_open(DenyImplicitOpenGate::default(), |image| {
337 Ok(Raw::builder(image))
338 })
339 .await?;
340 Ok(())
341 }
342
343 async fn create_open<
344 G: ImplicitOpenGate<S>,
345 F: FnOnce(S) -> io::Result<Self::DriverBuilder>,
346 >(
347 self,
348 open_gate: G,
349 open_builder_fn: F,
350 ) -> io::Result<Raw<S>> {
351 let size = self.0.get_size();
352 let prealloc = self.0.get_preallocate();
353 let image = self.0.get_image();
354
355 if image.size()? > 0 {
357 image.resize(size, storage::PreallocateMode::None).await?;
358 }
359
360 let img = open_builder_fn(image)?.write(true).open(open_gate).await?;
361 if size > 0 {
362 img.resize_grow(size, prealloc).await?;
363 }
364
365 Ok(img)
366 }
367}