Compare commits
10 commits: a1a5b08970 ... 05c0c7df8f

Commits (SHA1):
05c0c7df8f
43a92bedcf
6dd58e3b65
15089abba2
8ba04a0b94
3aa8ecbd77
e1f59b1b46
6762812ec5
db762e0187
577abca2db
@@ -23,6 +23,15 @@ thiserror = { version = "1.0", package = "thiserror-core", default-features = fa
 num_enum = {version = "0.5.11", default-features = false}
 replace_with = "0.1.7"
+
+miniz_oxide = {version = "0.7.1"}
+zstd-safe = "6.0.5+zstd.1.5.4"
 
 [dev-dependencies]
 env_logger = "*"
 test-log = "*"
+
+include-blob = "0.1.2"
+
+[build-dependencies]
+include-blob = "0.1.2"
@@ -36,7 +36,7 @@ pub mod std_io {
     impl<T: Read + Seek> VolumeIo for T {
         fn read(&mut self, dst: &mut [u8], address: u64) -> Result<(), Error> {
             self.seek(std::io::SeekFrom::Start(address))
-                .map_err(|a| Error::ReadFailed)?;
+                .map_err(|_| Error::ReadFailed)?;
             self.read_exact(dst).map_err(|_| Error::ReadFailed)
         }
@@ -23,8 +23,8 @@ pub enum KnownObjectId {
     QuotaTree,
     UuidTree,
     FreeSpaceTree,
-    RootINode = 0x100,
-    __LastFreeId = u64::MAX - 256,
+    // RootINode = 0x100, // first free id, always the root inode of a fs
+    // __LastFreeId = u64::MAX - 256, // last free id
     DataRelocTree = u64::MAX - 9,
     TreeReloc = u64::MAX - 8,
     TreeLog = u64::MAX - 7,
@@ -360,6 +360,17 @@ impl Parseable for ExtentData {
     }
 }
 
+#[repr(u8)]
+#[derive(Debug, Clone, Copy, FromPrimitive, IntoPrimitive)]
+pub enum CompressionType {
+    None = 0,
+    Zlib,
+    Lzo,
+    ZStd,
+    #[num_enum(catch_all)]
+    Invalid(u8),
+}
+
 #[repr(C, packed(1))]
 #[derive(Debug, Clone, Copy, FromBytes, AsBytes)]
 pub struct ExtentData1 {
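Aside (not part of the commits): the derives above come from num_enum, whose catch_all variant makes the byte-to-enum conversion infallible. A minimal sketch of what that buys, assuming the generated From<u8> impl; the helper name is made up:

    // Hypothetical helper, not in the diff: map a raw on-disk compression byte
    // to the enum; unknown values become Invalid(raw) rather than an error.
    fn classify(raw: u8) -> CompressionType {
        // 0 => None, 1 => Zlib, 2 => Lzo, 3 => ZStd, anything else => Invalid(raw)
        CompressionType::from(raw)
    }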
@@ -372,6 +383,14 @@ pub struct ExtentData1 {
 }
 
 impl ExtentData1 {
+    pub fn decoded_size(&self) -> u64 {
+        self.decoded_size.get()
+    }
+
+    pub fn compression(&self) -> CompressionType {
+        self.compression.into()
+    }
+
     pub fn ty(&self) -> ExtentDataType {
         match self.ty {
             0 => ExtentDataType::Inline,
@@ -16,8 +16,12 @@ pub struct PartialKey {
 }
 
 impl PartialKey {
-    pub fn new(id: Option<KnownObjectId>, ty: Option<ObjectType>, offset: Option<u64>) -> Self {
-        Self { id, ty, offset }
+    pub fn new(id: KnownObjectId, ty: Option<ObjectType>, offset: Option<u64>) -> Self {
+        Self {
+            id: Some(id),
+            ty,
+            offset,
+        }
     }
 }
@@ -65,25 +69,18 @@ mod partial_key_tests {
             0x8dbfc2d2, // crc of "default"
         );
 
-        let pkey = PartialKey::new(
-            Some(KnownObjectId::ChunkTree),
-            Some(ObjectType::DirItem),
-            None,
-        );
+        let pkey = PartialKey::new(KnownObjectId::ChunkTree, Some(ObjectType::DirItem), None);
         assert_eq!(pkey.partial_cmp(&key), None);
 
         let pkey = PartialKey::new(
-            Some(KnownObjectId::ChunkTree),
+            KnownObjectId::ChunkTree,
             Some(ObjectType::DirItem),
             Some(0xdeadbeef),
         );
         assert_ne!(pkey.partial_cmp(&key), Some(core::cmp::Ordering::Equal));
 
-        let pkey = PartialKey::new(None, Some(ObjectType::DirItem), Some(0xdeadbeef));
-        assert_ne!(pkey.partial_cmp(&key), None);
-
         let pkey = PartialKey::new(
-            Some(KnownObjectId::ChunkTree),
+            KnownObjectId::ChunkTree,
             Some(ObjectType::DirItem),
             Some(0x8dbfc2d2),
         );
@@ -98,25 +95,18 @@ mod partial_key_tests {
             0x8dbfc2d2, // crc of "default"
         );
 
-        let pkey = PartialKey::new(
-            Some(KnownObjectId::ChunkTree),
-            Some(ObjectType::DirItem),
-            None,
-        );
+        let pkey = PartialKey::new(KnownObjectId::ChunkTree, Some(ObjectType::DirItem), None);
         assert!(pkey.eq(&key));
 
         let pkey = PartialKey::new(
-            Some(KnownObjectId::ChunkTree),
+            KnownObjectId::ChunkTree,
             Some(ObjectType::DirItem),
             Some(0xdeadbeef),
        );
         assert!(!pkey.eq(&key));
 
-        let pkey = PartialKey::new(None, Some(ObjectType::DirItem), Some(0xdeadbeef));
-        assert!(!pkey.eq(&key));
-
         let pkey = PartialKey::new(
-            Some(KnownObjectId::ChunkTree),
+            KnownObjectId::ChunkTree,
             Some(ObjectType::DirItem),
             Some(0x8dbfc2d2),
         );
@@ -1,11 +1,3 @@
-use core::{
-    cmp::Ordering,
-    fmt::Debug,
-    ops::{Bound, RangeBounds},
-};
-
-use crate::Error;
-
 pub mod error {
     use thiserror::Error;
 
@@ -29,6 +21,8 @@ pub mod error {
         NoDefaultSubvolFsRoot,
         #[error("INode could not be found in FsTree")]
         INodeNotFound,
+        #[error("decompression error")]
+        DecompressionError,
         #[error("attempted to access {index}th item out of bounds {range:?}")]
         OutOfBounds {
             range: core::ops::Range<usize>,
@@ -59,11 +53,22 @@ pub trait Read {
     }
 }
 
+impl Read for &[u8] {
+    fn read(&self, dst: &mut [u8], address: u64) -> error::Result<()> {
+        let src = self
+            .get(address as usize..address as usize + dst.len())
+            .ok_or(error::Error::ReadFailed)?;
+        dst.copy_from_slice(src);
+        Ok(())
+    }
+}
+
 #[cfg(all(any(feature = "std", test), unix))]
 impl Read for std::fs::File {
     fn read(&self, dst: &mut [u8], address: u64) -> error::Result<()> {
         use std::os::unix::prelude::FileExt;
-        self.read_at(dst, address).map_err(|_| Error::ReadFailed)?;
+        self.read_at(dst, address)
+            .map_err(|_| error::Error::ReadFailed)?;
         Ok(())
     }
 }
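Aside (not part of the commits): a minimal sketch of how the new slice-backed Read impl above might be exercised. The function, address, and buffer size are made up, and it assumes the crate's Read trait is in scope:

    // Hypothetical usage of `impl Read for &[u8]`: read 8 bytes at a byte
    // address from an in-memory image; an out-of-range address yields
    // Error::ReadFailed instead of panicking.
    fn read_u64_at(image: &[u8], address: u64) -> error::Result<u64> {
        let mut buf = [0u8; 8];
        image.read(&mut buf, address)?;
        Ok(u64::from_le_bytes(buf))
    }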
@@ -71,275 +76,3 @@ impl Read for std::fs::File {
 pub mod file;
 pub mod tree;
 pub mod volume;
-
-pub fn cmp_start_bound<T: PartialOrd>(
-    rhs: &core::ops::Bound<T>,
-    lhs: &core::ops::Bound<T>,
-) -> Option<core::cmp::Ordering> {
-    match rhs {
-        core::ops::Bound::Included(r) => match lhs {
-            core::ops::Bound::Included(l) => r.partial_cmp(&l),
-            core::ops::Bound::Excluded(l) => match r.partial_cmp(&l) {
-                Some(core::cmp::Ordering::Equal) => Some(core::cmp::Ordering::Less),
-                i => i,
-            },
-            core::ops::Bound::Unbounded => Some(core::cmp::Ordering::Greater),
-        },
-        core::ops::Bound::Excluded(r) => match lhs {
-            core::ops::Bound::Excluded(l) => r.partial_cmp(&l),
-            core::ops::Bound::Included(l) => match r.partial_cmp(&l) {
-                Some(core::cmp::Ordering::Equal) => Some(core::cmp::Ordering::Greater),
-                i => i,
-            },
-            core::ops::Bound::Unbounded => Some(core::cmp::Ordering::Greater),
-        },
-        core::ops::Bound::Unbounded => match lhs {
-            core::ops::Bound::Unbounded => Some(core::cmp::Ordering::Equal),
-            _ => Some(core::cmp::Ordering::Less),
-        },
-    }
-}
-
-pub fn cmp_end_bound<T: PartialOrd>(
-    rhs: &core::ops::Bound<T>,
-    lhs: &core::ops::Bound<T>,
-) -> Option<core::cmp::Ordering> {
-    match rhs {
-        core::ops::Bound::Included(r) => match lhs {
-            core::ops::Bound::Included(l) => r.partial_cmp(&l),
-            core::ops::Bound::Excluded(l) => match r.partial_cmp(&l) {
-                Some(core::cmp::Ordering::Equal) => Some(core::cmp::Ordering::Greater),
-                i => i,
-            },
-            core::ops::Bound::Unbounded => Some(core::cmp::Ordering::Less),
-        },
-        core::ops::Bound::Excluded(r) => match lhs {
-            core::ops::Bound::Excluded(l) => r.partial_cmp(&l),
-            core::ops::Bound::Included(l) => match r.partial_cmp(&l) {
-                Some(core::cmp::Ordering::Equal) => Some(core::cmp::Ordering::Greater),
-                i => i,
-            },
-            core::ops::Bound::Unbounded => Some(core::cmp::Ordering::Less),
-        },
-        core::ops::Bound::Unbounded => match lhs {
-            core::ops::Bound::Unbounded => Some(core::cmp::Ordering::Equal),
-            _ => Some(core::cmp::Ordering::Greater),
-        },
-    }
-}
-
-#[derive(Debug, PartialEq, Eq)]
-struct StartBound<T: PartialOrd>(Bound<T>);
-impl<T: PartialOrd> PartialOrd for StartBound<T> {
-    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
-        cmp_start_bound(&self.0, &other.0)
-    }
-}
-impl<T: Ord> Ord for StartBound<T> {
-    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
-        self.partial_cmp(other).unwrap()
-    }
-}
-
-#[derive(Debug, PartialEq, Eq)]
-struct EndBound<T: PartialOrd>(Bound<T>);
-
-impl<T: PartialOrd> PartialOrd for EndBound<T> {
-    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
-        cmp_end_bound(&self.0, &other.0)
-    }
-}
-impl<T: Ord> Ord for EndBound<T> {
-    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
-        // safety: partial_cmp only returns None when returning T::partial_cmp
-        self.partial_cmp(other).unwrap()
-    }
-}
-
-pub fn range_countains_bound<T: Ord + Debug, R: RangeBounds<T>>(
-    range: R,
-    bound: Bound<&T>,
-) -> bool {
-    let start = &StartBound(bound);
-    let end = &EndBound(bound);
-    let r_start = &StartBound(range.start_bound());
-    let r_end = &EndBound(range.end_bound());
-    log::info!(
-        "start: {start:?} <=> {r_start:?}: {:?} {:?}",
-        start.cmp(r_start),
-        r_start.cmp(start)
-    );
-    log::info!(
-        "end: {end:?} <=> {r_end:?}: {:?} {:?}",
-        end.cmp(r_end),
-        r_end.cmp(end)
-    );
-    (start.cmp(r_start).is_ge() || r_start.cmp(start).is_le())
-        && (end.cmp(r_end).is_ge() || r_end.cmp(end).is_le())
-}
-
-#[cfg(test)]
-mod bound_ord_tests {
-    use core::{cmp::Ordering, ops::RangeBounds};
-
-    use super::*;
-    use test_log::test;
-
-    #[test]
-    fn start_bound_ord() {
-        assert_eq!(
-            cmp_start_bound(&Bound::Unbounded, &Bound::Included(0)),
-            Some(Ordering::Less)
-        );
-        assert_eq!(
-            cmp_start_bound::<i32>(&Bound::Unbounded, &Bound::Unbounded),
-            Some(Ordering::Equal)
-        );
-        assert_eq!(
-            cmp_start_bound(&Bound::Included(0), &Bound::Included(0)),
-            Some(Ordering::Equal)
-        );
-        assert_eq!(
-            cmp_start_bound(&Bound::Excluded(0), &Bound::Included(0)),
-            Some(Ordering::Greater)
-        );
-        assert_ne!(
-            cmp_start_bound(&Bound::Excluded(0), &Bound::Included(1)),
-            Some(Ordering::Greater)
-        );
-        // This is actually WRONG and is why we have to test both ways because I
-        // can't think of a way to actually determine how the 2 bounds are
-        // ordered without knowing the smallest discrete step size of T.
-        //
-        // In this case technically they should be equal, but for floats (which
-        // arent ord anyways?) this would be wrong
-        assert_eq!(
-            cmp_start_bound(&Bound::Included(1), &Bound::Excluded(0)),
-            Some(Ordering::Greater)
-        );
-
-        assert_eq!(
-            cmp_start_bound(&Bound::Included(0), &Bound::Excluded(1)),
-            Some(Ordering::Less)
-        );
-    }
-
-    #[test]
-    fn end_bound_ord() {
-        assert_eq!(
-            cmp_end_bound::<i32>(&Bound::Unbounded, &Bound::Unbounded),
-            Some(Ordering::Equal)
-        );
-        assert_eq!(
-            cmp_end_bound(&Bound::Unbounded, &Bound::Included(0)),
-            Some(Ordering::Greater)
-        );
-        assert_eq!(
-            cmp_end_bound(&Bound::Included(0), &Bound::Included(0)),
-            Some(Ordering::Equal)
-        );
-        assert_eq!(
-            cmp_end_bound(&Bound::Excluded(0), &Bound::Included(0)),
-            Some(Ordering::Greater)
-        );
-        assert_ne!(
-            cmp_end_bound(&Bound::Excluded(0), &Bound::Included(1)),
-            Some(Ordering::Greater)
-        );
-        // This is actually WRONG and is why we have to test both ways because I
-        // can't think of a way to actually determine how the 2 bounds are
-        // ordered without knowing the smallest discrete step size of T.
-        //
-        // In this case technically they should be equal, but for floats (which
-        // arent ord anyways?) this would be wrong
-        assert_eq!(
-            cmp_end_bound(&Bound::Included(1), &Bound::Excluded(0)),
-            Some(Ordering::Greater)
-        );
-
-        assert_eq!(
-            cmp_end_bound(&Bound::Included(0), &Bound::Excluded(1)),
-            Some(Ordering::Less)
-        );
-    }
-
-    #[test]
-    fn test_bound_ord() {
-        let r1 = 0..4;
-        let r2 = 2..3;
-
-        assert_eq!(
-            cmp_start_bound(&r1.start_bound(), &r2.start_bound()),
-            Some(core::cmp::Ordering::Less)
-        );
-        assert_eq!(
-            cmp_end_bound(&r1.end_bound(), &r2.end_bound()),
-            Some(core::cmp::Ordering::Greater)
-        );
-
-        assert_eq!(
-            cmp_start_bound(&r2.start_bound(), &r1.start_bound()),
-            Some(core::cmp::Ordering::Greater)
-        );
-        assert_eq!(
-            cmp_end_bound(&r2.end_bound(), &r1.end_bound()),
-            Some(core::cmp::Ordering::Less)
-        );
-
-        let r1 = 0..=8;
-        let r2 = 0..9;
-
-        assert_eq!(
-            cmp_start_bound(&r1.start_bound(), &r2.start_bound()),
-            Some(core::cmp::Ordering::Equal)
-        );
-        assert_eq!(
-            cmp_end_bound(&r1.end_bound(), &r2.end_bound()),
-            Some(core::cmp::Ordering::Less)
-        );
-        assert_eq!(
-            cmp_end_bound(&r2.end_bound(), &r1.end_bound()),
-            Some(core::cmp::Ordering::Greater)
-        );
-    }
-}
-
-pub fn ranges_intersect<T, R1, R2>(rhs: R1, lhs: R2) -> ()
-where
-    T: Ord,
-    R1: core::ops::RangeBounds<T>,
-    R2: core::ops::RangeBounds<T>,
-{
-    let a = rhs.start_bound();
-    let b = rhs.end_bound();
-    let x = lhs.start_bound();
-    let y = lhs.end_bound();
-
-    // check that a <=> x is different than b <=> x
-
-    // a <=> x
-    match a {
-        Bound::Included(a) => match x {
-            Bound::Included(x) => a.partial_cmp(x),
-            Bound::Excluded(x) => match a.partial_cmp(x) {
-                Some(Ordering::Equal) => Some(Ordering::Less),
-                ord => ord,
-            },
-            Bound::Unbounded => Some(Ordering::Greater),
-        },
-        Bound::Excluded(a) => match x {
-            Bound::Included(x) => match a.partial_cmp(x) {
-                Some(Ordering::Equal) => Some(Ordering::Greater),
-                ord => ord,
-            },
-            Bound::Excluded(x) => a.partial_cmp(x),
-            Bound::Unbounded => Some(Ordering::Less),
-        },
-        Bound::Unbounded => match x {
-            Bound::Unbounded => Some(Ordering::Equal),
-            _ => Some(Ordering::Less),
-        },
-    };
-
-    todo!()
-}
@@ -1,8 +1,10 @@
-use core::cell::Cell;
+use alloc::boxed::Box;
+use core::cell::RefCell;
 use core::fmt::Display;
 use core::marker::PhantomData;
 use core::mem::size_of;
 use core::ops::Deref;
+use core::ptr::NonNull;
 
 use crate::structs::{Header, Item, Key, KeyPtr, KnownObjectId, ObjectType, TreeItem};
 use crate::{Error, Result};
@@ -22,21 +24,33 @@ pub struct BTreeLeafNode {
     pub items: Vec<Item>,
 }
 
-#[derive(Debug, Clone)]
+#[derive(Debug)]
 pub enum NodePtr {
     Unvisited(KeyPtr),
-    Visited { key: KeyPtr, node: Rc<Node> }, // TODO: this doesnt need to be an Rc, can just be a NonNull with manual memory management
+    Visited { key: KeyPtr, node: BoxedNode }, // TODO: this doesnt need to be an Rc, can just be a NonNull with manual memory management
+}
+
+impl Clone for NodePtr {
+    fn clone(&self) -> Self {
+        match self {
+            Self::Unvisited(arg0) => Self::Unvisited(arg0.clone()),
+            Self::Visited { key, node } => Self::Visited {
+                key: key.clone(),
+                node: Node::clone_from_nonnull(node),
+            },
+        }
+    }
 }
 
 impl NodePtr {
     pub fn key_ptr(&self) -> &KeyPtr {
         match self {
             NodePtr::Unvisited(key) => key,
-            NodePtr::Visited { key, node } => key,
+            NodePtr::Visited { key, .. } => key,
         }
     }
 
-    pub fn node(&self) -> Option<&Rc<Node>> {
+    pub fn node(&self) -> Option<&BoxedNode> {
         match self {
             NodePtr::Unvisited(_) => None,
             NodePtr::Visited { node, .. } => Some(&node),
@@ -46,15 +60,25 @@ impl NodePtr {
     pub fn key(&self) -> &Key {
         &self.key_ptr().key
     }
+
+    pub fn destroy(self) {
+        match self {
+            Self::Visited { node, .. } => {
+                _ = node;
+                // TODO: from box drop
+            }
+            _ => {}
+        }
+    }
 }
 
 /// An internal node in a btrfs tree, containing `KeyPtr`s to other internal nodes or leaf nodes.
-#[derive(Derivative)]
+#[derive(Derivative, Clone)]
 #[derivative(Debug)]
 pub struct BTreeInternalNode {
     pub header: Header,
     #[derivative(Debug = "ignore")]
-    children: Vec<Cell<NodePtr>>,
+    children: Vec<RefCell<NodePtr>>,
 }
 
 impl BTreeInternalNode {
@@ -62,7 +86,7 @@ impl BTreeInternalNode {
         &self,
         idx: usize,
         volume: &super::volume::Volume<R>,
-    ) -> Result<Rc<Node>> {
+    ) -> Result<BoxedNode> {
         match self.children.get(idx) {
             Some(child) => self.visit_child_inner(child, volume),
             None => Err(Error::OutOfBounds {
@@ -74,16 +98,16 @@ impl BTreeInternalNode {
 
     fn visit_child_inner<R: super::Read>(
         &self,
-        child: &Cell<NodePtr>,
+        child: &RefCell<NodePtr>,
         volume: &super::volume::Volume<R>,
-    ) -> Result<Rc<Node>> {
+    ) -> Result<BoxedNode> {
         match unsafe { &*child.as_ptr() } {
             NodePtr::Unvisited(keyptr) => {
                 let node = volume
                     .read_keyptr(keyptr)
-                    .and_then(|bytes| Node::from_bytes(bytes))
-                    .map(|node| Rc::new(node))?;
-                child.set(NodePtr::Visited {
+                    .and_then(|bytes| Node::boxed_from_bytes(bytes))?;
+
+                child.replace(NodePtr::Visited {
                     key: *keyptr,
                     node: node.clone(),
                 });
|
||||||
pub fn visit_children<'a, 'b, R: super::Read>(
|
pub fn visit_children<'a, 'b, R: super::Read>(
|
||||||
&'a self,
|
&'a self,
|
||||||
volume: &'b super::volume::Volume<R>,
|
volume: &'b super::volume::Volume<R>,
|
||||||
) -> impl Iterator<Item = (usize, Result<Rc<Node>>)> + 'a
|
) -> impl Iterator<Item = (usize, Result<BoxedNode>)> + 'a
|
||||||
where
|
where
|
||||||
'b: 'a,
|
'b: 'a,
|
||||||
{
|
{
|
||||||
|
@@ -131,13 +155,13 @@ impl PartialEq for BTreeLeafNode {
 impl Eq for BTreeLeafNode {}
 impl Eq for BTreeInternalNode {}
 
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Debug, PartialEq, Eq, Clone)]
 pub enum BTreeNode {
     Internal(BTreeInternalNode),
     Leaf(BTreeLeafNode),
 }
 
-#[derive(Derivative, Eq)]
+#[derive(Derivative, Eq, Clone)]
 #[derivative(Debug, PartialEq)]
 pub struct Node {
     inner: BTreeNode,
@@ -146,7 +170,41 @@ pub struct Node {
     bytes: Vec<u8>,
 }
 
-type BoxedNode = Rc<Node>;
+#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct BoxedNode(NonNull<Node>);
+
+impl Deref for BoxedNode {
+    type Target = Node;
+
+    fn deref(&self) -> &Self::Target {
+        self.as_ref()
+    }
+}
+
+impl From<NonNull<Node>> for BoxedNode {
+    fn from(value: NonNull<Node>) -> Self {
+        Self(value)
+    }
+}
+
+impl From<Node> for BoxedNode {
+    fn from(value: Node) -> Self {
+        Self(unsafe { NonNull::new_unchecked(Box::leak(Box::new(value))) })
+    }
+}
+
+impl BoxedNode {
+    pub fn as_ref(&self) -> &Node {
+        unsafe { self.0.as_ref() }
+    }
+
+    pub fn as_ptr(self) -> *mut Node {
+        self.0.as_ptr()
+    }
+}
+
+//type BoxedNode = NonNull<Node>;
+
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct NodeHandle {
@@ -166,6 +224,7 @@ impl Display for NodeHandle {
     }
 }
 
+/// range of nodes that iterates thru all leaf nodes from start to end, inclusively.
 #[derive(Debug)]
 pub struct Range<'tree, R: super::Read> {
     volume: Rc<Volume<R>>,
@@ -174,6 +233,70 @@ pub struct Range<'tree, R: super::Read> {
     phantom: PhantomData<&'tree ()>,
 }
 
+pub mod entry {
+    use super::*;
+
+    #[derive(Debug, PartialEq, Eq)]
+    pub enum Entry<'tree> {
+        Occupied(OccupiedEntry<'tree>),
+        Vacant(VacantEntry<'tree>),
+    }
+
+    impl<'tree> From<VacantEntry<'tree>> for Entry<'tree> {
+        fn from(v: VacantEntry<'tree>) -> Self {
+            Self::Vacant(v)
+        }
+    }
+
+    impl<'tree> From<OccupiedEntry<'tree>> for Entry<'tree> {
+        fn from(v: OccupiedEntry<'tree>) -> Self {
+            Self::Occupied(v)
+        }
+    }
+
+    #[derive(Debug, PartialEq, Eq)]
+    pub struct OccupiedEntry<'tree> {
+        key: Key,
+        node: NodeHandle,
+        phantom: PhantomData<&'tree ()>,
+    }
+
+    impl<'tree> OccupiedEntry<'tree> {
+        pub fn new(key: Key, node: NodeHandle) -> Self {
+            Self {
+                key,
+                node,
+                phantom: PhantomData,
+            }
+        }
+
+        pub fn key(&self) -> Key {
+            self.key
+        }
+
+        pub fn item_and_value(&self) -> Result<(Item, TreeItem)> {
+            self.node.parse_item()
+        }
+
+        pub fn value(&self) -> Result<TreeItem> {
+            Ok(self.node.parse_item()?.1)
+        }
+    }
+
+    #[derive(Debug, PartialEq, Eq)]
+    pub struct VacantEntry<'tree> {
+        phantom: PhantomData<&'tree ()>,
+    }
+
+    impl<'tree> VacantEntry<'tree> {
+        pub fn new() -> Self {
+            Self {
+                phantom: PhantomData,
+            }
+        }
+    }
+}
+
 #[derive(Derivative)]
 #[derivative(Debug)]
 pub struct Tree<R: super::Read> {
@@ -186,17 +309,26 @@ impl<R: super::Read> Clone for Tree<R> {
     fn clone(&self) -> Self {
         Self {
             volume: self.volume.clone(),
-            root: self.root.clone(),
+            root: Node::clone_from_nonnull(&self.root),
         }
     }
 }
 
+impl<R: super::Read> Drop for Tree<R> {
+    fn drop(&mut self) {
+        log::debug!("======= cleaning up tree =======");
+        Node::destroy(self.root.clone());
+        log::debug!("======= [done] =======");
+    }
+}
+
 impl<R: super::Read> Tree<R> {
     pub fn from_logical_offset(volume: Rc<Volume<R>>, logical: u64) -> Result<Self> {
+        // TODO: this might read a very big range, far more than needed
         let bytes = volume
             .read_range_from_logical(logical)?
             .ok_or(Error::BadLogicalAddress)?; // TODO: make this a better error
-        let root = Rc::new(Node::from_bytes(bytes)?);
+        let root = Node::boxed_from_bytes(bytes)?;
 
         Ok(Self { volume, root })
     }
@@ -236,38 +368,42 @@ impl<R: super::Read> Tree<R> {
         }
     }
 
-    pub fn find_node_rev<K>(&self, key: &K) -> Result<Option<NodeHandle>>
+    fn find_node_rev<K>(&self, key: &K) -> Result<Option<NodeHandle>>
     where
         K: PartialEq<Key> + PartialOrd<Key>,
     {
         self.find_node_inner(key, NodeHandle::find_key_rev)
     }
 
-    pub fn find_node<K>(&self, key: &K) -> Result<Option<NodeHandle>>
+    fn find_node<K>(&self, key: &K) -> Result<Option<NodeHandle>>
    where
         K: PartialEq<Key> + PartialOrd<Key>,
     {
         self.find_node_inner(key, NodeHandle::find_key)
     }
 
-    pub fn find_key<K>(&self, key: &K) -> Result<Option<(Item, TreeItem)>>
+    pub fn entry<K>(&self, key: &K) -> Result<entry::Entry>
     where
         K: PartialEq<Key> + PartialOrd<Key>,
     {
-        match self.find_node(key)? {
-            Some(node) => node.parse_item(),
-            None => Ok(None),
-        }
+        let entry: entry::Entry = match self.find_node(key)? {
+            Some(node) => entry::OccupiedEntry::new(node.parse_key(), node).into(),
+            None => entry::VacantEntry::new().into(),
+        };
+
+        Ok(entry)
     }
 
-    pub fn find_key_rev<K>(&self, key: &K) -> Result<Option<(Item, TreeItem)>>
+    pub fn entry_rev<K>(&self, key: &K) -> Result<entry::Entry>
     where
         K: PartialEq<Key> + PartialOrd<Key>,
     {
-        match self.find_node_rev(key)? {
-            Some(node) => node.parse_item(),
-            None => Ok(None),
-        }
+        let entry: entry::Entry = match self.find_node_rev(key)? {
+            Some(node) => entry::OccupiedEntry::new(node.parse_key(), node).into(),
+            None => entry::VacantEntry::new().into(),
+        };
+
+        Ok(entry)
     }
 
     pub fn find_range<K>(&self, key: &K) -> Result<Range<R>>
@@ -339,7 +475,7 @@ impl BTreeInternalNode {
                 }
             })
             .take(header.nritems.get() as usize)
-            .map(|ptr| Cell::new(NodePtr::Unvisited(ptr)))
+            .map(|ptr| RefCell::new(NodePtr::Unvisited(ptr)))
             .collect::<Vec<_>>();
 
         Ok(Self { header, children })
@@ -550,25 +686,85 @@ impl PartialEq for RootOrEdge {
     }
 }
 
 impl Node {
+    pub fn clone_from_nonnull(this: &BoxedNode) -> BoxedNode {
+        (*this.as_ref()).clone().into()
+    }
+
+    pub fn boxed_from_bytes(bytes: Vec<u8>) -> Result<BoxedNode> {
+        Ok(Self::from_bytes(bytes)?.into())
+    }
+
     pub fn from_bytes(bytes: Vec<u8>) -> Result<Self> {
         let inner = BTreeNode::parse(&bytes)?;
 
         Ok(Self { inner, bytes })
     }
 
+    /// must not be called if any outstanding live references to `this` exist
+    pub fn destroy(this: BoxedNode) {
+        log::debug!("Node::destroy");
+        {
+            log::debug!("free: {:?}", this);
+            let inner = &this.as_ref().inner;
+            match inner {
+                BTreeNode::Internal(node) => {
+                    log::debug!("destroying children..");
+                    for child in node.children.iter() {
+                        match unsafe { &*child.as_ptr() } {
+                            NodePtr::Visited { node, .. } => {
+                                Self::destroy(node.clone());
+                            }
+                            _ => {}
+                        }
+                    }
+                    log::debug!("destroying children [end]");
+                }
+                BTreeNode::Leaf(_) => {}
+            }
+        }
+
+        log::debug!("dropping: {:?}", this);
+        unsafe {
+            drop(Box::from_raw(this.as_ptr()));
+        }
+    }
+
+    /// returns Ok(None) if `i` is out of bounds
+    fn read_nth_key(&self, i: usize) -> Option<Key> {
+        match &self.inner {
+            BTreeNode::Internal(internal) => {
+                let item = internal
+                    .children
+                    .get(i)
+                    .map(|child| *unsafe { &*child.as_ptr() }.key());
+                item
+            }
+            BTreeNode::Leaf(leaf) => {
+                let key = leaf.items.get(i).map(|item| item.key);
+
+                key
+            }
+        }
+    }
+
+    /// returns None if pointing at an internal node or `i` is out of bounds.
+    /// returns an Error if parsing the item failed.
     pub fn read_nth_item(&self, i: usize) -> Result<Option<(Item, TreeItem)>> {
         match &self.inner {
             BTreeNode::Internal(_) => Ok(None),
             BTreeNode::Leaf(leaf) => {
                 // TODO: better error to indicate that it was out of bounds
-                let item = leaf.items.get(i).ok_or(Error::ReadFailed)?;
+                let item = if let Some(item) = leaf.items.get(i) {
                     let start = size_of::<Header>() + item.offset.get() as usize;
                     let size = item.size.get() as usize;
                     let bytes = &self.bytes[start..start + size];
 
                     let value = TreeItem::parse(item, bytes)?;
+                    Some((*item, value))
+                } else {
+                    None
+                };
 
-                Ok(Some((*item, value)))
+                Ok(item)
             }
         }
     }
@@ -587,7 +783,7 @@ where
         }
     }
 
-    pub fn new(volume: Rc<Volume<R>>, start: Rc<Node>, end: Rc<Node>) -> Self {
+    pub fn new(volume: Rc<Volume<R>>, start: BoxedNode, end: BoxedNode) -> Self {
         Self::from_handles(volume, NodeHandle::start(start), NodeHandle::end(end))
     }
 
@@ -613,7 +809,7 @@ where
             });
 
             if self.start.node.inner.is_leaf() {
-                break self.start.as_handle().parse_item().expect("range item");
+                break Some(self.start.as_handle().parse_item().expect("range item"));
             }
             // else repeat
         } else {
|
||||||
});
|
});
|
||||||
|
|
||||||
if self.end.node.inner.is_leaf() {
|
if self.end.node.inner.is_leaf() {
|
||||||
break self.end.as_handle().parse_item().expect("range item");
|
break Some(self.end.as_handle().parse_item().expect("range item"));
|
||||||
}
|
}
|
||||||
// else repeat
|
// else repeat
|
||||||
} else {
|
} else {
|
||||||
|
@@ -658,23 +854,19 @@ impl NodeHandle {
         }
     }
 
-    pub fn parse_item(&self) -> Result<Option<(Item, TreeItem)>> {
-        self.node.read_nth_item(self.idx as usize)
+    /// returns None if pointing at an internal node or `i` is out of bounds.
+    /// returns an Error if parsing the item failed.
+    pub fn parse_item(&self) -> Result<(Item, TreeItem)> {
+        self.node
+            .read_nth_item(self.idx as usize)
+            .map(|result| result.unwrap())
     }
 
-    pub fn advance_sideways(self) -> NodeHandleAdvanceResult {
-        let header = self.node.inner.header();
-        if header.nritems.get() >= self.idx + 1 {
-            NodeHandleAdvanceResult::Ascend
-        } else {
-            match &self.node.inner {
-                BTreeNode::Internal(_) => NodeHandleAdvanceResult::Next(Self {
-                    idx: self.idx + 1,
-                    ..self
-                }),
-                _ => unreachable!(),
-            }
-        }
+    /// returns an Error if the key read fails
+    pub fn parse_key(&self) -> Key {
+        self.node
+            .read_nth_key(self.idx as usize)
+            .expect("idx out of bounds")
     }
 
     pub fn find_key_rev<K: PartialEq<Key> + PartialOrd<Key>>(self, key: &K) -> SearchResult {
@@ -727,12 +919,6 @@ impl NodeHandle {
                 }
                 BTreeNode::Leaf(node) => {
                     for (i, child) in node.items.iter().enumerate() {
-                        // if key < &child.key {
-                        //     return SearchResult::Leaf(NodeHandle {
-                        //         node: self.clone(),
-                        //         idx: if i == 0 { 0 } else { i as u32 - 1 },
-                        //     });
-                        // } else
                         if key.eq(&child.key) {
                             return SearchResult::Leaf(NodeHandle {
                                 idx: i as u32,
@@ -868,20 +1054,20 @@ impl NodeHandle {
 
 /// key lookup that will find the first key that matches the present items in this partial key
 pub struct PartialKey {
-    pub id: Option<KnownObjectId>,
+    pub id: KnownObjectId,
     pub ty: Option<ObjectType>,
     pub offset: Option<u64>,
 }
 
 impl PartialKey {
-    pub fn new(id: Option<KnownObjectId>, ty: Option<ObjectType>, offset: Option<u64>) -> Self {
+    pub fn new(id: KnownObjectId, ty: Option<ObjectType>, offset: Option<u64>) -> Self {
         Self { id, ty, offset }
     }
 }
 
 impl PartialEq<Key> for PartialKey {
     fn eq(&self, other: &Key) -> bool {
-        self.id.map(|id| id == other.id()).unwrap_or(true)
+        self.id == other.id()
             && self.ty.map(|ty| ty == other.ty()).unwrap_or(true)
             && self
                 .offset
@@ -893,17 +1079,15 @@ impl PartialEq<Key> for PartialKey {
 /// compares Self to a key, by comparing each item with the element in key if present, and skipping to the next item if missing.
 impl PartialOrd<Key> for PartialKey {
     fn partial_cmp(&self, other: &Key) -> Option<core::cmp::Ordering> {
-        let id = self.id.and_then(|id| id.partial_cmp(&other.id()));
-        let ty = self.ty.and_then(|ty| ty.partial_cmp(&other.ty()));
-        let offset = self
-            .offset
-            .and_then(|offset| offset.partial_cmp(&other.offset.get()));
-
-        match id {
-            Some(core::cmp::Ordering::Equal) | None => match ty {
-                Some(core::cmp::Ordering::Equal) | None => offset,
-                ord => ord,
-            },
+        match self.id.partial_cmp(&other.id()) {
+            Some(core::cmp::Ordering::Equal) | None => {
+                match self.ty.and_then(|ty| ty.partial_cmp(&other.ty())) {
+                    Some(core::cmp::Ordering::Equal) | None => self
+                        .offset
+                        .and_then(|offset| offset.partial_cmp(&other.offset.get())),
+                    ord => ord,
+                }
+            }
             ord => ord,
         }
     }
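Aside (not part of the commits): a standalone analogue of the comparison order the rewritten PartialOrd<Key> impl above follows, with plain tuples standing in for the id/type/offset fields; a missing (None) component is treated as matching anything and falls through to the next component. All names here are made up for illustration:

    use core::cmp::Ordering;

    // Illustrative only: compare a partial (id, ty?, offset?) against a full key.
    fn wildcard_cmp(partial: (u64, Option<u8>, Option<u64>), full: (u64, u8, u64)) -> Option<Ordering> {
        match partial.0.partial_cmp(&full.0) {
            // id matched (or could not be ordered): fall through to the type
            Some(Ordering::Equal) | None => match partial.1.and_then(|ty| ty.partial_cmp(&full.1)) {
                // type matched or absent: the offset (if any) decides
                Some(Ordering::Equal) | None => partial.2.and_then(|off| off.partial_cmp(&full.2)),
                ord => ord,
            },
            ord => ord,
        }
    }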
@@ -923,25 +1107,18 @@ mod partial_key_tests {
             0x8dbfc2d2, // crc of "default"
         );
 
-        let pkey = PartialKey::new(
-            Some(KnownObjectId::ChunkTree),
-            Some(ObjectType::DirItem),
-            None,
-        );
+        let pkey = PartialKey::new(KnownObjectId::ChunkTree, Some(ObjectType::DirItem), None);
         assert_eq!(pkey.partial_cmp(&key), None);
 
         let pkey = PartialKey::new(
-            Some(KnownObjectId::ChunkTree),
+            KnownObjectId::ChunkTree,
             Some(ObjectType::DirItem),
             Some(0xdeadbeef),
         );
         assert_ne!(pkey.partial_cmp(&key), Some(core::cmp::Ordering::Equal));
 
-        let pkey = PartialKey::new(None, Some(ObjectType::DirItem), Some(0xdeadbeef));
-        assert_ne!(pkey.partial_cmp(&key), None);
-
         let pkey = PartialKey::new(
-            Some(KnownObjectId::ChunkTree),
+            KnownObjectId::ChunkTree,
             Some(ObjectType::DirItem),
             Some(0x8dbfc2d2),
         );
@@ -956,25 +1133,18 @@ mod partial_key_tests {
             0x8dbfc2d2, // crc of "default"
         );
 
-        let pkey = PartialKey::new(
-            Some(KnownObjectId::ChunkTree),
-            Some(ObjectType::DirItem),
-            None,
-        );
+        let pkey = PartialKey::new(KnownObjectId::ChunkTree, Some(ObjectType::DirItem), None);
         assert!(pkey.eq(&key));
 
         let pkey = PartialKey::new(
-            Some(KnownObjectId::ChunkTree),
+            KnownObjectId::ChunkTree,
             Some(ObjectType::DirItem),
             Some(0xdeadbeef),
         );
         assert!(!pkey.eq(&key));
 
-        let pkey = PartialKey::new(None, Some(ObjectType::DirItem), Some(0xdeadbeef));
-        assert!(!pkey.eq(&key));
-
         let pkey = PartialKey::new(
-            Some(KnownObjectId::ChunkTree),
+            KnownObjectId::ChunkTree,
             Some(ObjectType::DirItem),
             Some(0x8dbfc2d2),
         );
@@ -1,15 +1,15 @@
 use core::mem::size_of;
-use core::ops::{Range, RangeBounds};
+use core::ops::RangeBounds;
 
 use alloc::collections::btree_map::Entry;
 use alloc::{collections::BTreeMap, rc::Rc, vec, vec::Vec};
 use scroll::Pread;
 
 use crate::crc32c::calculate_crc32c;
-use crate::path::Path;
+use crate::path::{NormalizedPath, Path};
 use crate::structs::{
-    Chunk, DirItemEntry, DirItemType, ExtentData, INodeItem, INodeRefEntry, Item, Key, KeyPtr,
-    KnownObjectId, ObjectType, RootItem, Stripe, Superblock, TreeItem,
+    Chunk, CompressionType, DirItemEntry, DirItemType, ExtentData, INodeItem, INodeRefEntry, Item,
+    Key, KeyPtr, KnownObjectId, ObjectType, RootItem, Stripe, Superblock, TreeItem,
 };
 use crate::{Error, Result};
 
@@ -82,8 +82,8 @@ pub struct Volume<R: super::Read> {
 // TODO: find better name
 #[derive(Debug, Clone)]
 pub struct Volume2<R: super::Read> {
-    inner: Rc<Volume<R>>,
-    roots: BTreeMap<KnownObjectId, (RootItem, Tree<R>)>,
+    pub inner: Rc<Volume<R>>,
+    pub roots: BTreeMap<KnownObjectId, (RootItem, Tree<R>)>,
 }
 
 // TODO: find better name
@@ -222,6 +222,12 @@ impl<R: super::Read> Volume<R> {
 
     pub fn read_range_from_logical(&self, logical: u64) -> Result<Option<Vec<u8>>> {
         if let Some(range) = self.range_from_logical(logical) {
+            log::debug!(
+                "reading [{}, {}) ({} bytes)",
+                range.start,
+                range.end,
+                range.end - range.start
+            );
             Ok(Some(self.read_range(range)?))
         } else {
             Ok(None)
@@ -322,14 +328,15 @@ impl<R: super::Read> Volume2<R> {
             0x8dbfc2d2, // crc of "default"
         );
 
-        let subvol_root = root_tree
-            .find_key(&key)?
-            .ok_or(Error::NoDefaultSubvolRoot)?;
+        let subvol_root = match root_tree.entry(&key)? {
+            super::tree::entry::Entry::Occupied(entry) => Some(entry.value()?),
+            super::tree::entry::Entry::Vacant(_) => None,
+        }
+        .ok_or(Error::NoDefaultSubvolRoot)?;
         // if we found the dir entry of the "default subvol" (mharmstone nomenclature)
         // we then look for the root fs tree in the root tree with the ID found in the `.location` of the dir_item only (from mharmstone)
 
         let subvol_id = subvol_root
-            .1
             .as_dir_item()
             .expect("dir item")
             .first()
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<R: super::Read> Fs<R> {
|
impl<R: super::Read> Fs<R> {
|
||||||
fn get_inode_item(&self, inode_id: u64) -> Result<Option<INodeItem>> {
|
pub fn get_inode_item(&self, inode_id: u64) -> Result<Option<INodeItem>> {
|
||||||
if let Some((item, inoderef)) = self.find_inode_ref(inode_id)? {
|
if let Some((item, inoderef)) = self.find_inode_ref(inode_id)? {
|
||||||
if let Some(diritem) = self.find_dir_index(item.key.offset.get(), &inoderef)? {
|
if let Some(diritem) = self.find_dir_index(item.key.offset.get(), &inoderef)? {
|
||||||
let inode = self.find_inode_item(&diritem)?;
|
let inode = self.find_inode_item(&diritem)?;
|
||||||
|
@@ -364,7 +371,11 @@ impl<R: super::Read> Fs<R> {
         Ok(None)
     }
 
-    fn get_root_dir(&self) -> INode {
+    pub fn root_dir_id(&self) -> KnownObjectId {
+        self.root_item.root_dirid.get().into()
+    }
+
+    pub fn get_root_dir(&self) -> INode {
         INode {
             id: self.root_item.root_dirid.get(),
             path: vec![],
@@ -389,7 +400,7 @@ impl<R: super::Read> Fs<R> {
         &self,
         inode: &INode,
     ) -> Result<impl Iterator<Item = DirItemEntry> + '_> {
-        let key = PartialKey::new(Some(inode.id()), Some(ObjectType::DirIndex), None);
+        let key = PartialKey::new(inode.id(), Some(ObjectType::DirIndex), None);
 
         let children = self.fs_root.find_range(&key)?;
 
@@ -419,33 +430,9 @@ impl<R: super::Read> Fs<R> {
         P: Path,
     {
         if path.is_absolute() {
-            // stuff
             self.get_inode_by_path(path)
         } else {
-            let path = path.normalize().into_iter();
-            let mut inode = inode;
-
-            for segment in path {
-                match segment {
-                    crate::path::Segment::ParentDir => {
-                        inode = self.get_inode_parent(&inode)?;
-                    }
-                    crate::path::Segment::File(child_name) => {
-                        let child = self
-                            .get_inode_children_inodes(&inode)?
-                            .find(|child| {
-                                child.path.last().map(|bytes| bytes.as_slice()) == Some(child_name)
-                            })
-                            .ok_or(Error::INodeNotFound)?
-                            .clone();
-                        // silly borrow checker
-                        inode = child;
-                    }
-                    _ => unreachable!(),
-                }
-            }
-
-            Ok(inode)
+            self.get_inode_by_relative_normalized_path(inode, path.normalize())
         }
     }
 
@@ -461,21 +448,33 @@ impl<R: super::Read> Fs<R> {
             _ = normalized.pop_segment();
         }
 
-        let mut inode = self.get_root_dir();
-
-        while let Some(segment) = normalized.pop_segment() {
+        self.get_inode_by_relative_normalized_path(self.get_root_dir(), normalized)
+    }
+
+    pub fn get_inode_by_relative_normalized_path(
+        &self,
+        inode: INode,
+        path: NormalizedPath,
+    ) -> Result<INode> {
+        let mut inode = inode;
+
+        for segment in path.iter() {
             match segment {
-                crate::path::Segment::Root | crate::path::Segment::NoOp => {} // do nothing
-                crate::path::Segment::CurrentDir | crate::path::Segment::ParentDir => {
-                    unimplemented!()
-                } // not normalized?
-                crate::path::Segment::File(child) => {
-                    let dir_item = self
-                        .find_inode_child(inode.id, child)?
-                        .ok_or(Error::INodeNotFound)?;
-
-                    inode = inode.into_child(dir_item.item().location.id().into(), child.to_vec());
+                crate::path::Segment::ParentDir => {
+                    inode = self.get_inode_parent(&inode)?;
                 }
+                crate::path::Segment::File(child_name) => {
+                    let child = self
+                        .get_inode_children_inodes(&inode)?
+                        .find(|child| {
+                            child.path.last().map(|bytes| bytes.as_slice()) == Some(child_name)
+                        })
+                        .ok_or(Error::INodeNotFound)?
+                        .clone();
+                    // silly borrow checker
+                    inode = child;
+                }
+                _ => unreachable!(),
             }
         }
 
@@ -485,18 +484,23 @@ impl<R: super::Read> Fs<R> {
     fn find_inode_child(&self, parent_inode: u64, child: &[u8]) -> Result<Option<DirItemEntry>> {
         let crc = calculate_crc32c(0xfffffffe, child);
         let key = PartialKey::new(
-            Some(parent_inode.into()),
+            parent_inode.into(),
             Some(ObjectType::DirItem),
             Some(crc as u64),
         );
 
-        if let Some((_, value)) = self.fs_root.find_key(&key)? {
-            let dir_items = value.as_dir_item().expect("dir index");
-            let item = dir_items.iter().find(|item| item.name() == child).cloned();
-            Ok(item)
-        } else {
-            Ok(None)
+        match self.fs_root.entry(&key)? {
+            super::tree::entry::Entry::Occupied(occupied) => {
+                let item = occupied
+                    .value()?
+                    .as_dir_item()
+                    .expect("dir item")
+                    .iter()
+                    .find(|item| item.name() == child)
+                    .cloned();
+                Ok(item)
+            }
+            super::tree::entry::Entry::Vacant(_) => Ok(None),
         }
     }
@@ -508,11 +512,10 @@ impl<R: super::Read> Fs<R> {
         }
     }
 
-    fn get_inode_extents(&self, inode_id: u64) -> Result<Vec<(u64, ExtentData)>> {
+    pub fn get_inode_extents(&self, inode_id: u64) -> Result<Vec<(u64, ExtentData)>> {
         if let Some(dir_entry) = self.get_inode_dir_index(inode_id)? {
             if dir_entry.item().ty() == DirItemType::RegFile {
-                let key =
-                    PartialKey::new(Some(inode_id.into()), Some(ObjectType::ExtentData), None);
+                let key = PartialKey::new(inode_id.into(), Some(ObjectType::ExtentData), None);
 
                 let extents = self.fs_root.find_range(&key)?;
 
@@ -534,7 +537,7 @@ impl<R: super::Read> Fs<R> {
         }
     }
 
-    fn read_inode_raw<I: RangeBounds<u64>>(&self, inode: &INode, range: I) -> Result<Vec<u8>> {
+    pub fn read_inode_raw<I: RangeBounds<u64>>(&self, inode: &INode, range: I) -> Result<Vec<u8>> {
         let mut contents = Vec::new();
         let extents = self.get_inode_extents(inode.id)?;
 
@ -550,67 +553,188 @@ impl<R: super::Read> Fs<R> {
|
||||||
core::ops::Bound::Unbounded => None,
|
core::ops::Bound::Unbounded => None,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// FIXME: offsets need to be calculated with the uncompressed length and offset
|
||||||
|
// currently are calculated with compressed length and offset afaik
|
||||||
|
|
||||||
|
log::info!("extents: {}", extents.len());
|
||||||
|
log::info!("{:?}", extents);
|
||||||
for (offset, extent) in extents.into_iter().filter(|(offset, extent)| {
|
for (offset, extent) in extents.into_iter().filter(|(offset, extent)| {
|
||||||
|
// bounds of the current extent
|
||||||
let extent_start = *offset;
|
let extent_start = *offset;
|
||||||
let extent_end = extent_start + extent.len();
|
let extent_end = extent_start + extent.len();
|
||||||
|
let extent_len = extent.len();
|
||||||
|
|
||||||
|
// entire range we want to read from the file
|
||||||
let range_len = end.map(|end| end - start);
|
let range_len = end.map(|end| end - start);
|
||||||
|
|
||||||
let start2 = start.min(extent_start);
|
// start of the UNION (from lowest bound to highest bound) of the
|
||||||
|
// current extent and the entire range
|
||||||
|
let start = start.min(extent_start);
|
||||||
|
// end of the UNION of the current extent and the entire range
|
||||||
let end = end.map(|end| end.max(extent_end));
|
let end = end.map(|end| end.max(extent_end));
|
||||||
let len = end.map(|end| (end - start2));
|
// width of the union o fthe current extent and the entire range
|
||||||
|
let len = end.map(|end| (end - start));
|
||||||
|
|
||||||
if let (Some(len), Some(range_len)) = (len, range_len) {
|
if let (Some(len), Some(range_len)) = (len, range_len) {
|
||||||
range_len + range_len < len
|
// proceed if the widths of the 2 ranges (the range we want to
|
||||||
|
// read, and the current extent) is greater than the width of
|
||||||
|
// the union range:
|
||||||
|
//
|
||||||
|
// In the first example, the 2 ranges overlap, and the width of
|
||||||
|
// the union is smaller than the sum of the widths of the ranges:
|
||||||
|
//
|
||||||
|
// |------range-1------|
|
||||||
|
// |---range-2----|
|
||||||
|
// |-----width-of-union-----|
|
||||||
|
// |-------sum----|-of---widths-------|
|
||||||
|
// |------------width-of-union------------|
|
||||||
|
// |------range-1------|
|
||||||
|
// |---range-2----|
|
||||||
|
//
|
||||||
|
// In this second example, the ranges do not overlap, and the
|
||||||
|
// width of the union is equal to or greater than the sum of the
|
||||||
|
// widths.
|
||||||
|
len < extent_len + range_len
|
||||||
} else {
|
} else {
|
||||||
start2 < extent_end
|
start < extent_end
|
||||||
}
|
}
|
||||||
}) {
|
}) {
|
||||||
//
|
|
||||||
let start = start.saturating_sub(offset);
|
let start = start.saturating_sub(offset);
|
||||||
|
|
||||||
let end = end.map(|end| end - offset);
|
let end = end.map(|end| end - offset).unwrap_or(start + extent.len());
|
||||||
|
let len = end - start;
|
||||||
|
|
||||||
log::info!("reading {}..{:?} from extent.", start, end);
|
log::info!("reading {}..{:?} from extent.", start, end);
|
||||||
|
|
||||||
let data: alloc::borrow::Cow<[u8]> = match &extent {
|
let data: alloc::borrow::Cow<[u8]> = match &extent {
|
||||||
ExtentData::Inline { data, .. } => {
|
ExtentData::Inline { data, .. } => (&data[start as usize..end as usize]).into(),
|
||||||
// TODO: handle compression and encryption
|
|
||||||
let data = if let Some(end) = end {
|
|
||||||
&data[start as usize..end as usize]
|
|
||||||
} else {
|
|
||||||
&data[start as usize..]
|
|
||||||
};
|
|
||||||
|
|
||||||
data.into()
|
|
||||||
}
|
|
||||||
ExtentData::Other(extent) => {
|
ExtentData::Other(extent) => {
|
||||||
let address = extent.address() + extent.offset() + start;
|
let address = extent.address() + extent.offset();
|
||||||
let data = self
|
let address = self
|
||||||
.volume
|
.volume
|
||||||
.inner
|
.inner
|
||||||
.read_range(address..address + end.unwrap_or(extent.num_bytes()))
|
.offset_from_logical(address)
|
||||||
.expect("bytes");
|
.ok_or(Error::BadLogicalAddress)?;
|
||||||
|
|
||||||
|
let range = match extent.extent_data1().compression() {
|
||||||
|
// compressed size
|
||||||
|
CompressionType::Zlib | CompressionType::Lzo | CompressionType::ZStd => {
|
||||||
|
address..address + extent.size()
|
||||||
|
}
|
||||||
|
_ => address + start..address + start + len,
|
||||||
|
};
|
||||||
|
|
||||||
|
let data = self.volume.inner.read_range(range).expect("bytes");
|
||||||
data.into()
|
data.into()
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
log::info!("reading {} bytes from file", data.len());
|
log::info!("reading {} bytes from file", data.len());
|
||||||
contents.extend_from_slice(&data);
|
log::info!("compression: {:?}", extent.header().compression());
|
||||||
|
|
||||||
|
match extent.header().compression() {
|
||||||
|
CompressionType::None => {
|
||||||
|
contents.extend_from_slice(&data);
|
||||||
|
}
|
||||||
|
CompressionType::Zlib => {
|
||||||
|
let mut state = miniz_oxide::inflate::stream::InflateState::new(
|
||||||
|
miniz_oxide::DataFormat::Zlib,
|
||||||
|
);
|
||||||
|
let mut output_data = vec![0u8; extent.header().decoded_size() as usize];
|
||||||
|
let mut output = &mut output_data[..];
|
||||||
|
let mut data = &data[..];
|
||||||
|
loop {
|
||||||
|
let result = miniz_oxide::inflate::stream::inflate(
|
||||||
|
&mut state,
|
||||||
|
&data,
|
||||||
|
&mut output,
|
||||||
|
miniz_oxide::MZFlush::None,
|
||||||
|
);
|
||||||
|
|
||||||
|
match result.status.map_err(|_| Error::DecompressionError)? {
|
||||||
|
miniz_oxide::MZStatus::Ok => {}
|
||||||
|
miniz_oxide::MZStatus::StreamEnd => break,
|
||||||
|
_ => {
|
||||||
|
log::error!("need dict ?!");
|
||||||
|
return Err(Error::DecompressionError);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
data = &data[result.bytes_consumed..];
|
||||||
|
output = &mut output[result.bytes_written..];
|
||||||
|
}
|
||||||
|
_ = miniz_oxide::inflate::stream::inflate(
|
||||||
|
&mut state,
|
||||||
|
&data,
|
||||||
|
&mut output,
|
||||||
|
miniz_oxide::MZFlush::Finish,
|
||||||
|
)
|
||||||
|
.status
|
||||||
|
.map_err(|_| Error::DecompressionError)?;
|
||||||
|
|
||||||
|
// copy only the requested window of the inflated data
|
||||||
|
contents
|
||||||
|
.extend_from_slice(&output_data[start as usize..(start + len) as usize]);
|
||||||
|
}
|
||||||
|
CompressionType::Lzo => {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
CompressionType::ZStd => {
|
||||||
|
let mut output_data = vec![0u8; extent.header().decoded_size() as usize];
|
||||||
|
|
||||||
|
let mut zstd = zstd_safe::DCtx::create();
|
||||||
|
zstd.init().map_err(|e| {
|
||||||
|
log::error!("zstd init error: {}", zstd_safe::get_error_name(e));
|
||||||
|
Error::DecompressionError
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut input = zstd_safe::InBuffer::around(&data);
|
||||||
|
let mut output = zstd_safe::OutBuffer::around(&mut output_data[..]);
|
||||||
|
|
||||||
|
loop {
|
||||||
|
match zstd.decompress_stream(&mut output, &mut input) {
|
||||||
|
Ok(len) => {
|
||||||
|
if len == 0 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
log::error!(
|
||||||
|
"zstd decompress stream error: {}",
|
||||||
|
zstd_safe::get_error_name(e)
|
||||||
|
);
|
||||||
|
return Err(Error::DecompressionError);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if output.pos() == extent.header().decoded_size() as usize {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
contents
|
||||||
|
.extend_from_slice(&output_data[start as usize..(start + len) as usize]);
|
||||||
|
}
|
||||||
|
c => {
|
||||||
|
log::error!("invalid compression type {:?}", c);
|
||||||
|
contents.extend_from_slice(&data);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(contents)
|
Ok(contents)
|
||||||
}
|
}
|
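The extent filter above relies on the union-width argument from its comments: two half-open ranges intersect exactly when the width of their union is smaller than the sum of their widths. A standalone sketch of that predicate (not a verbatim copy of the filter; `None` models an unbounded end, as in read_inode_raw):

```rust
/// True if [a_start, a_end) and [b_start, b_end) overlap, using the
/// union-width test described in read_inode_raw's comments.
fn overlaps(a_start: u64, a_end: Option<u64>, b_start: u64, b_end: u64) -> bool {
    let union_start = a_start.min(b_start);
    match a_end {
        Some(a_end) => {
            let union_len = a_end.max(b_end) - union_start;
            // strictly greater: touching half-open ranges do not overlap
            (a_end - a_start) + (b_end - b_start) > union_len
        }
        // an unbounded range overlaps anything ending after its start
        None => b_end > a_start,
    }
}

#[test]
fn union_width_overlap() {
    assert!(overlaps(0, Some(10), 5, 20));
    assert!(!overlaps(0, Some(10), 10, 20));
}
```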
||||||
|
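The Zlib branch drives miniz_oxide's streaming inflate so that the output buffer can be sized up front from ExtentData1::decoded_size. For a quick sanity check outside the extent machinery, the crate's one-shot helpers do the same job; a round-trip sketch, independent of this library:

```rust
// Compress and inflate a buffer with miniz_oxide's one-shot helpers; the
// streaming InflateState path above is equivalent but lets the caller own
// and pre-size the output buffer.
fn zlib_roundtrip(payload: &[u8]) -> bool {
    let compressed = miniz_oxide::deflate::compress_to_vec_zlib(payload, 6);
    match miniz_oxide::inflate::decompress_to_vec_zlib(&compressed) {
        Ok(inflated) => inflated == payload,
        Err(_) => false,
    }
}
```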
|
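The ZStd branch follows zstd-safe's streaming pattern: create a DCtx, wrap the compressed extent in an InBuffer and the pre-sized output in an OutBuffer, then call decompress_stream until the frame is complete or the output reaches decoded_size. Pulled out of the extent loop, the same calls look like this (a sketch with simplified error handling):

```rust
/// Decompress `src` into exactly `decoded_size` bytes using the same
/// zstd-safe streaming calls as the ZStd branch above.
fn zstd_decompress(src: &[u8], decoded_size: usize) -> Option<Vec<u8>> {
    let mut out = vec![0u8; decoded_size];
    let mut dctx = zstd_safe::DCtx::create();
    dctx.init().ok()?;

    let mut input = zstd_safe::InBuffer::around(src);
    let mut output = zstd_safe::OutBuffer::around(&mut out[..]);
    loop {
        // The Ok value is a hint of how much more input zstd expects;
        // 0 means the frame has been fully decoded.
        let hint = dctx.decompress_stream(&mut output, &mut input).ok()?;
        if hint == 0 || output.pos() == decoded_size {
            break;
        }
    }
    drop(output); // release the borrow of `out`
    Some(out)
}
```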
||||||
fn find_inode_ref(&self, inode_id: u64) -> Result<Option<(Item, INodeRefEntry)>> {
|
fn find_inode_ref(&self, inode_id: u64) -> Result<Option<(Item, INodeRefEntry)>> {
|
||||||
let key = PartialKey::new(Some(inode_id.into()), Some(ObjectType::INodeRef), None);
|
let key = PartialKey::new(inode_id.into(), Some(ObjectType::INodeRef), None);
|
||||||
|
|
||||||
if let Some((item, value)) = self.fs_root.find_key(&key)? {
|
match self.fs_root.entry(&key)? {
|
||||||
let inode = value.as_inode_ref().expect("inoderef").clone();
|
super::tree::entry::Entry::Occupied(entry) => {
|
||||||
|
entry.item_and_value().map(|(item, value)| {
|
||||||
Ok(Some((item, inode)))
|
Some((item, value.as_inode_ref().expect("inode ref").clone()))
|
||||||
} else {
|
})
|
||||||
Ok(None)
|
}
|
||||||
|
super::tree::entry::Entry::Vacant(_) => Ok(None),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -621,29 +745,31 @@ impl<R: super::Read> Fs<R> {
|
||||||
) -> Result<Option<DirItemEntry>> {
|
) -> Result<Option<DirItemEntry>> {
|
||||||
//let crc = calculate_crc32c(0xfffffffe, &inoderef.name());
|
//let crc = calculate_crc32c(0xfffffffe, &inoderef.name());
|
||||||
let key = PartialKey::new(
|
let key = PartialKey::new(
|
||||||
Some(parent_inode.into()),
|
parent_inode.into(),
|
||||||
Some(ObjectType::DirIndex),
|
Some(ObjectType::DirIndex),
|
||||||
Some(inoderef.item().index.get()),
|
Some(inoderef.item().index.get()),
|
||||||
);
|
);
|
||||||
|
|
||||||
if let Some((_, value)) = self.fs_root.find_key(&key)? {
|
match self.fs_root.entry(&key)? {
|
||||||
let dir_index = value.as_dir_index().expect("dir index").clone();
|
super::tree::entry::Entry::Occupied(entry) => entry
|
||||||
Ok(Some(dir_index))
|
.item_and_value()
|
||||||
} else {
|
.map(|(_, value)| Some(value.as_dir_index().expect("dir index").clone())),
|
||||||
Ok(None)
|
super::tree::entry::Entry::Vacant(_) => Ok(None),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn find_inode_item(&self, dir_item: &DirItemEntry) -> Result<Option<INodeItem>> {
|
fn find_inode_item(&self, dir_item: &DirItemEntry) -> Result<Option<INodeItem>> {
|
||||||
dir_item.item().location;
|
match self.fs_root.entry(&dir_item.item().location)? {
|
||||||
if let Some((_, value)) = self.fs_root.find_key(&dir_item.item().location)? {
|
super::tree::entry::Entry::Occupied(entry) => entry
|
||||||
let inode = value.as_inode_item().expect("inode item").clone();
|
.item_and_value()
|
||||||
|
.map(|(_, value)| Some(value.as_inode_item().expect("inode item").clone())),
|
||||||
Ok(Some(inode))
|
super::tree::entry::Entry::Vacant(_) => Ok(None),
|
||||||
} else {
|
|
||||||
Ok(None)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn fs_root(&self) -> &Tree<R> {
|
||||||
|
&self.fs_root
|
||||||
|
}
|
||||||
}
|
}
|
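The three find_* helpers above now share one shape after the move to the tree's Entry API: match on Occupied versus Vacant, project the value into the concrete item type, and fall back to Ok(None). Sketched with a local stand-in enum (not the crate's super::tree::entry::Entry):

```rust
// Local stand-in to show the lookup shape only.
enum Entry<V> {
    Occupied(V),
    Vacant,
}

// Occupied entries are projected into the concrete item; Vacant becomes None.
fn project<V, T, E>(entry: Entry<V>, f: impl FnOnce(V) -> T) -> Result<Option<T>, E> {
    match entry {
        Entry::Occupied(value) => Ok(Some(f(value))),
        Entry::Vacant => Ok(None),
    }
}
```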
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
@ -674,7 +800,7 @@ mod tests {
|
||||||
let v2 = vol.into_volume2().expect("volume2");
|
let v2 = vol.into_volume2().expect("volume2");
|
||||||
|
|
||||||
log::info!("roots:");
|
log::info!("roots:");
|
||||||
for (id, v) in v2.roots.iter() {
|
for (id, _) in v2.roots.iter() {
|
||||||
log::info!("[{id:?}] ");
|
log::info!("[{id:?}] ");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -686,11 +812,11 @@ mod tests {
|
||||||
let v2 = vol.into_volume2().expect("volume2");
|
let v2 = vol.into_volume2().expect("volume2");
|
||||||
|
|
||||||
log::info!("roots:");
|
log::info!("roots:");
|
||||||
for (id, v) in v2.roots.iter() {
|
for (id, _) in v2.roots.iter() {
|
||||||
log::info!("[{id:?}] ");
|
log::info!("[{id:?}] ");
|
||||||
}
|
}
|
||||||
log::info!("roots rev:");
|
log::info!("roots rev:");
|
||||||
for (id, v) in v2.roots.iter().rev() {
|
for (id, _) in v2.roots.iter().rev() {
|
||||||
log::info!("[{id:?}] ");
|
log::info!("[{id:?}] ");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -713,8 +839,8 @@ mod tests {
|
||||||
0x8dbfc2d2, // crc of "default"
|
0x8dbfc2d2, // crc of "default"
|
||||||
);
|
);
|
||||||
|
|
||||||
let subvol_root = root_tree.find_node(&key)?;
|
let subvol_root = root_tree.entry(&key)?;
|
||||||
let other = root_tree.find_node_rev(&key)?;
|
let other = root_tree.entry_rev(&key)?;
|
||||||
assert_eq!(subvol_root, other);
|
assert_eq!(subvol_root, other);
|
||||||
log::info!("{subvol_root:?}");
|
log::info!("{subvol_root:?}");
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -726,7 +852,7 @@ mod tests {
|
||||||
let vol = Volume::new(file).expect("volume");
|
let vol = Volume::new(file).expect("volume");
|
||||||
let v2 = vol.into_volume2().expect("volume2");
|
let v2 = vol.into_volume2().expect("volume2");
|
||||||
|
|
||||||
let fs = v2.default_subvolume().expect("subvol");
|
_ = v2.default_subvolume().expect("subvol");
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -738,7 +864,7 @@ mod tests {
|
||||||
let fs = v2.default_subvolume().expect("default subvol");
|
let fs = v2.default_subvolume().expect("default subvol");
|
||||||
|
|
||||||
let search_key = PartialKey::new(
|
let search_key = PartialKey::new(
|
||||||
Some(fs.root_item.root_dirid.get().into()),
|
fs.root_item.root_dirid.get().into(),
|
||||||
Some(ObjectType::DirIndex),
|
Some(ObjectType::DirIndex),
|
||||||
None,
|
None,
|
||||||
);
|
);
|
||||||
|
@ -829,7 +955,7 @@ mod tests {
|
||||||
log::info!("files 1:");
|
log::info!("files 1:");
|
||||||
let now = std::time::Instant::now();
|
let now = std::time::Instant::now();
|
||||||
for (_id, entry) in fs.fs_root.iter() {
|
for (_id, entry) in fs.fs_root.iter() {
|
||||||
if let Some(dir) = entry.as_dir_index() {
|
if let Some(_dir) = entry.as_dir_index() {
|
||||||
//log::info!("{}", dir.name_as_string_lossy());
|
//log::info!("{}", dir.name_as_string_lossy());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -838,7 +964,7 @@ mod tests {
|
||||||
log::info!("files 2:");
|
log::info!("files 2:");
|
||||||
let now = std::time::Instant::now();
|
let now = std::time::Instant::now();
|
||||||
for (_id, entry) in fs.fs_root.iter() {
|
for (_id, entry) in fs.fs_root.iter() {
|
||||||
if let Some(dir) = entry.as_dir_index() {
|
if let Some(_dir) = entry.as_dir_index() {
|
||||||
//log::info!("{}", dir.name_as_string_lossy());
|
//log::info!("{}", dir.name_as_string_lossy());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1 +1,223 @@
|
||||||
|
use btrfs::structs::{ExtentData, ObjectType};
|
||||||
|
use btrfs::v2::error::Result;
|
||||||
|
use btrfs::v2::tree::PartialKey;
|
||||||
|
use btrfs::v2::volume::*;
|
||||||
|
use include_blob::include_blob;
|
||||||
|
|
||||||
|
fn open_filesystem() -> Result<std::rc::Rc<Volume2<&'static [u8]>>> {
|
||||||
|
let filesystem_data = include_blob!("simple.img").as_slice();
|
||||||
|
|
||||||
|
let volume = Volume::new(filesystem_data)?.into_volume2()?;
|
||||||
|
|
||||||
|
Ok(volume)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn open_filesystem_lzo() -> Result<std::rc::Rc<Volume2<&'static [u8]>>> {
|
||||||
|
let filesystem_data = include_blob!("compressed-lzo.img").as_slice();
|
||||||
|
|
||||||
|
let volume = Volume::new(filesystem_data)?.into_volume2()?;
|
||||||
|
|
||||||
|
Ok(volume)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn open_filesystem_zlib() -> Result<std::rc::Rc<Volume2<&'static [u8]>>> {
|
||||||
|
let filesystem_data = include_blob!("compressed-zlib.img").as_slice();
|
||||||
|
|
||||||
|
let volume = Volume::new(filesystem_data)?.into_volume2()?;
|
||||||
|
|
||||||
|
Ok(volume)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn open_filesystem_zstd() -> Result<std::rc::Rc<Volume2<&'static [u8]>>> {
|
||||||
|
let filesystem_data = include_blob!("compressed-zstd.img").as_slice();
|
||||||
|
|
||||||
|
let volume = Volume::new(filesystem_data)?.into_volume2()?;
|
||||||
|
|
||||||
|
Ok(volume)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test_log::test]
|
||||||
|
fn open_filesystem_smoke() -> Result<()> {
|
||||||
|
_ = open_filesystem()?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test_log::test]
|
||||||
|
fn read_superblock() -> Result<()> {
|
||||||
|
let vol2 = open_filesystem()?;
|
||||||
|
let sb = vol2.inner.superblock();
|
||||||
|
println!("{sb:#?}");
|
||||||
|
|
||||||
|
assert!(sb.verify_magic());
|
||||||
|
assert!(sb.verify_checksum());
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test_log::test]
|
||||||
|
fn iter_roots() -> Result<()> {
|
||||||
|
let vol2 = open_filesystem()?;
|
||||||
|
for (id, _) in vol2.roots.iter() {
|
||||||
|
log::info!("[{id:?}] ");
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test_log::test]
|
||||||
|
fn iter_roots_rev() -> Result<()> {
|
||||||
|
let vol2 = open_filesystem()?;
|
||||||
|
for (id, _) in vol2.roots.iter().rev() {
|
||||||
|
log::info!("[{id:?}] ");
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test_log::test]
|
||||||
|
fn iter_default_subvol() -> Result<()> {
|
||||||
|
let v2 = open_filesystem()?;
|
||||||
|
let fs = v2.default_subvolume().expect("default subvol");
|
||||||
|
|
||||||
|
log::info!("files 1:");
|
||||||
|
let now = std::time::Instant::now();
|
||||||
|
for (_id, entry) in fs.fs_root().iter() {
|
||||||
|
if let Some(_dir) = entry.as_dir_index() {
|
||||||
|
//log::info!("{}", dir.name_as_string_lossy());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
log::info!("files 1: [took {}ms]", now.elapsed().as_millis());
|
||||||
|
|
||||||
|
log::info!("files 2:");
|
||||||
|
let now = std::time::Instant::now();
|
||||||
|
for (_id, entry) in fs.fs_root().iter() {
|
||||||
|
if let Some(_dir) = entry.as_dir_index() {
|
||||||
|
//log::info!("{}", dir.name_as_string_lossy());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
log::info!("files 2: [took {}ms]", now.elapsed().as_millis());
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test_log::test]
|
||||||
|
fn get_inode_items() -> Result<()> {
|
||||||
|
let v2 = open_filesystem()?;
|
||||||
|
let fs = v2.default_subvolume().expect("default subvol");
|
||||||
|
|
||||||
|
let search_key = PartialKey::new(fs.root_dir_id(), Some(ObjectType::DirIndex), None);
|
||||||
|
|
||||||
|
// with range
|
||||||
|
|
||||||
|
log::info!("range:");
|
||||||
|
for (key, v) in fs.fs_root().find_range(&search_key)? {
|
||||||
|
let dirindex = v.as_dir_index().unwrap();
|
||||||
|
let inode_id: u64 = dirindex.item().location.id().into();
|
||||||
|
log::info!("[{key:?}] {v:#?}");
|
||||||
|
log::info!("inode: {inode_id}");
|
||||||
|
|
||||||
|
let inode_item = fs.get_inode_item(inode_id)?;
|
||||||
|
log::info!("inode: {inode_item:#?}");
|
||||||
|
let extents = fs.get_inode_extents(inode_id)?;
|
||||||
|
|
||||||
|
for (_, extent) in extents {
|
||||||
|
match extent {
|
||||||
|
ExtentData::Inline { header, data } => {
|
||||||
|
log::info!("{header:?}\n{}", String::from_utf8_lossy(&data));
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
log::info!("range: [end]");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test_log::test]
|
||||||
|
fn find_file() -> Result<()> {
|
||||||
|
let v2 = open_filesystem()?;
|
||||||
|
let fs = v2.default_subvolume().expect("default subvol");
|
||||||
|
|
||||||
|
let root_dir = fs.get_root_dir();
|
||||||
|
let children = fs.get_inode_children(&root_dir)?.collect::<Vec<_>>();
|
||||||
|
log::info!("chidlren: {:?}", children);
|
||||||
|
|
||||||
|
let licence = fs.get_inode_by_path(b"/quibble/LICENCE")?;
|
||||||
|
let file_contents = fs.read_inode_raw(&licence, ..).expect("file contents");
|
||||||
|
assert_eq!(
|
||||||
|
&file_contents[..52],
|
||||||
|
b" GNU LESSER GENERAL PUBLIC LICENSE"
|
||||||
|
);
|
||||||
|
log::info!("license file:");
|
||||||
|
log::info!("{}", String::from_utf8_lossy(&file_contents));
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test_log::test]
|
||||||
|
fn find_file_zlib() -> Result<()> {
|
||||||
|
let v2 = open_filesystem_zlib()?;
|
||||||
|
let fs = v2.default_subvolume().expect("default subvol");
|
||||||
|
|
||||||
|
let root_dir = fs.get_root_dir();
|
||||||
|
let children = fs.get_inode_children(&root_dir)?.collect::<Vec<_>>();
|
||||||
|
log::info!("chidlren: {:?}", children);
|
||||||
|
|
||||||
|
let licence = fs.get_inode_by_path(b"/quibble/LICENCE")?;
|
||||||
|
let file_contents = fs
|
||||||
|
.read_inode_raw(&licence, ..100)
|
||||||
|
.expect("file contents");
|
||||||
|
//assert_eq!(&file_contents[..11], b"hello world");
|
||||||
|
log::info!("license file:");
|
||||||
|
log::info!("{}", String::from_utf8_lossy(&file_contents));
|
||||||
|
assert_eq!(
|
||||||
|
&file_contents[..52],
|
||||||
|
b" GNU LESSER GENERAL PUBLIC LICENSE"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test_log::test]
|
||||||
|
fn find_file_lzo() -> Result<()> {
|
||||||
|
let v2 = open_filesystem_lzo()?;
|
||||||
|
let fs = v2.default_subvolume().expect("default subvol");
|
||||||
|
|
||||||
|
let root_dir = fs.get_root_dir();
|
||||||
|
let children = fs.get_inode_children(&root_dir)?.collect::<Vec<_>>();
|
||||||
|
log::info!("chidlren: {:?}", children);
|
||||||
|
|
||||||
|
let licence = fs.get_inode_by_path(b"/quibble/LICENCE")?;
|
||||||
|
let file_contents = fs.read_inode_raw(&licence, ..).expect("file contents");
|
||||||
|
assert_eq!(
|
||||||
|
&file_contents[..52],
|
||||||
|
b" GNU LESSER GENERAL PUBLIC LICENSE"
|
||||||
|
);
|
||||||
|
log::info!("license file:");
|
||||||
|
log::info!("{}", String::from_utf8_lossy(&file_contents));
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test_log::test]
|
||||||
|
fn find_file_zstd() -> Result<()> {
|
||||||
|
let v2 = open_filesystem_zstd()?;
|
||||||
|
let fs = v2.default_subvolume().expect("default subvol");
|
||||||
|
|
||||||
|
let root_dir = fs.get_root_dir();
|
||||||
|
let children = fs.get_inode_children(&root_dir)?.collect::<Vec<_>>();
|
||||||
|
log::info!("chidlren: {:?}", children);
|
||||||
|
|
||||||
|
let licence = fs.get_inode_by_path(b"/quibble/LICENCE")?;
|
||||||
|
let file_contents = fs
|
||||||
|
.read_inode_raw(&licence, ..100)
|
||||||
|
.expect("file contents");
|
||||||
|
assert_eq!(
|
||||||
|
&file_contents[..52],
|
||||||
|
b" GNU LESSER GENERAL PUBLIC LICENSE"
|
||||||
|
);
|
||||||
|
log::info!("license file:");
|
||||||
|
log::info!("{}", String::from_utf8_lossy(&file_contents));
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|