zstd decompression support

- extracted common code out of get_inode_by_path functions
Janis 2023-04-12 20:29:42 +02:00
parent 43a92bedcf
commit 05c0c7df8f
2 changed files with 81 additions and 58 deletions
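
The heart of the change is the new CompressionType::ZStd arm in the file-read path, which streams the compressed extent through a zstd_safe decompression context until the extent header's decoded size has been produced. A rough standalone sketch of that loop, using the same zstd-safe 6.x calls that appear in the diff (the free-standing decompress_zstd_extent helper, its String error type, and the decoded_size parameter standing in for extent.header().decoded_size() are illustrative only):

use zstd_safe::{DCtx, InBuffer, OutBuffer};

fn decompress_zstd_extent(compressed: &[u8], decoded_size: usize) -> Result<Vec<u8>, String> {
    // Output buffer sized up front from the known uncompressed length.
    let mut decompressed = vec![0u8; decoded_size];
    let mut dctx = DCtx::create();
    dctx.init()
        .map_err(|code| zstd_safe::get_error_name(code).to_string())?;

    let mut input = InBuffer::around(compressed);
    let mut output = OutBuffer::around(&mut decompressed[..]);
    loop {
        // decompress_stream returns a hint of how many more input bytes the
        // decoder expects; 0 means the current frame was fully decoded.
        let hint = dctx
            .decompress_stream(&mut output, &mut input)
            .map_err(|code| zstd_safe::get_error_name(code).to_string())?;
        if hint == 0 || output.pos() >= decoded_size {
            break;
        }
    }
    Ok(decompressed)
}

In the commit itself the output buffer is sized from extent.header().decoded_size(), and the decoded bytes are then sliced by the requested file offset range before being appended to contents.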

@@ -24,6 +24,7 @@ num_enum = {version = "0.5.11", default-features = false}
 replace_with = "0.1.7"
 miniz_oxide = {version = "0.7.1"}
+zstd-safe = "6.0.5+zstd.1.5.4"

 [dev-dependencies]

@@ -6,7 +6,7 @@ use alloc::{collections::BTreeMap, rc::Rc, vec, vec::Vec};
 use scroll::Pread;

 use crate::crc32c::calculate_crc32c;
-use crate::path::Path;
+use crate::path::{NormalizedPath, Path};
 use crate::structs::{
     Chunk, CompressionType, DirItemEntry, DirItemType, ExtentData, INodeItem, INodeRefEntry, Item,
     Key, KeyPtr, KnownObjectId, ObjectType, RootItem, Stripe, Superblock, TreeItem,
@@ -430,13 +430,35 @@ impl<R: super::Read> Fs<R> {
         P: Path,
     {
         if path.is_absolute() {
-            // stuff
             self.get_inode_by_path(path)
         } else {
-            let path = path.normalize().into_iter();
+            self.get_inode_by_relative_normalized_path(inode, path.normalize())
+        }
+    }
+
+    pub fn get_inode_by_path<P>(&self, path: P) -> Result<INode>
+    where
+        P: Path,
+    {
+        let mut normalized = path.normalize();
+        if !path.is_absolute() {
+            log::error!("path is not absolute!");
+        } else {
+            // pop root
+            _ = normalized.pop_segment();
+        }
+        self.get_inode_by_relative_normalized_path(self.get_root_dir(), normalized)
+    }
+
+    pub fn get_inode_by_relative_normalized_path(
+        &self,
+        inode: INode,
+        path: NormalizedPath,
+    ) -> Result<INode> {
         let mut inode = inode;
-        for segment in path {
+        for segment in path.iter() {
             match segment {
                 crate::path::Segment::ParentDir => {
                     inode = self.get_inode_parent(&inode)?;
@@ -458,40 +480,6 @@ impl<R: super::Read> Fs<R> {
         Ok(inode)
     }
-    }
-
-    pub fn get_inode_by_path<P>(&self, path: P) -> Result<INode>
-    where
-        P: Path,
-    {
-        let mut normalized = path.normalize();
-        if !path.is_absolute() {
-            log::error!("path is not absolute!");
-        } else {
-            // pop root
-            _ = normalized.pop_segment();
-        }
-        let mut inode = self.get_root_dir();
-        while let Some(segment) = normalized.pop_segment() {
-            match segment {
-                crate::path::Segment::Root | crate::path::Segment::NoOp => {} // do nothing
-                crate::path::Segment::CurrentDir | crate::path::Segment::ParentDir => {
-                    unimplemented!()
-                } // not normalized?
-                crate::path::Segment::File(child) => {
-                    let dir_item = self
-                        .find_inode_child(inode.id, child)?
-                        .ok_or(Error::INodeNotFound)?;
-                    inode = inode.into_child(dir_item.item().location.id().into(), child.to_vec());
-                }
-            }
-        }
-        Ok(inode)
-    }
-
     fn find_inode_child(&self, parent_inode: u64, child: &[u8]) -> Result<Option<DirItemEntry>> {
         let crc = calculate_crc32c(0xfffffffe, child);
@@ -565,6 +553,9 @@ impl<R: super::Read> Fs<R> {
             core::ops::Bound::Unbounded => None,
         };

+        // FIXME: offsets need to be calculated with the uncompressed length and offset
+        // currently are calculated with compressed length and offset afaik
         log::info!("extents: {}", extents.len());
         log::info!("{:?}", extents);
         for (offset, extent) in extents.into_iter().filter(|(offset, extent)| {
@@ -608,7 +599,6 @@ impl<R: super::Read> Fs<R> {
                 start < extent_end
             }
         }) {
-            //
             let start = start.saturating_sub(offset);
             let end = end.map(|end| end - offset).unwrap_or(start + extent.len());
@@ -619,22 +609,20 @@ impl<R: super::Read> Fs<R> {
             let data: alloc::borrow::Cow<[u8]> = match &extent {
                 ExtentData::Inline { data, .. } => (&data[start as usize..end as usize]).into(),
                 ExtentData::Other(extent) => {
-                    let address = extent.address() + extent.offset() + start;
+                    let address = extent.address() + extent.offset();
                     let address = self
                         .volume
                         .inner
                         .offset_from_logical(address)
                         .ok_or(Error::BadLogicalAddress)?;
-                    let range = address
-                        ..(address
-                            + match extent.extent_data1().compression() {
-                                // compressed size
-                                CompressionType::Zlib
-                                | CompressionType::Lzo
-                                | CompressionType::ZStd => extent.size(),
-                                _ => len,
-                            });
+                    let range = match extent.extent_data1().compression() {
+                        // compressed size
+                        CompressionType::Zlib | CompressionType::Lzo | CompressionType::ZStd => {
+                            address..address + extent.size()
+                        }
+                        _ => address + start..address + start + len,
+                    };

                     let data = self.volume.inner.read_range(range).expect("bytes");
                     data.into()
@@ -644,8 +632,10 @@ impl<R: super::Read> Fs<R> {
             log::info!("reading {} bytes from file", data.len());
             log::info!("compression: {:?}", extent.header().compression());

-            let data = match extent.header().compression() {
-                CompressionType::None => data,
+            match extent.header().compression() {
+                CompressionType::None => {
+                    contents.extend_from_slice(&data);
+                }
                 CompressionType::Zlib => {
                     let mut state = miniz_oxide::inflate::stream::InflateState::new(
                         miniz_oxide::DataFormat::Zlib,
@@ -682,22 +672,54 @@ impl<R: super::Read> Fs<R> {
                         .status
                         .map_err(|_| Error::DecompressionError)?;
-                    output_data.into()
+                    // truncate inflated data if needed
+                    contents
+                        .extend_from_slice(&output_data[start as usize..(start + len) as usize]);
                 }
                 CompressionType::Lzo => {
                     todo!()
                 }
                 CompressionType::ZStd => {
-                    todo!()
+                    let mut output_data = vec![0u8; extent.header().decoded_size() as usize];
+                    let mut zstd = zstd_safe::DCtx::create();
+                    zstd.init().map_err(|e| {
+                        log::error!("zstd init error: {}", zstd_safe::get_error_name(e));
+                        Error::DecompressionError
+                    })?;
+
+                    let mut input = zstd_safe::InBuffer::around(&data);
+                    let mut output = zstd_safe::OutBuffer::around(&mut output_data[..]);
+                    loop {
+                        match zstd.decompress_stream(&mut output, &mut input) {
+                            Ok(len) => {
+                                if len == 0 {
+                                    break;
+                                }
+                            }
+                            Err(e) => {
+                                log::error!(
+                                    "zstd decompress stream error: {}",
+                                    zstd_safe::get_error_name(e)
+                                );
+                                return Err(Error::DecompressionError);
+                            }
+                        }
+
+                        if output.pos() == extent.header().decoded_size() as usize {
+                            break;
+                        }
+                    }
+
+                    contents
+                        .extend_from_slice(&output_data[start as usize..(start + len) as usize]);
                 }
                 c => {
                     log::error!("invalid compression type {:?}", c);
-                    data
+                    contents.extend_from_slice(&data);
                 }
-            };
-            // truncate inflated data if needed
-            contents.extend_from_slice(&data[..len as usize]);
+            }
         }
         Ok(contents)