Cleanup chunk reading

Serial 2022-01-04 21:38:52 -05:00
parent 76e788243f
commit 6e61f07538
5 changed files with 55 additions and 32 deletions

View file

@@ -76,7 +76,9 @@ pub enum LoftyError {
/// Errors that arise while parsing OGG pages
OggPage(ogg_pager::PageError),
/// Unable to convert bytes to a String
FromUtf8(std::string::FromUtf8Error),
StringFromUtf8(std::string::FromUtf8Error),
/// Unable to convert bytes to a str
StrFromUtf8(std::str::Utf8Error),
/// Represents all cases of [`std::io::Error`].
Io(std::io::Error),
}
@@ -86,7 +88,8 @@ impl Display for LoftyError {
match self {
// Conversions
LoftyError::OggPage(ref err) => write!(f, "{}", err),
LoftyError::FromUtf8(ref err) => write!(f, "{}", err),
LoftyError::StringFromUtf8(ref err) => write!(f, "{}", err),
LoftyError::StrFromUtf8(ref err) => write!(f, "{}", err),
LoftyError::Io(ref err) => write!(f, "{}", err),
LoftyError::BadExtension(ext) => write!(f, "Found unknown file extension \"{}\"", ext),
@@ -160,6 +163,12 @@ impl From<std::io::Error> for LoftyError {
impl From<std::string::FromUtf8Error> for LoftyError {
fn from(input: std::string::FromUtf8Error) -> Self {
LoftyError::FromUtf8(input)
LoftyError::StringFromUtf8(input)
}
}
impl From<std::str::Utf8Error> for LoftyError {
fn from(input: std::str::Utf8Error) -> Self {
LoftyError::StrFromUtf8(input)
}
}
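
Side note: with both From impls in place, the ? operator converts either UTF-8 failure into its own variant without an explicit map_err. A minimal, self-contained sketch of the pattern, using a trimmed-down enum rather than the full LoftyError:

// Trimmed-down stand-in for LoftyError, showing only the two UTF-8 variants.
#[derive(Debug)]
enum Error {
    StringFromUtf8(std::string::FromUtf8Error),
    StrFromUtf8(std::str::Utf8Error),
}

impl From<std::string::FromUtf8Error> for Error {
    fn from(input: std::string::FromUtf8Error) -> Self {
        Error::StringFromUtf8(input)
    }
}

impl From<std::str::Utf8Error> for Error {
    fn from(input: std::str::Utf8Error) -> Self {
        Error::StrFromUtf8(input)
    }
}

// String::from_utf8 fails with FromUtf8Error (owned input)...
fn owned_string(bytes: Vec<u8>) -> Result<String, Error> {
    Ok(String::from_utf8(bytes)?)
}

// ...while std::str::from_utf8 fails with Utf8Error (borrowed input),
// which is why the two variants are kept separate.
fn borrowed_str(bytes: &[u8]) -> Result<&str, Error> {
    Ok(std::str::from_utf8(bytes)?)
}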

View file

@@ -58,15 +58,15 @@ where
}
comm = Some(chunks.content(data)?);
chunks.correct_position(data)?;
},
b"SSND" if read_properties => {
stream_len = chunks.size;
data.seek(SeekFrom::Current(i64::from(chunks.size)))?;
chunks.skip(data)?;
},
#[cfg(feature = "aiff_text_chunks")]
b"ANNO" => {
let value = String::from_utf8(chunks.content(data)?)?;
annotations.push(value);
annotations.push(chunks.read_string(data)?);
},
// These four chunks are expected to appear at most once per file,
// so there's no need to replace anything we already read
@@ -88,28 +88,25 @@ where
text: String::from_utf8(text)?,
})
}
chunks.correct_position(data)?;
},
#[cfg(feature = "aiff_text_chunks")]
b"NAME" if text_chunks.name.is_none() => {
let value = String::from_utf8(chunks.content(data)?)?;
text_chunks.name = Some(value);
text_chunks.name = Some(chunks.read_string(data)?);
},
#[cfg(feature = "aiff_text_chunks")]
b"AUTH" if text_chunks.author.is_none() => {
let value = String::from_utf8(chunks.content(data)?)?;
text_chunks.author = Some(value);
text_chunks.author = Some(chunks.read_string(data)?);
},
#[cfg(feature = "aiff_text_chunks")]
b"(c) " if text_chunks.copyright.is_none() => {
let value = String::from_utf8(chunks.content(data)?)?;
text_chunks.copyright = Some(value);
text_chunks.copyright = Some(chunks.read_string(data)?);
},
_ => {
data.seek(SeekFrom::Current(i64::from(chunks.size)))?;
chunks.skip(data)?;
},
}
chunks.correct_position(data)?;
}
#[cfg(feature = "aiff_text_chunks")]

View file

@@ -1,13 +1,13 @@
use crate::error::Result;
#[cfg(feature = "id3v2")]
use crate::id3::v2::read::parse_id3v2;
use crate::id3::v2::read_id3v2_header;
#[cfg(feature = "id3v2")]
use crate::id3::v2::tag::Id3v2Tag;
use std::io::{Read, Seek, SeekFrom};
use std::marker::PhantomData;
use crate::id3::v2::read_id3v2_header;
use byteorder::{ByteOrder, ReadBytesExt};
pub(crate) struct Chunks<B>
@@ -38,6 +38,17 @@ impl<B: ByteOrder> Chunks<B> {
Ok(())
}
pub fn read_string<R>(&mut self, data: &mut R) -> Result<String>
where
R: Read + Seek,
{
let cont = self.content(data)?;
self.correct_position(data)?;
let value_str = std::str::from_utf8(&cont)?;
Ok(value_str.trim_matches('\0').to_string())
}
pub fn content<R>(&mut self, data: &mut R) -> Result<Vec<u8>>
where
R: Read,
@@ -66,6 +77,8 @@ impl<B: ByteOrder> Chunks<B> {
data.seek(SeekFrom::Current(10))?;
}
self.correct_position(data)?;
Ok(id3v2)
}
@@ -86,6 +99,18 @@ impl<B: ByteOrder> Chunks<B> {
data.seek(SeekFrom::Current(10))?;
}
self.correct_position(data)?;
Ok(())
}
pub fn skip<R>(&mut self, data: &mut R) -> Result<()>
where
R: Read + Seek,
{
data.seek(SeekFrom::Current(i64::from(self.size)))?;
self.correct_position(data)?;
Ok(())
}
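
As an aside, read_string and skip fold the recurring content-then-correct_position sequence into a single call. A rough standalone sketch of the same idea over a plain reader; the even-byte padding in correct_position and the trailing-NUL trim are assumptions about the usual IFF/RIFF chunk layout, not a quote of the crate's internals:

use std::io::{Error, ErrorKind, Read, Result, Seek, SeekFrom};

// Rough stand-in for Chunks<B>; size is the body size of the current chunk.
struct Chunk {
    size: u32,
}

impl Chunk {
    // Read the chunk body into a buffer.
    fn content<R: Read>(&self, data: &mut R) -> Result<Vec<u8>> {
        let mut buf = vec![0; self.size as usize];
        data.read_exact(&mut buf)?;
        Ok(buf)
    }

    // Chunks are padded to an even length, so step over the padding byte
    // after an odd-sized body (assumed behaviour of correct_position).
    fn correct_position<R: Seek>(&self, data: &mut R) -> Result<()> {
        if self.size % 2 != 0 {
            data.seek(SeekFrom::Current(1))?;
        }
        Ok(())
    }

    // content + realign + NUL trim in one call, as in read_string above.
    fn read_string<R: Read + Seek>(&self, data: &mut R) -> Result<String> {
        let content = self.content(data)?;
        self.correct_position(data)?;
        let value = std::str::from_utf8(&content)
            .map_err(|e| Error::new(ErrorKind::InvalidData, e))?;
        Ok(value.trim_matches('\0').to_string())
    }

    // Skip the body entirely, then realign, as in skip above.
    fn skip<R: Seek>(&self, data: &mut R) -> Result<()> {
        data.seek(SeekFrom::Current(i64::from(self.size)))?;
        self.correct_position(data)?;
        Ok(())
    }
}

Callers that still need the raw chunk bytes keep using content, while everything that only wanted a trimmed string or a plain skip goes through the new helpers.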

View file

@@ -52,7 +52,7 @@ where
if fmt.is_empty() {
fmt = chunks.content(data)?;
} else {
data.seek(SeekFrom::Current(i64::from(chunks.size)))?;
chunks.skip(data)?;
}
},
b"fact" if read_properties => {
@@ -67,7 +67,7 @@ where
stream_len += chunks.size
}
data.seek(SeekFrom::Current(i64::from(chunks.size)))?;
chunks.skip(data)?;
},
b"LIST" => {
let mut list_type = [0; 4];
@@ -77,23 +77,20 @@ where
if &list_type == b"INFO" {
let end = data.seek(SeekFrom::Current(0))? + u64::from(chunks.size - 4);
super::tag::read::parse_riff_info(data, end, &mut riff_info)?;
chunks.correct_position(data)?;
}
#[cfg(not(feature = "riff_info_list"))]
{
data.seek(SeekFrom::Current(i64::from(chunks.size)))?;
}
chunks.skip(data)?;
},
#[cfg(feature = "id3v2")]
b"ID3 " | b"id3 " => id3v2_tag = Some(chunks.id3_chunk(data)?),
#[cfg(not(feature = "id3v2"))]
b"ID3 " | b"id3 " => chunks.id3_chunk(data)?,
_ => {
data.seek(SeekFrom::Current(i64::from(chunks.size)))?;
chunks.skip(data)?;
},
}
chunks.correct_position(data)?;
}
let properties = if read_properties {

View file

@@ -24,16 +24,11 @@ where
return Err(LoftyError::Wav("Non-ascii key found in RIFF INFO"));
}
let value = chunks.content(data)?;
chunks.correct_position(data)?;
let value_str = std::str::from_utf8(&value)
.map_err(|_| LoftyError::Wav("Non UTF-8 value found in RIFF INFO"))?;
tag.items.push((
key_str.to_string(),
value_str.trim_matches('\0').to_string(),
chunks
.read_string(data)
.map_err(|_| LoftyError::Wav("Failed to read the chunk value"))?,
));
}
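
For orientation, each RIFF INFO item is a four-byte ASCII key followed by a size and a NUL-padded value. A standalone sketch of one iteration of this loop; the ASCII check and the NUL trim mirror the lines above, while the little-endian size read and the odd-size padding byte are assumptions about the standard RIFF layout rather than the crate's exact reader:

use std::io::{Error, ErrorKind, Read, Result, Seek, SeekFrom};

use byteorder::{LittleEndian, ReadBytesExt};

// Read one key/value pair from a RIFF INFO list.
fn read_info_item<R: Read + Seek>(data: &mut R) -> Result<(String, String)> {
    // Four-byte key, e.g. b"INAM" or b"IART"
    let mut key = [0u8; 4];
    data.read_exact(&mut key)?;

    if !key.iter().all(u8::is_ascii) {
        return Err(Error::new(
            ErrorKind::InvalidData,
            "Non-ascii key found in RIFF INFO",
        ));
    }

    // Sub-chunk sizes in RIFF files are little-endian
    let size = data.read_u32::<LittleEndian>()?;
    let mut value = vec![0u8; size as usize];
    data.read_exact(&mut value)?;

    // Values are NUL-padded, and odd-sized chunks carry one padding byte
    if size % 2 != 0 {
        data.seek(SeekFrom::Current(1))?;
    }

    let value = std::str::from_utf8(&value)
        .map_err(|e| Error::new(ErrorKind::InvalidData, e))?;

    Ok((
        String::from_utf8_lossy(&key).into_owned(),
        value.trim_matches('\0').to_string(),
    ))
}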