mirror of https://github.com/Serial-ATA/lofty-rs
synced 2024-11-10 06:34:18 +00:00

Make ogg_pager more general-purpose

This commit is contained in:
parent 5389efa386
commit bc9fbd87ab

6 changed files with 292 additions and 38 deletions
ogg_pager/Cargo.toml

@@ -1,12 +1,12 @@
[package]
name = "ogg_pager"
version = "0.1.8"
version = "0.1.9"
authors = ["Serial <69764315+Serial-ATA@users.noreply.github.com>"]
edition = "2021"
license = "MIT OR Apache-2.0"
description = "A simple OGG page reader"
repository = "https://github.com/Serial-ATA/lofty-rs"
keywords = ["ogg", "vorbis"]
keywords = ["ogg", "xiph"]
categories = ["accessibility", "multimedia::audio"]

[dependencies]
ogg_pager/README.md (new file, 6 lines)

@@ -0,0 +1,6 @@
# OGG pager
[![Downloads](https://img.shields.io/crates/d/ogg_pager?style=for-the-badge&logo=rust)](https://crates.io/crates/ogg_pager)
[![Version](https://img.shields.io/crates/v/ogg_pager?style=for-the-badge&logo=rust)](https://crates.io/crates/ogg_pager)
[![Documentation](https://img.shields.io/badge/docs.rs-ogg_pager-informational?style=for-the-badge&logo=read-the-docs)](https://docs.rs/ogg_pager/)

A simple OGG page reader, creator, and paginator
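As a rough, hypothetical illustration of the reader described in that README (not part of the commit), here is a minimal sketch using only APIs visible later in this diff: `Page::read`, the public `serial`/`seq_num` fields, and `PageError::Io`. The file name is made up.

```rust
use std::fs::File;

use ogg_pager::{Page, PageError};

fn main() -> ogg_pager::Result<()> {
	// Hypothetical input file; any seekable OGG stream would do
	let mut file = File::open("audio.ogg").map_err(PageError::Io)?;

	// Read only the first page header, skipping over its content
	let page = Page::read(&mut file, true)?;
	println!("serial: {}, sequence number: {}", page.serial, page.seq_num);

	Ok(())
}
```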
ogg_pager/src/error.rs

@@ -1,8 +1,10 @@
use std::error::Error;
use std::fmt;

/// Alias for `Result<T, PageError>`
pub type Result<T> = std::result::Result<T, PageError>;

/// Errors that can occur while performing `Page` operations
#[derive(Debug)]
pub enum PageError {
	/// The reader contains a page with a nonzero version

@@ -11,6 +13,8 @@ pub enum PageError {
	BadSegmentCount,
	/// The reader contains a page without a magic signature (OggS)
	MissingMagic,
	/// The reader contains too much data for a single page
	TooMuchData,
	/// Any std::io::Error
	Io(std::io::Error),
}

@@ -23,7 +27,8 @@ impl fmt::Display for PageError {
			},
			PageError::BadSegmentCount => write!(f, "Page has a segment count < 1"),
			PageError::MissingMagic => write!(f, "Page is missing a magic signature"),
			PageError::Io(..) => write!(f, "Encountered an std::io::Error"),
			PageError::TooMuchData => write!(f, "Too much data was provided"),
			PageError::Io(err) => write!(f, "{}", err),
		}
	}
}
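An illustrative sketch (not from the commit) of how the new `TooMuchData` variant and the reworked `Io` display reach a caller, via the public `segment_table` function added further down in this diff:

```rust
use ogg_pager::{segment_table, PageError};

fn main() {
	// segment_table() rejects anything larger than a single page can hold
	match segment_table(1024 * 1024) {
		Ok(table) => println!("{} lacing values", table.len()),
		Err(PageError::TooMuchData) => println!("packet is too large for one page"),
		// The Io variant now forwards the inner error's Display output
		Err(e) => println!("other error: {}", e),
	}
}
```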
ogg_pager/src/lib.rs

@@ -1,3 +1,5 @@
//! A simple OGG page reader

mod crc;
mod error;

@@ -8,45 +10,115 @@ use byteorder::{LittleEndian, ReadBytesExt};
pub use crc::crc32;
pub use error::{PageError, Result};

#[derive(Clone)]
const CONTINUED_PACKET: u8 = 0x01;

/// The maximum page content size
pub const MAX_CONTENT_SIZE: usize = 65025;
/// The packet contains the first page of the logical bitstream
pub const CONTAINS_FIRST_PAGE_OF_BITSTREAM: u8 = 0x02;
/// The packet contains the last page of the logical bitstream
pub const CONTAINS_LAST_PAGE_OF_BITSTREAM: u8 = 0x04;

/// An OGG page
#[derive(Clone, PartialEq, Debug)]
pub struct Page {
	pub content: Vec<u8>,
	pub header_type: u8,
	content: Vec<u8>,
	header_type: u8,
	/// The page's absolute granule position
	pub abgp: u64,
	/// The page's stream serial number
	pub serial: u32,
	/// The page's sequence number
	pub seq_num: u32,
	pub checksum: u32,
	checksum: u32,
	/// The position in the stream the page started at
	pub start: u64,
	/// The position in the stream the page ended
	pub end: u64,
	segment_table: Vec<u8>,
}

impl Page {
	/// Create a new `Page`
	///
	/// This will have the following defaults:
	///
	/// * `checksum` = 0
	/// * `start` = 0
	/// * `end` = `content.len()`
	///
	/// # Errors
	///
	/// See [`segment_table`]
	///
	/// # Example
	///
	/// ```rust,ignore
	/// use ogg_pager::CONTAINS_FIRST_PAGE_OF_BITSTREAM;
	///
	/// // Creating the identification header
	/// let ident_header_packet = vec![...];
	/// let stream_serial_number = 2784419176;
	///
	/// let page = Page::new(
	/// 	CONTAINS_FIRST_PAGE_OF_BITSTREAM,
	/// 	0,
	/// 	stream_serial_number,
	/// 	ident_header_packet,
	/// );
	/// ```
	pub fn new(
		header_type_flag: u8,
		abgp: u64,
		stream_serial: u32,
		sequence_number: u32,
		content: Vec<u8>,
	) -> Result<Self> {
		let len = content.len();
		let segment_table = segment_table(len)?;

		Ok(Self {
			content,
			header_type: header_type_flag,
			abgp,
			serial: stream_serial,
			seq_num: sequence_number,
			checksum: 0,
			start: 0,
			end: len as u64,
			segment_table,
		})
	}

	/// Convert the Page to Vec<u8> for writing
	///
	/// NOTE: This will write the checksum as is. It is likely [Page::gen_crc] will have
	/// to be used prior.
	pub fn as_bytes(&self) -> Vec<u8> {
		let mut bytes = Vec::new();
		let segments = self.segments();
		let segment_count = [segments.len() as u8];

		bytes.extend(b"OggS".iter());
		bytes.extend([0_u8].iter());
		bytes.extend(self.header_type.to_le_bytes().iter());
		bytes.extend(self.abgp.to_le_bytes().iter());
		bytes.extend(self.serial.to_le_bytes().iter());
		bytes.extend(self.seq_num.to_le_bytes().iter());
		bytes.extend(self.checksum.to_le_bytes().iter());
		bytes.extend(segment_count.iter());
		bytes.extend(segments.iter());
		bytes.extend(b"OggS");
		bytes.push(0);
		bytes.extend(self.header_type.to_le_bytes());
		bytes.extend(self.abgp.to_le_bytes());
		bytes.extend(self.serial.to_le_bytes());
		bytes.extend(self.seq_num.to_le_bytes());
		bytes.extend(self.checksum.to_le_bytes());
		bytes.push(self.segment_table.len() as u8);
		bytes.extend(self.segment_table.iter());
		bytes.extend(self.content.iter());

		bytes
	}

	/// Returns the Page's segment table as Vec<u8>
	pub fn segments(&self) -> Vec<u8> {
		segments(&*self.content)
	}

	/// Attempts to get a Page from a reader
	///
	/// Use `skip_content` to only read the header, and skip over the content.
	///
	/// # Errors
	///
	/// * [`std::io::Error`]
	/// * [`PageError`]
	pub fn read<V>(data: &mut V, skip_content: bool) -> Result<Self>
	where
		V: Read + Seek,
@@ -84,10 +156,10 @@ impl Page {
		data.read_exact(&mut segment_table)?;

		let mut content: Vec<u8> = Vec::new();
		let content_len = segment_table.iter().map(|&b| b as i64).sum();
		let content_len: u16 = segment_table.iter().map(|&b| u16::from(b)).sum();

		if skip_content {
			data.seek(SeekFrom::Current(content_len))?;
			data.seek(SeekFrom::Current(i64::from(content_len)))?;
		} else {
			content = vec![0; content_len as usize];
			data.read_exact(&mut content)?;
@@ -104,6 +176,7 @@ impl Page {
			checksum,
			start,
			end,
			segment_table,
		})
	}

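The hunks above add a `Page::new` constructor and rework `as_bytes` to serialize the stored segment table. Below is a hedged sketch of the write side, assuming `gen_crc` takes `&mut self` (as the `as_bytes` note and the call in `extend` suggest); the packet bytes and serial number are made up:

```rust
use ogg_pager::{Page, CONTAINS_FIRST_PAGE_OF_BITSTREAM};

fn main() -> ogg_pager::Result<()> {
	// Hypothetical 19-byte packet (the size of an OpusHead identification header)
	let packet = vec![0_u8; 19];

	// Defaults: checksum = 0, start = 0, end = content.len()
	let mut page = Page::new(CONTAINS_FIRST_PAGE_OF_BITSTREAM, 0, 1234, 0, packet)?;

	// as_bytes() writes the checksum as-is, so generate it first
	page.gen_crc();
	let bytes = page.as_bytes();

	// 27-byte header + 1 lacing value + 19 content bytes
	assert_eq!(bytes.len(), 27 + 1 + 19);
	Ok(())
}
```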
@@ -113,21 +186,26 @@ impl Page {
	}

	/// Extends the Page's content, returning another Page if too much data was provided
	///
	/// This will do nothing if `content` is greater than the max page size. In this case,
	/// [`paginate`] should be used.
	pub fn extend(&mut self, content: &[u8]) -> Option<Page> {
		let self_len = self.content.len();
		let content_len = content.len();

		if self_len <= 65025 && self_len + content_len <= 65025 {
		if self_len + content_len <= MAX_CONTENT_SIZE {
			self.content.extend(content.iter());
			self.end += content_len as u64;

			return None;
		}

		if content_len <= 65025 {
		if content_len <= MAX_CONTENT_SIZE {
			let remaining = 65025 - self_len;

			self.content.extend(content[0..remaining].iter());
			self.header_type = 0;
			self.abgp = 1_u64.wrapping_neg(); // -1 in two's complement indicates that no packets finish on this page
			self.end += remaining as u64;

			let mut p = Page {
@@ -139,6 +217,7 @@ impl Page {
				checksum: 0,
				start: self.end,
				end: self.start + content.len() as u64,
				segment_table: vec![],
			};

			p.gen_crc();
@@ -148,23 +227,123 @@ impl Page {

		None
	}
}

/// Creates a segment table based on the content
pub fn segments(cont: &[u8]) -> Vec<u8> {
	let len = cont.len();

	let mut last_len = (len % 255) as u8;
	if last_len == 0 {
		last_len = 255
	/// Returns the page's content
	pub fn content(&self) -> &[u8] {
		self.content.as_slice()
	}

	let mut needed = len / 255;
	/// Consumes the page and returns its content
	pub fn take_content(self) -> Vec<u8> {
		self.content
	}

	/// Returns the page's header type flag
	pub fn header_type(&self) -> u8 {
		self.header_type
	}

	/// Returns the page's checksum
	pub fn checksum(&self) -> u32 {
		self.checksum
	}

	/// Returns the page's segment table
	pub fn segment_table(&self) -> &[u8] {
		self.segment_table.as_slice()
	}
}

#[allow(clippy::eval_order_dependence)]
/// Create pages from a packet
///
/// # Example
///
/// ```rust,ignore
/// use ogg_pager::paginate;
///
/// // Creating the comment header
/// let comment_header_packet = vec![...];
/// let stream_serial_number = 2784419176;
///
/// let pages = paginate(&*comment_header_packet, stream_serial_number, 0, 0);
/// ```
pub fn paginate(packet: &[u8], stream_serial: u32, abgp: u64, flags: u8) -> Vec<Page> {
	let mut pages = Vec::new();

	let mut first_page = true;
	let mut pos = 0;

	for (idx, page) in packet.chunks(MAX_CONTENT_SIZE).enumerate() {
		let p = Page {
			content: page.to_vec(),
			header_type: {
				if first_page {
					if flags & CONTAINS_FIRST_PAGE_OF_BITSTREAM == 0x02 {
						CONTAINS_LAST_PAGE_OF_BITSTREAM
					} else {
						0
					}
				} else {
					CONTINUED_PACKET
				}
			},
			abgp,
			serial: stream_serial,
			seq_num: (idx + 1) as u32,
			checksum: 0,
			start: pos,
			end: {
				pos += page.len() as u64;
				pos
			},
			// Safe to unwrap, since we are working with chunks no bigger than the max page size
			segment_table: segment_table(page.len()).unwrap(),
		};

		first_page = false;
		pages.push(p);
	}

	if flags & CONTAINS_LAST_PAGE_OF_BITSTREAM == 0x04 {
		if let Some(last) = pages.last_mut() {
			last.header_type |= CONTAINS_LAST_PAGE_OF_BITSTREAM;
		}
	}

	if pages.len() > 1 {
		let last_idx = pages.len() - 1;

		for (idx, p) in pages.iter_mut().enumerate() {
			if idx == last_idx {
				break;
			}

			p.abgp = 1_u64.wrapping_neg();
		}
	}

	pages
}
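A hypothetical usage sketch for the new `paginate` function (not from the commit); the packet length, serial number, and flag are made up, and the expected page count follows from `MAX_CONTENT_SIZE`:

```rust
use ogg_pager::{paginate, CONTAINS_LAST_PAGE_OF_BITSTREAM};

fn main() {
	// Hypothetical packet larger than one page (65025 bytes of content)
	let comment_header_packet = vec![0_u8; 100_000];
	let stream_serial_number = 2784419176;

	let pages = paginate(
		&comment_header_packet,
		stream_serial_number,
		0,
		CONTAINS_LAST_PAGE_OF_BITSTREAM,
	);

	// 100_000 bytes split into 65025-byte chunks gives 2 pages,
	// and only the last page carries the "last page" flag
	assert_eq!(pages.len(), 2);
	assert_eq!(
		pages.last().unwrap().header_type() & CONTAINS_LAST_PAGE_OF_BITSTREAM,
		CONTAINS_LAST_PAGE_OF_BITSTREAM
	);
}
```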
/// Creates a segment table based on the length
///
/// # Errors
///
/// `length` > [`MAX_CONTENT_SIZE`]
pub fn segment_table(length: usize) -> Result<Vec<u8>> {
	let last_len = (length % 255) as u8;

	let mut needed = length / 255;
	if needed != 255 {
		needed += 1
	}

	let mut segments = Vec::new();
	if needed > 255 {
		return Err(PageError::TooMuchData);
	}

	let mut segments = Vec::with_capacity(needed);

	for i in 0..needed {
		if i + 1 < needed {
@@ -174,5 +353,69 @@ pub fn segments(cont: &[u8]) -> Vec<u8> {
		}
	}

	segments
	Ok(segments)
}

#[cfg(test)]
mod tests {
	use crate::{paginate, Page};
	use std::io::Cursor;

	#[test]
	fn opus_ident_header() {
		let expected = Page {
			content: vec![
				0x4F, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64, 0x01, 0x02, 0x38, 0x01, 0x80, 0xBB,
				0, 0, 0, 0, 0,
			],
			header_type: 2,
			abgp: 0,
			serial: 1759377061,
			seq_num: 0,
			checksum: 3579522525,
			start: 0,
			end: 47,
			segment_table: vec![0x13],
		};

		let content = std::fs::read("test_assets/opus_ident_header.page").unwrap();

		let page = Page::read(&mut Cursor::new(content), false).unwrap();

		assert_eq!(expected, page);
	}

	#[test]
	fn paginate_large() {
		let packet = std::fs::read("test_assets/large_comment_packet.page").unwrap();

		let pages = paginate(&*packet, 1234, 0, 0);

		let len = pages.len();

		assert_eq!(len, 17);
		assert_eq!(
			len % 255,
			*pages.last().unwrap().segment_table.last().unwrap() as usize
		);

		for (i, page) in pages.into_iter().enumerate() {
			assert_eq!(page.serial, 1234);

			if i + 1 == len {
				assert_eq!(page.abgp, 0);
			} else {
				// -1
				assert_eq!(page.abgp, u64::MAX);
			}

			assert_eq!(page.seq_num, (i + 1) as u32);

			if i == 0 {
				assert_eq!(page.header_type, 0);
			} else {
				assert_eq!(page.header_type, 1);
			}
		}
	}
}
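To make the lacing-value arithmetic concrete, a short illustrative sketch; the expected values are worked out by hand from the `segment_table` logic shown in this diff:

```rust
use ogg_pager::segment_table;

fn main() {
	// 700 bytes = two full 255-byte lacing values plus a 190-byte remainder
	assert_eq!(segment_table(700).unwrap(), vec![255_u8, 255, 190]);

	// An exact multiple of 255 is terminated by a 0 lacing value
	assert_eq!(segment_table(510).unwrap(), vec![255_u8, 255, 0]);

	// Anything needing more than 255 lacing values is rejected with TooMuchData
	assert!(segment_table(1024 * 1024).is_err());
}
```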
BIN ogg_pager/test_assets/large_comment_packet.page (new file; binary file not shown)
BIN ogg_pager/test_assets/opus_ident_header.page (new file; binary file not shown)