//! Extraction of mod files from TTMP (TexTools mod pack) archives.
//! (ttmp-rs/src/ttmp_extractor.rs)
use std::cell::RefCell;
use std::collections::HashMap;
use std::io::{Cursor, Read, Seek, SeekFrom, Write};
use flate2::read::DeflateDecoder;
use sqpack::{DatBlockHeader, DatStdFileBlockInfos, FileKind, LodBlock, ModelBlock, SqPackFileInfo, SqPackFileInfoHeader};
use sqpack::binrw::{BinRead, BinWriterExt, VecArgs};
use zip::ZipArchive;
use crate::Error;
use crate::error::Result;
use crate::model::manifest_kind::ManifestKind;
use crate::model::SimpleMod;
use crate::tracking_reader::TrackingReader;
use crate::util::{MAX_MODEL_LODS, read_struct};
#[doc(hidden)]
// Combined trait alias for writers that also support seeking; the model
// extractor needs `Seek` to go back and rewrite the header after the body.
pub trait WriteSeek: Write + Seek {}
// Blanket impl: anything that is both `Write` and `Seek` is a `WriteSeek`.
impl<T> WriteSeek for T
where T: Write + Seek {}
/// Extracts mod files from a TTMP archive (a zip containing a manifest and a
/// `TTMPD.mpd` data file).
pub struct TtmpExtractor<R> {
    // Parsed manifest (V1 or V2) describing the files in the pack.
    manifest: ManifestKind,
    // The backing zip archive; RefCell so `extract_all` can borrow it mutably
    // through `&self`-style accessors.
    zip: RefCell<ZipArchive<R>>,
}
impl<R: Read + Seek> TtmpExtractor<R> {
    /// Creates an extractor by opening `reader` as a TTMP zip archive and
    /// parsing its manifest.
    ///
    /// # Errors
    ///
    /// Returns an error if the archive cannot be opened or the manifest
    /// cannot be parsed (see `crate::from_reader`).
    pub fn new(reader: R) -> Result<Self> {
        let (manifest, zip) = crate::from_reader(reader)?;
        Ok(Self {
            manifest,
            zip: RefCell::new(zip),
        })
    }

    /// Returns a reference to the parsed manifest.
    pub fn manifest(&self) -> &ManifestKind {
        &self.manifest
    }

    /// Consumes the extractor, returning the parsed manifest.
    pub fn into_manifest(self) -> ManifestKind {
        self.manifest
    }

    /// Extracts every mod file in the pack, calling `writer_func` once per
    /// file to obtain the destination writer for that file.
    ///
    /// Files are processed in ascending `mod_offset` order so `TTMPD.mpd`
    /// can be consumed as a single forward-only stream.
    ///
    /// # Errors
    ///
    /// Returns an error if `TTMPD.mpd` is missing from the archive, if
    /// `writer_func` fails, or if decoding/writing a file fails.
    pub fn extract_all<F>(&mut self, writer_func: F) -> Result<()>
    where F: Fn(&ModFile) -> Result<Box<dyn WriteSeek>, std::io::Error> + 'static,
    {
        let all_files = self.all_files_sorted();
        let mut zip = self.zip.borrow_mut();
        // TrackingReader counts bytes consumed so padding between entries
        // can be skipped to keep the stream aligned with each mod_offset.
        let mut data_file = TrackingReader::new(zip.by_name("TTMPD.mpd")
            .map_err(Error::MissingDataFile)?);
        let mut buf = [0; 4096];
        for mod_file in all_files {
            let file = mod_file.file;
            // Reset the byte counter for this entry.
            data_file.read = 0;
            // get the writer to write this file into
            let mut writer = writer_func(&mod_file)
                .map_err(Error::Io)?;
            let expected = file.mod_size;
            let info: SqPackFileInfoHeader = read_struct(&mut data_file, &mut buf)?;
            match info.kind {
                FileKind::Empty => todo!(),
                FileKind::Standard => {
                    Self::extract_standard_file(&info, &mut data_file, &mut writer, &mut buf)?;
                }
                FileKind::Model => {
                    Self::extract_model_file(&info, &mut data_file, &mut writer, &mut buf)?;
                }
                FileKind::Texture => {
                    Self::extract_texture_file(&info, &mut data_file, &mut writer, &mut buf)?;
                }
            }
            // Entries are padded to `mod_size`; consume whatever remains so
            // the next entry starts at the right offset.
            if data_file.read < expected {
                let to_skip = expected - data_file.read;
                Self::skip(&mut data_file, &mut buf, to_skip)?;
            }
        }
        Ok(())
    }

    /// Returns the underlying zip archive.
    pub fn zip(&self) -> &RefCell<ZipArchive<R>> {
        &self.zip
    }

    /// Collects every mod file referenced by the manifest — both V2 grouped
    /// options and the simple mods list — sorted by offset into `TTMPD.mpd`.
    pub fn all_files_sorted(&self) -> Vec<ModFile> {
        let mut all_files = Vec::new();
        // Counts how many times each group name has been seen across pages so
        // duplicate group names can be disambiguated by occurrence number.
        let mut seen_groups: HashMap<&str, usize> = HashMap::new();
        if let ManifestKind::V2(pack) = &self.manifest {
            if let Some(pages) = &pack.mod_pack_pages {
                for page in pages {
                    for group in &page.mod_groups {
                        let seen = seen_groups.entry(&group.group_name).or_default();
                        *seen += 1;
                        for option in &group.option_list {
                            for file in &option.mods_jsons {
                                // NOTE(review): the occurrence counter is keyed
                                // on `group.group_name` but the stored name is
                                // `option.group_name` — presumably these always
                                // match; confirm against the manifest model.
                                all_files.push(ModFile {
                                    group: Some(&*option.group_name),
                                    group_occurence: *seen,
                                    option: Some(&*option.name),
                                    file,
                                });
                            }
                        }
                    }
                }
            }
        }
        // Simple (ungrouped) mods carry no group/option info; occurrence 0.
        all_files.extend(self.manifest.simple_mods_list().iter().map(|file| ModFile {
            group: None,
            group_occurence: 0,
            option: None,
            file,
        }));
        // Sort by data-file offset so extraction can stream forward-only.
        all_files.sort_unstable_by_key(|file| file.file.mod_offset);
        all_files
    }
}
impl<R: Read> TtmpExtractor<R> {
pub fn extract_one_into<W: Write + Seek>(mod_file: &ModFile, mut reader: R, mut writer: W) -> Result<()> {
let mut reader = TrackingReader::new(&mut reader);
let mut buf = [0; 4096];
let file = mod_file.file;
let expected = file.mod_size;
let info: SqPackFileInfoHeader = read_struct(&mut reader, &mut buf)?;
match info.kind {
FileKind::Empty => todo!(),
FileKind::Standard => {
Self::extract_standard_file(&info, &mut reader, &mut writer, &mut buf)?;
}
FileKind::Model => {
Self::extract_model_file(&info, &mut reader, &mut writer, &mut buf)?;
}
FileKind::Texture => {
Self::extract_texture_file(&info, &mut reader, &mut writer, &mut buf)?;
}
}
if reader.read < expected {
let to_skip = expected - reader.read;
Self::skip(&mut reader, &mut buf, to_skip)?;
}
Ok(())
}
fn extract_standard_file<T: Read, W: Write>(info: &SqPackFileInfoHeader, mut data_file: T, mut writer: W, buf: &mut [u8]) -> Result<()> {
let std_info: SqPackFileInfo = read_struct(&mut data_file, buf)?;
let blocks: Vec<DatStdFileBlockInfos> = (0..std_info.number_of_blocks)
.map(|_| read_struct(&mut data_file, buf))
.collect::<Result<_>>()?;
let skip_amt = info.size as usize
- std::mem::size_of::<SqPackFileInfoHeader>()
- std::mem::size_of::<SqPackFileInfo>()
- std::mem::size_of::<DatStdFileBlockInfos>() * std_info.number_of_blocks as usize;
Self::skip(&mut data_file, buf, skip_amt)?;
for block in blocks {
Self::read_block_into(&mut data_file, &mut writer, buf, block.compressed_size as usize)?;
}
Ok(())
}
fn extract_model_file<T: Read, W: Write + Seek>(info: &SqPackFileInfoHeader, mut reader: T, mut writer: W, buf: &mut [u8]) -> Result<()> {
let model_info: ModelBlock = read_struct(&mut reader, buf)?;
let block_counts = &model_info.block_num;
let total_blocks = block_counts.stack
+ block_counts.runtime
+ block_counts.vertex_buffer.iter().sum::<u16>()
+ block_counts.edge_geometry_vertex_buffer.iter().sum::<u16>()
+ block_counts.index_buffer.iter().sum::<u16>();
let block_sizes: Vec<u16> = Self::read_vec(&mut reader, total_blocks as usize, buf)?;
let skip_amt = info.size as usize
- std::mem::size_of::<SqPackFileInfoHeader>()
- std::mem::size_of::<ModelBlock>()
- std::mem::size_of::<u16>() * total_blocks as usize;
Self::skip(&mut reader, buf, skip_amt)?;
writer.seek(SeekFrom::Start(0x44)).map_err(Error::Io)?;
let stack_size = Self::read_blocks_into(
model_info.block_num.stack,
model_info.block_index.stack,
model_info.offset.stack,
&block_sizes,
&mut reader,
&mut writer,
buf,
)?;
let runtime_size = Self::read_blocks_into(
model_info.block_num.runtime,
model_info.block_index.runtime,
model_info.offset.runtime,
&block_sizes,
&mut reader,
&mut writer,
buf,
)?;
let mut vertex_data_offsets = [0u32; MAX_MODEL_LODS];
let mut vertex_buffer_sizes = [0u32; MAX_MODEL_LODS];
let mut index_data_offsets = [0u32; MAX_MODEL_LODS];
let mut index_buffer_sizes = [0u32; MAX_MODEL_LODS];
for lod_index in 0..MAX_MODEL_LODS {
// Vertex buffer
let block_count = model_info.block_num.vertex_buffer[lod_index];
if block_count != 0 {
if lod_index == 0 || block_count > 0 {
vertex_data_offsets[lod_index] = writer.stream_position().map_err(Error::Io)? as u32;
}
vertex_buffer_sizes[lod_index] = Self::read_blocks_into(
block_count,
model_info.block_index.vertex_buffer[lod_index],
model_info.offset.vertex_buffer[lod_index],
&block_sizes,
&mut reader,
&mut writer,
buf,
)?;
}
// Edge geometry vertex buffer
let block_count = model_info.block_num.edge_geometry_vertex_buffer[lod_index];
if block_count != 0 {
Self::read_blocks_into(
block_count,
model_info.block_index.edge_geometry_vertex_buffer[lod_index],
model_info.offset.edge_geometry_vertex_buffer[lod_index],
&block_sizes,
&mut reader,
&mut writer,
buf,
)?;
}
// Index buffer
let block_count = model_info.block_num.index_buffer[lod_index];
if block_count != 0 {
if lod_index == 0 || block_count > 0 {
index_data_offsets[lod_index] = writer.stream_position().map_err(Error::Io)? as u32;
}
index_buffer_sizes[lod_index] = Self::read_blocks_into(
block_count,
model_info.block_index.index_buffer[lod_index],
model_info.offset.index_buffer[lod_index],
&block_sizes,
&mut reader,
&mut writer,
buf,
)?;
}
}
// Write out the header now we've collected the info for it.
writer.seek(SeekFrom::Start(0)).map_err(Error::Io)?;
writer.write_le(&model_info.version).map_err(Error::BinRwWrite)?;
writer.write_le(&stack_size).map_err(Error::BinRwWrite)?;
writer.write_le(&runtime_size).map_err(Error::BinRwWrite)?;
writer.write_le(&model_info.vertex_declaration_num).map_err(Error::BinRwWrite)?;
writer.write_le(&model_info.material_num).map_err(Error::BinRwWrite)?;
writer.write_le(&vertex_data_offsets).map_err(Error::BinRwWrite)?;
writer.write_le(&index_data_offsets).map_err(Error::BinRwWrite)?;
writer.write_le(&vertex_buffer_sizes).map_err(Error::BinRwWrite)?;
writer.write_le(&index_buffer_sizes).map_err(Error::BinRwWrite)?;
writer.write_le(&model_info.num_lods).map_err(Error::BinRwWrite)?;
writer.write_le(&model_info.index_buffer_streaming_enabled).map_err(Error::BinRwWrite)?;
writer.write_le(&model_info.edge_geometry_enabled).map_err(Error::BinRwWrite)?;
writer.write_le(&0u8).map_err(Error::BinRwWrite)?;
Ok(())
}
fn extract_texture_file<T: Read, W: Write>(info: &SqPackFileInfoHeader, mut reader: T, mut writer: W, buf: &mut [u8]) -> Result<()> {
let std_info: SqPackFileInfo = read_struct(&mut reader, buf)?;
let blocks: Vec<LodBlock> = (0..std_info.number_of_blocks)
.map(|_| read_struct(&mut reader, buf))
.collect::<Result<_>>()?;
let sub_block_count = blocks
.iter()
.fold(0, |acc, block| acc + block.block_count);
let sub_block_sizes: Vec<u16> = Self::read_vec(&mut reader, sub_block_count as usize, buf)?;
let skip_amt = info.size as usize
- std::mem::size_of::<SqPackFileInfoHeader>()
- std::mem::size_of::<SqPackFileInfo>()
- std::mem::size_of::<LodBlock>() * std_info.number_of_blocks as usize
- std::mem::size_of::<u16>() * sub_block_sizes.len();
Self::skip(&mut reader, buf, skip_amt)?;
let mip_map_size = blocks[0].compressed_offset;
if mip_map_size > 0 {
let mut reader = (&mut reader).take(mip_map_size as u64);
std::io::copy(&mut reader, &mut writer).map_err(Error::Io)?;
}
let mut sub_block = 0;
for block in blocks {
for _ in 0..block.block_count {
Self::read_block_into(&mut reader, &mut writer, buf, sub_block_sizes[sub_block] as usize)?;
sub_block += 1;
}
}
Ok(())
}
fn read_block_into<T: Read, W: Write>(mut reader: T, mut writer: W, buf: &mut [u8], size: usize) -> Result<u64> {
let header: DatBlockHeader = read_struct(&mut reader, buf)?;
let (read, actual) = if header.compressed_size == 32_000 {
// uncompressed
let mut reader = (&mut reader).take(header.uncompressed_size as u64);
let read = std::io::copy(&mut reader, &mut writer).map_err(Error::Io)?;
(read, read)
} else if header.compressed_size == 0 {
// https://github.com/TexTools/xivModdingFramework/issues/51
let zeroes: Vec<u8> = std::iter::repeat(0)
.take(header.uncompressed_size as usize)
.collect();
std::io::copy(&mut Cursor::new(zeroes), &mut writer).map_err(Error::Io)?;
(0, 0)
} else {
// compressed
let reader = (&mut reader).take(header.compressed_size as u64);
let mut decoder = DeflateDecoder::new(reader);
let read = std::io::copy(&mut decoder, &mut writer).map_err(Error::Io)?;
(header.compressed_size as u64, read)
};
if (header.size as usize + read as usize) < size as usize {
let to_skip = size
- header.size as usize
- read as usize;
Self::skip(&mut reader, buf, to_skip)?;
}
Ok(actual)
}
fn read_blocks_into<T: Read, W: Write>(
block_count: u16,
block_index: u16,
_section_offset: u32,
block_sizes: &[u16],
mut reader: T,
mut writer: W,
buf: &mut [u8],
) -> Result<u32> {
let sizes = &block_sizes[block_index as usize..block_index as usize + block_count as usize];
sizes
.iter()
.try_fold(0, |acc, &size| {
let bytes_read = Self::read_block_into(&mut reader, &mut writer, buf, size as usize)?;
Ok(acc + bytes_read as u32)
})
}
fn read_vec<S: BinRead<Args=()>, T: Read>(mut reader: T, amount: usize, buf: &mut [u8]) -> Result<Vec<S>> {
let size_needed = amount * std::mem::size_of::<S>();
if size_needed <= buf.len() {
reader.read_exact(&mut buf[..size_needed]).map_err(Error::Io)?;
<Vec<S>>::read_le_args(
&mut Cursor::new(&buf[..size_needed]),
VecArgs {
count: amount,
inner: (),
},
).map_err(Error::SqPackError)
} else {
let mut buf = vec![0; size_needed];
reader.read_exact(&mut buf).map_err(Error::Io)?;
<Vec<S>>::read_le_args(
&mut Cursor::new(buf),
VecArgs {
count: amount,
inner: (),
},
).map_err(Error::SqPackError)
}
}
fn skip<T: Read>(mut reader: T, buf: &mut [u8], amt: usize) -> Result<()> {
let mut left = amt;
while left > 0 {
let to_read = std::cmp::min(left, buf.len());
left -= reader.read(&mut buf[..to_read]).map_err(Error::Io)?;
}
Ok(())
}
}
#[derive(Debug)]
/// A single mod file entry resolved from the manifest, borrowing from the
/// `TtmpExtractor` that produced it.
pub struct ModFile<'a> {
    /// Group name this file belongs to, or `None` for simple (ungrouped) mods.
    pub group: Option<&'a str>,
    /// 1-based occurrence number of the group name across pages (0 for
    /// simple mods); disambiguates duplicate group names.
    /// (Field name keeps the original "occurence" spelling — public API.)
    pub group_occurence: usize,
    /// Option name within the group, or `None` for simple mods.
    pub option: Option<&'a str>,
    /// The underlying manifest entry (offset/size into TTMPD.mpd, etc.).
    pub file: &'a SimpleMod,
}