chore: initial commit

Anna 2022-09-16 02:49:51 -04:00
commit 91ab6c26f7
Signed by: anna
GPG Key ID: 0B391D8F06FCD9E0
17 changed files with 1068 additions and 0 deletions

2
.gitignore vendored Normal file

@@ -0,0 +1,2 @@
/target
/Cargo.lock

25
Cargo.toml Normal file

@@ -0,0 +1,25 @@
[package]
name = "ttmp"
version = "1.0.0"
edition = "2021"
autoexamples = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
flate2 = "1"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
thiserror = "1"
zip = { version = "0.6", default-features = false, features = ["deflate"] }
sqpack = { git = "https://git.anna.lgbt/ascclemens/sqpack-rs", features = ["read"] }
[dev-dependencies]
criterion = "0.4"
hex = "0.4"
sha3 = "0.10"
tempfile = "3"
[[bench]]
name = "extract"
harness = false

18
benches/extract.rs Normal file

@@ -0,0 +1,18 @@
use std::io::Cursor;
use criterion::{Criterion, criterion_group, criterion_main};
use ttmp::ttmp_extractor::TtmpExtractor;
fn extract(c: &mut Criterion) {
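// "schnapps.ttmp2" is a local fixture; reading it into memory up front keeps
// disk I/O out of the measured extraction time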
let ttmp2 = std::fs::read("schnapps.ttmp2").unwrap();
c.bench_function("extract", |b| b.iter(|| {
let mut extractor = TtmpExtractor::new(Cursor::new(&ttmp2)).unwrap();
extractor.extract_all(|_| {
Ok(Box::new(Cursor::new(Vec::new())))
}).unwrap();
}));
}
criterion_group!(benches, extract);
criterion_main!(benches);

344
examples/extract.rs Normal file

@@ -0,0 +1,344 @@
use std::collections::hash_map::DefaultHasher;
use std::fs::File;
use std::hash::{Hash, Hasher};
use std::io::ErrorKind;
use std::path::Path;
use ttmp::ttmp_extractor::TtmpExtractor;
pub fn main() {
let arg = std::env::args().skip(1).next().unwrap();
let file = File::open(&arg).unwrap();
let mut extractor = TtmpExtractor::new(file).unwrap();
extractor.extract_all(|file| {
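// reject game paths that try to climb out of the output directory (zip-slip)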
if file.file.full_path.contains("../") {
return Err(std::io::Error::new(ErrorKind::Other, "tried to escape directory"));
}
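// the same game path can appear in several groups/options, so each (group,
// option) pair gets its own output directory, named from a hash of its label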
let group = file.group.map(|s| {
let mut hasher = DefaultHasher::default();
s.hash(&mut hasher);
hasher.finish()
}).unwrap_or(0);
let option = file.option.map(|s| {
let mut hasher = DefaultHasher::default();
s.hash(&mut hasher);
hasher.finish()
}).unwrap_or(0);
let path = Path::new(".")
.join(format!("g{}", group))
.join(format!("o{}", option))
.join(&file.file.full_path);
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)?;
}
println!("extracting {}", path.to_string_lossy());
Ok(Box::new(File::create(path)?))
}).unwrap();
}
// use std::fs::File;
// use std::io::{Cursor, Read, Seek, SeekFrom, Write};
// use std::path::Path;
//
// use flate2::read::DeflateDecoder;
// use sqpack::{DatBlockHeader, DatStdFileBlockInfos, FileKind, LodBlock, ModelBlock, SqPackFileInfoHeader};
// use sqpack::binrw::{BinRead, BinWriterExt, VecArgs};
// use sqpack::model::SqPackFileInfo;
//
// use ttmp::tracking_reader::TrackingReader;
//
// const MAX_LODS: usize = 3;
//
// pub fn main() {
// let arg = std::env::args().skip(1).next().unwrap();
// let file = File::open(&arg).unwrap();
// let (manifest, mut zip) = ttmp::from_reader(file).unwrap();
// let mut data_file = TrackingReader::new(zip.by_name("TTMPD.mpd").unwrap());
//
// let mut all_files = Vec::new();
//
// if let Some(pages) = &manifest.mod_pack_pages {
// for page in pages {
// for group in &page.mod_groups {
// for option in &group.option_list {
// for file in &option.mods_jsons {
// all_files.push(file);
// }
// }
// }
// }
// }
//
// if let Some(list) = &manifest.simple_mods_list {
// all_files.extend(list);
// }
//
// all_files.sort_unstable_by_key(|&file| file.mod_offset);
//
// let mut buf = [0; 4092];
// for file in all_files {
// data_file.read = 0;
// let path = &file.full_path;
// println!("extracting {}", path);
//
// if let Some(parent) = Path::new("./").join(Path::new(path)).parent() {
// std::fs::create_dir_all(parent).unwrap();
// }
//
// let expected = file.mod_size;
// let mut file = File::create(path).unwrap();
//
// let info: SqPackFileInfoHeader = read_struct(&mut data_file, &mut buf);
//
// match info.kind {
// FileKind::Empty => todo!(),
// FileKind::Standard => {
// let std_info: SqPackFileInfo = read_struct(&mut data_file, &mut buf);
// let blocks: Vec<DatStdFileBlockInfos> = (0..std_info.number_of_blocks)
// .map(|_| read_struct(&mut data_file, &mut buf))
// .collect();
//
// let skip_amt = info.size as usize
// - std::mem::size_of::<SqPackFileInfoHeader>()
// - std::mem::size_of::<SqPackFileInfo>()
// - std::mem::size_of::<DatStdFileBlockInfos>() * std_info.number_of_blocks as usize;
// skip(&mut data_file, &mut buf, skip_amt);
//
// for block in blocks {
// read_block_into(&mut data_file, &mut file, &mut buf, block.compressed_size as usize);
// }
// }
// FileKind::Model => {
// let model_info: ModelBlock = read_struct(&mut data_file, &mut buf);
// dbg!(&model_info);
//
// let block_counts = &model_info.block_num;
// let total_blocks = block_counts.stack
// + block_counts.runtime
// + block_counts.vertex_buffer.iter().sum::<u16>()
// + block_counts.edge_geometry_vertex_buffer.iter().sum::<u16>()
// + block_counts.index_buffer.iter().sum::<u16>();
// let block_sizes: Vec<u16> = read_vec(&mut data_file, total_blocks as usize);
//
// let skip_amt = info.size as usize
// - std::mem::size_of::<SqPackFileInfoHeader>()
// - std::mem::size_of::<ModelBlock>()
// - total_blocks as usize * std::mem::size_of::<u16>();
// skip(&mut data_file, &mut buf, skip_amt);
//
// file.seek(SeekFrom::Start(0x44)).unwrap();
//
// let stack_size = read_blocks_into(
// model_info.block_num.stack,
// model_info.block_index.stack,
// model_info.offset.stack,
// &block_sizes,
// &mut data_file,
// &mut file,
// &mut buf,
// );
//
// let runtime_size = read_blocks_into(
// model_info.block_num.runtime,
// model_info.block_index.runtime,
// model_info.offset.runtime,
// &block_sizes,
// &mut data_file,
// &mut file,
// &mut buf,
// );
//
// let mut vertex_data_offsets = [0u32; MAX_LODS];
// let mut vertex_buffer_sizes = [0u32; MAX_LODS];
//
// let mut index_data_offsets = [0u32; MAX_LODS];
// let mut index_buffer_sizes = [0u32; MAX_LODS];
//
// for lod_index in 0..MAX_LODS {
// // Vertex buffer
// let block_count = model_info.block_num.vertex_buffer[lod_index];
// if block_count != 0 {
// if lod_index == 0 || block_count > 0 {
// vertex_data_offsets[lod_index] = file.stream_position().unwrap().try_into().unwrap();
// }
//
// vertex_buffer_sizes[lod_index] = read_blocks_into(
// block_count,
// model_info.block_index.vertex_buffer[lod_index],
// // offset + model_info.offset.vertex_buffer[lod_index],
// model_info.offset.vertex_buffer[lod_index],
// &block_sizes,
// &mut data_file,
// &mut file,
// &mut buf,
// );
// }
//
// // Edge geometry vertex buffer
// let block_count = model_info.block_num.edge_geometry_vertex_buffer[lod_index];
// if block_count != 0 {
// read_blocks_into(
// block_count,
// model_info.block_index.edge_geometry_vertex_buffer[lod_index],
// // offset + model_info.offset.edge_geometry_vertex_buffer[lod_index],
// model_info.offset.edge_geometry_vertex_buffer[lod_index],
// &block_sizes,
// &mut data_file,
// &mut file,
// &mut buf,
// );
// }
//
// // Index buffer
// let block_count = model_info.block_num.index_buffer[lod_index];
// if block_count != 0 {
// if lod_index == 0 || block_count > 0 {
// index_data_offsets[lod_index] = file.stream_position().unwrap().try_into().unwrap();
// }
//
// index_buffer_sizes[lod_index] = read_blocks_into(
// block_count,
// model_info.block_index.index_buffer[lod_index],
// // offset + model_info.offset.index_buffer[lod_index],
// model_info.offset.index_buffer[lod_index],
// &block_sizes,
// &mut data_file,
// &mut file,
// &mut buf,
// );
// }
// }
//
// // Write out the header now we've collected the info for it.
// file.seek(SeekFrom::Start(0)).unwrap();
// file.write_le(&model_info.version).unwrap();
// file.write_le(&stack_size).unwrap();
// file.write_le(&runtime_size).unwrap();
// file.write_le(&model_info.vertex_declaration_num).unwrap();
// file.write_le(&model_info.material_num).unwrap();
// file.write_le(&vertex_data_offsets).unwrap();
// file.write_le(&index_data_offsets).unwrap();
// file.write_le(&vertex_buffer_sizes).unwrap();
// file.write_le(&index_buffer_sizes).unwrap();
// file.write_le(&model_info.num_lods).unwrap();
// file.write_le(&model_info.index_buffer_streaming_enabled).unwrap();
// file.write_le(&model_info.edge_geometry_enabled).unwrap();
// file.write_le(&0u8).unwrap();
// }
// FileKind::Texture => {
// let std_info: SqPackFileInfo = read_struct(&mut data_file, &mut buf);
// let blocks: Vec<LodBlock> = (0..std_info.number_of_blocks)
// .map(|_| read_struct(&mut data_file, &mut buf))
// .collect();
//
// let sub_block_count = blocks
// .iter()
// .fold(0, |acc, block| acc + block.block_count);
// let sub_block_sizes: Vec<u16> = read_vec(&mut data_file, sub_block_count as usize);
//
// let skip_amt = info.size as usize
// - std::mem::size_of::<SqPackFileInfoHeader>()
// - std::mem::size_of::<SqPackFileInfo>()
// - std::mem::size_of::<LodBlock>() * std_info.number_of_blocks as usize
// - std::mem::size_of::<u16>() * sub_block_sizes.len();
// skip(&mut data_file, &mut buf, skip_amt);
//
// let mip_map_size = blocks[0].compressed_offset;
// if mip_map_size > 0 {
// let mut reader = (&mut data_file).take(mip_map_size as u64);
// std::io::copy(&mut reader, &mut file).unwrap();
// }
//
// let mut sub_block = 0;
// for block in blocks {
// for _ in 0..block.block_count {
// read_block_into(&mut data_file, &mut file, &mut buf, sub_block_sizes[sub_block] as usize);
// sub_block += 1;
// }
// }
// }
// }
//
// if data_file.read < expected {
// let to_skip = expected - data_file.read;
// skip(&mut data_file, &mut buf, to_skip);
// }
// }
// }
//
// fn read_block_into<R: Read, W: Write>(reader: &mut R, writer: &mut W, buf: &mut [u8], size: usize) -> u64 {
// let header: DatBlockHeader = read_struct(reader, buf);
//
// let (read, actual) = if header.compressed_size == 32_000 {
// // uncompressed
// let mut reader = reader.take(header.uncompressed_size as u64);
// let read = std::io::copy(&mut reader, writer).unwrap();
// (read, read)
// } else {
// // compressed
// let reader = reader.take(header.compressed_size as u64);
// let mut decoder = DeflateDecoder::new(reader);
// let read = std::io::copy(&mut decoder, writer).unwrap();
// (header.compressed_size as u64, read)
// };
//
// if (header.size as usize + read as usize) < size as usize {
// let to_skip = size
// - header.size as usize
// - read as usize;
// skip(reader, buf, to_skip);
// }
//
// actual
// }
//
// fn read_blocks_into(
// block_count: u16,
// block_index: u16,
// _section_offset: u32,
// block_sizes: &[u16],
// reader: &mut impl Read,
// writer: &mut impl Write,
// buf: &mut [u8],
// ) -> u32 {
// let sizes = &block_sizes[block_index as usize..block_index as usize + block_count as usize];
//
// let mut total_read = 0u32;
// for &size in sizes {
// let bytes_read = read_block_into(reader, writer, buf, size as usize);
// total_read += bytes_read as u32;
// }
//
// total_read as u32
// }
//
// fn read_struct<S: BinRead, R: Read>(reader: &mut R, buf: &mut [u8]) -> S
// where S::Args: Default,
// {
// let size = std::mem::size_of::<S>();
// reader.read_exact(&mut buf[..size]).unwrap();
// S::read(&mut Cursor::new(&buf[..size])).unwrap()
// }
//
// fn skip<R: Read>(reader: &mut R, buf: &mut [u8], size: usize) {
// let mut left = size;
// while left > 0 {
// let to_read = std::cmp::min(left, buf.len());
// left -= reader.read(&mut buf[..to_read]).unwrap();
// }
// }
//
// fn read_vec<S: BinRead<Args=()>, R: Read>(reader: &mut R, amount: usize) -> Vec<S> {
// let mut buf = vec![0; amount * std::mem::size_of::<S>()];
// reader.read_exact(&mut buf).unwrap();
// <Vec<S>>::read_args(
// &mut Cursor::new(buf),
// VecArgs {
// count: amount,
// inner: (),
// },
// ).unwrap()
// }


@@ -0,0 +1,99 @@
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::{Cursor, Seek, SeekFrom, Write};
use std::path::Path;
use sha3::{Digest, Sha3_256};
use ttmp::ttmp_extractor::{ModFile, TtmpExtractor};
pub fn main() {
let mut sha = Sha3_256::default();
let arg = std::env::args().skip(1).next().unwrap();
let file = File::open(&arg).unwrap();
let extractor = TtmpExtractor::new(file).unwrap();
let mut zip = extractor.zip().borrow_mut();
let files = extractor.all_files_sorted();
let mut data_file = zip.by_name("TTMPD.mpd").unwrap();
let version = &*extractor.manifest().version;
std::fs::create_dir_all("files").unwrap();
let mut hashes: HashMap<String, Vec<SavedFile>> = HashMap::with_capacity(files.len());
let mut temp = tempfile::tempfile().unwrap();
for file in files {
temp.set_len(0).unwrap();
temp.seek(SeekFrom::Start(0)).unwrap();
// write each file into a temp file, then hash
// mod files can get quite large, so storing them entirely in memory is probably a bad idea
// let mut cursor = Cursor::new(Vec::with_capacity(file.file.mod_size));
TtmpExtractor::extract_one_into(&file, &mut data_file, &mut temp).unwrap();
// let data = cursor.into_inner();
// sha.update(&data);
temp.seek(SeekFrom::Start(0)).unwrap();
std::io::copy(&mut temp, &mut sha).unwrap();
temp.seek(SeekFrom::Start(0)).unwrap();
let hash = sha.finalize_reset();
let hash = hex::encode(&*hash);
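// content-address the output: only the first file with a given hash is written
// to disk, but every (package, group, option, path) that produced it is recorded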
let new = !hashes.contains_key(&hash);
let saved = SavedFile {
author: extractor.manifest().author.clone(),
package: extractor.manifest().name.clone(),
package_version: extractor.manifest().version.clone(),
game_path: file.file.full_path.clone(),
group: file.group.map(ToOwned::to_owned),
option: file.option.map(ToOwned::to_owned),
};
hashes.entry(hash.clone()).or_default().push(saved);
if new {
let path = Path::new("files").join(&hash);
std::io::copy(&mut temp, &mut File::create(&path).unwrap()).unwrap();
// std::fs::write(&path, data).unwrap();
println!("writing {}", path.to_string_lossy());
}
}
println!("{:#?}", hashes);
}
#[derive(Debug)]
pub struct SavedFile {
pub author: String,
pub package: String,
pub package_version: String,
pub game_path: String,
pub group: Option<String>,
pub option: Option<String>,
}
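/// Tee-style writer that duplicates every write and flush to two underlying
/// writers (e.g. a file and a hasher).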
struct Multiplexer<W1, W2> {
one: W1,
two: W2,
}
impl<W1, W2> Multiplexer<W1, W2> {
fn new(one: W1, two: W2) -> Self {
Self {
one,
two,
}
}
}
impl<W1: Write, W2: Write> Write for Multiplexer<W1, W2> {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
let one = self.one.write(buf);
let two = self.two.write(buf);
one.and(two)
}
fn flush(&mut self) -> std::io::Result<()> {
let one = self.one.flush();
let two = self.two.flush();
one.and(two)
}
}


@@ -0,0 +1,8 @@
use std::fs::File;
pub fn main() {
let arg = std::env::args().skip(1).next().unwrap();
let file = File::open(&arg).unwrap();
let (manifest, _) = ttmp::from_reader(file).unwrap();
println!("{:#?}", manifest);
}

19
src/error.rs Normal file

@@ -0,0 +1,19 @@
use thiserror::Error;
pub type Result<T, E = Error> = std::result::Result<T, E>;
#[derive(Debug, Error)]
pub enum Error {
#[error("error processing mod zip file")]
Zip(#[from] zip::result::ZipError),
#[error("io error reading/extracting mod")]
Io(#[from] std::io::Error),
#[error("invalid mod manifest")]
InvalidManifest(#[from] serde_json::Error),
#[error("the ttmp's data file was missing or corrupt")]
MissingDataFile(zip::result::ZipError),
#[error("the ttmp data file was corrupt")]
SqPackError(#[from] sqpack::binrw::Error),
#[error("error writing to output")]
BinRwWrite(sqpack::binrw::Error),
}

18
src/lib.rs Normal file

@@ -0,0 +1,18 @@
use std::io::{Read, Seek};
use zip::ZipArchive;
use crate::error::{Error, Result};
use crate::model::ModPack;
pub mod model;
pub mod error;
pub(crate) mod tracking_reader;
pub mod ttmp_extractor;
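/// A ttmp is a zip archive containing a JSON manifest (`TTMPL.mpl`) and a data
/// blob (`TTMPD.mpd`); this parses the manifest and returns it alongside the
/// still-open archive.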
pub fn from_reader<R: Read + Seek>(reader: R) -> Result<(ModPack, ZipArchive<R>)> {
let mut zip = ZipArchive::new(reader).map_err(Error::Zip)?;
let manifest = zip.by_name("TTMPL.mpl").map_err(Error::Zip)?;
let manifest = serde_json::from_reader(manifest).map_err(Error::InvalidManifest)?;
Ok((manifest, zip))
}

15
src/model/mod.rs Normal file

@@ -0,0 +1,15 @@
pub use self::{
mod_group::ModGroup,
mod_option::ModOption,
mod_pack::ModPack,
mod_pack_page::ModPackPage,
selection_type::SelectionType,
simple_mod::SimpleMod,
};
pub mod mod_group;
pub mod mod_option;
pub mod mod_pack;
pub mod mod_pack_page;
pub mod selection_type;
pub mod simple_mod;

11
src/model/mod_group.rs Normal file

@@ -0,0 +1,11 @@
use serde::{Deserialize, Serialize};
use crate::model::{ModOption, SelectionType};
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "PascalCase")]
pub struct ModGroup {
pub group_name: String,
pub selection_type: SelectionType,
pub option_list: Vec<ModOption>,
}

15
src/model/mod_option.rs Normal file

@@ -0,0 +1,15 @@
use serde::{Deserialize, Serialize};
use crate::model::{SelectionType, SimpleMod};
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "PascalCase")]
pub struct ModOption {
pub name: String,
pub description: Option<String>,
pub image_path: Option<String>,
pub mods_jsons: Vec<SimpleMod>,
pub group_name: String,
pub selection_type: SelectionType,
pub is_checked: bool,
}

18
src/model/mod_pack.rs Normal file

@@ -0,0 +1,18 @@
use serde::{Deserialize, Serialize};
use crate::model::{ModPackPage, SimpleMod};
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "PascalCase")]
pub struct ModPack {
pub minimum_framework_version: String,
#[serde(rename = "TTMPVersion")]
pub ttmp_version: String,
pub name: String,
pub author: String,
pub version: String,
pub description: String,
pub url: String,
pub mod_pack_pages: Option<Vec<ModPackPage>>,
pub simple_mods_list: Option<Vec<SimpleMod>>,
}

10
src/model/mod_pack_page.rs Normal file

@@ -0,0 +1,10 @@
use serde::{Deserialize, Serialize};
use crate::model::ModGroup;
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "PascalCase")]
pub struct ModPackPage {
pub page_index: i32,
pub mod_groups: Vec<ModGroup>,
}

7
src/model/selection_type.rs Normal file

@@ -0,0 +1,7 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Deserialize, Serialize, Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum SelectionType {
Single,
Multi,
}

14
src/model/simple_mod.rs Normal file

@@ -0,0 +1,14 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "PascalCase")]
pub struct SimpleMod {
pub name: String,
pub category: String,
pub full_path: String,
pub dat_file: String,
pub is_default: bool,
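// offset and size, in bytes, of this file's data within TTMPD.mpd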
pub mod_offset: usize,
pub mod_size: usize,
pub mod_pack_entry: Option<serde_json::Value>,
}

26
src/tracking_reader.rs Normal file

@@ -0,0 +1,26 @@
use std::io::Read;
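/// Wraps a reader and counts the bytes consumed, so callers can tell how far
/// into the underlying stream they have read.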
pub struct TrackingReader<R> {
inner: R,
pub read: usize,
}
impl<R: Read> TrackingReader<R> {
pub fn new(reader: R) -> Self {
Self {
inner: reader,
read: 0,
}
}
}
impl<R: Read> Read for TrackingReader<R> {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
let res = self.inner.read(buf);
if let Ok(read) = res {
self.read += read;
}
res
}
}

419
src/ttmp_extractor.rs Normal file

@@ -0,0 +1,419 @@
use std::cell::RefCell;
use std::io::{Cursor, Read, Seek, SeekFrom, Write};
use flate2::read::DeflateDecoder;
use sqpack::{DatBlockHeader, DatStdFileBlockInfos, FileKind, LodBlock, ModelBlock, SqPackFileInfo, SqPackFileInfoHeader};
use sqpack::binrw::{BinRead, BinWriterExt, VecArgs};
use sqpack::binrw::meta::ReadEndian;
use zip::ZipArchive;
use crate::{Error, ModPack};
use crate::error::Result;
use crate::model::SimpleMod;
use crate::tracking_reader::TrackingReader;
#[doc(hidden)]
pub trait WriteSeek: Write + Seek {}
impl<T> WriteSeek for T
where T: Write + Seek {}
pub struct TtmpExtractor<R> {
manifest: ModPack,
zip: RefCell<ZipArchive<R>>,
}
impl<R: Read + Seek> TtmpExtractor<R> {
pub fn new(reader: R) -> Result<Self> {
let (manifest, zip) = crate::from_reader(reader)?;
Ok(Self {
manifest,
zip: RefCell::new(zip),
})
}
pub fn manifest(&self) -> &ModPack {
&self.manifest
}
pub fn into_manifest(self) -> ModPack {
self.manifest
}
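/// Extracts every file in the pack, in data-file offset order. `writer_func`
/// is called once per manifest entry and returns the writer to extract into.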
pub fn extract_all<F>(&mut self, writer_func: F) -> Result<()>
where F: Fn(&ModFile) -> Result<Box<dyn WriteSeek>, std::io::Error> + 'static,
{
let all_files = self.all_files_sorted();
let mut zip = self.zip.borrow_mut();
let mut data_file = TrackingReader::new(zip.by_name("TTMPD.mpd")
.map_err(Error::MissingDataFile)?);
let mut buf = [0; 4096];
for mod_file in all_files {
let file = &*mod_file.file;
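// reset the byte counter so we know how much of this entry has been consumed,
// and can skip any trailing bytes up to mod_size afterwards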
data_file.read = 0;
// get the writer to write this file into
let mut writer = writer_func(&mod_file)
.map_err(Error::Io)?;
let expected = file.mod_size;
let info: SqPackFileInfoHeader = Self::read_struct(&mut data_file, &mut buf)?;
match info.kind {
FileKind::Empty => todo!(),
FileKind::Standard => {
Self::extract_standard_file(&info, &mut data_file, &mut writer, &mut buf)?;
}
FileKind::Model => {
Self::extract_model_file(&info, &mut data_file, &mut writer, &mut buf)?;
}
FileKind::Texture => {
Self::extract_texture_file(&info, &mut data_file, &mut writer, &mut buf)?;
}
}
if data_file.read < expected {
let to_skip = expected - data_file.read;
Self::skip(&mut data_file, &mut buf, to_skip)?;
}
}
Ok(())
}
pub fn zip(&self) -> &RefCell<ZipArchive<R>> {
&self.zip
}
pub fn all_files_sorted(&self) -> Vec<ModFile> {
let mut all_files = Vec::new();
if let Some(pages) = &self.manifest.mod_pack_pages {
for page in pages {
for group in &page.mod_groups {
for option in &group.option_list {
for file in &option.mods_jsons {
all_files.push(ModFile {
group: Some(&*option.group_name),
option: Some(&*option.name),
file,
});
}
}
}
}
}
if let Some(list) = &self.manifest.simple_mods_list {
all_files.extend(list.iter().map(|file| ModFile {
group: None,
option: None,
file,
}));
}
all_files.sort_unstable_by_key(|file| file.file.mod_offset);
all_files
}
}
impl<R: Read> TtmpExtractor<R> {
pub fn extract_one_into<W: Write + Seek>(mod_file: &ModFile, mut reader: R, mut writer: W) -> Result<()> {
let mut reader = TrackingReader::new(&mut reader);
let mut buf = [0; 4096];
let file = &*mod_file.file;
let expected = file.mod_size;
let info: SqPackFileInfoHeader = Self::read_struct(&mut reader, &mut buf)?;
match info.kind {
FileKind::Empty => todo!(),
FileKind::Standard => {
Self::extract_standard_file(&info, &mut reader, &mut writer, &mut buf)?;
}
FileKind::Model => {
Self::extract_model_file(&info, &mut reader, &mut writer, &mut buf)?;
}
FileKind::Texture => {
Self::extract_texture_file(&info, &mut reader, &mut writer, &mut buf)?;
}
}
if reader.read < expected {
let to_skip = expected - reader.read;
Self::skip(&mut reader, &mut buf, to_skip)?;
}
Ok(())
}
fn extract_standard_file<T: Read, W: Write>(info: &SqPackFileInfoHeader, mut data_file: T, mut writer: W, buf: &mut [u8]) -> Result<()> {
let std_info: SqPackFileInfo = Self::read_struct(&mut data_file, buf)?;
let blocks: Vec<DatStdFileBlockInfos> = (0..std_info.number_of_blocks)
.map(|_| Self::read_struct(&mut data_file, buf))
.collect::<Result<_>>()?;
let skip_amt = info.size as usize
- std::mem::size_of::<SqPackFileInfoHeader>()
- std::mem::size_of::<SqPackFileInfo>()
- std::mem::size_of::<DatStdFileBlockInfos>() * std_info.number_of_blocks as usize;
Self::skip(&mut data_file, buf, skip_amt)?;
for block in blocks {
Self::read_block_into(&mut data_file, &mut writer, buf, block.compressed_size as usize)?;
}
Ok(())
}
fn extract_model_file<T: Read, W: Write + Seek>(info: &SqPackFileInfoHeader, mut reader: T, mut writer: W, buf: &mut [u8]) -> Result<()> {
let model_info: ModelBlock = Self::read_struct(&mut reader, buf)?;
let block_counts = &model_info.block_num;
let total_blocks = block_counts.stack
+ block_counts.runtime
+ block_counts.vertex_buffer.iter().sum::<u16>()
+ block_counts.edge_geometry_vertex_buffer.iter().sum::<u16>()
+ block_counts.index_buffer.iter().sum::<u16>();
let block_sizes: Vec<u16> = Self::read_vec(&mut reader, total_blocks as usize, buf)?;
let skip_amt = info.size as usize
- std::mem::size_of::<SqPackFileInfoHeader>()
- std::mem::size_of::<ModelBlock>()
- std::mem::size_of::<u16>() * total_blocks as usize;
Self::skip(&mut reader, buf, skip_amt)?;
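// the model file header occupies the first 0x44 bytes of the output; skip past
// it now and write it at the end, once section sizes and offsets are known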
writer.seek(SeekFrom::Start(0x44)).map_err(Error::Io)?;
let stack_size = Self::read_blocks_into(
model_info.block_num.stack,
model_info.block_index.stack,
model_info.offset.stack,
&block_sizes,
&mut reader,
&mut writer,
buf,
)?;
let runtime_size = Self::read_blocks_into(
model_info.block_num.runtime,
model_info.block_index.runtime,
model_info.offset.runtime,
&block_sizes,
&mut reader,
&mut writer,
buf,
)?;
const MAX_LODS: usize = 3;
let mut vertex_data_offsets = [0u32; MAX_LODS];
let mut vertex_buffer_sizes = [0u32; MAX_LODS];
let mut index_data_offsets = [0u32; MAX_LODS];
let mut index_buffer_sizes = [0u32; MAX_LODS];
for lod_index in 0..MAX_LODS {
// Vertex buffer
let block_count = model_info.block_num.vertex_buffer[lod_index];
if block_count != 0 {
if lod_index == 0 || block_count > 0 {
vertex_data_offsets[lod_index] = writer.stream_position().map_err(Error::Io)? as u32;
}
vertex_buffer_sizes[lod_index] = Self::read_blocks_into(
block_count,
model_info.block_index.vertex_buffer[lod_index],
model_info.offset.vertex_buffer[lod_index],
&block_sizes,
&mut reader,
&mut writer,
buf,
)?;
}
// Edge geometry vertex buffer
let block_count = model_info.block_num.edge_geometry_vertex_buffer[lod_index];
if block_count != 0 {
Self::read_blocks_into(
block_count,
model_info.block_index.edge_geometry_vertex_buffer[lod_index],
model_info.offset.edge_geometry_vertex_buffer[lod_index],
&block_sizes,
&mut reader,
&mut writer,
buf,
)?;
}
// Index buffer
let block_count = model_info.block_num.index_buffer[lod_index];
if block_count != 0 {
if lod_index == 0 || block_count > 0 {
index_data_offsets[lod_index] = writer.stream_position().map_err(Error::Io)? as u32;
}
index_buffer_sizes[lod_index] = Self::read_blocks_into(
block_count,
model_info.block_index.index_buffer[lod_index],
model_info.offset.index_buffer[lod_index],
&block_sizes,
&mut reader,
&mut writer,
buf,
)?;
}
}
// Write out the header now we've collected the info for it.
writer.seek(SeekFrom::Start(0)).map_err(Error::Io)?;
writer.write_le(&model_info.version).map_err(Error::BinRwWrite)?;
writer.write_le(&stack_size).map_err(Error::BinRwWrite)?;
writer.write_le(&runtime_size).map_err(Error::BinRwWrite)?;
writer.write_le(&model_info.vertex_declaration_num).map_err(Error::BinRwWrite)?;
writer.write_le(&model_info.material_num).map_err(Error::BinRwWrite)?;
writer.write_le(&vertex_data_offsets).map_err(Error::BinRwWrite)?;
writer.write_le(&index_data_offsets).map_err(Error::BinRwWrite)?;
writer.write_le(&vertex_buffer_sizes).map_err(Error::BinRwWrite)?;
writer.write_le(&index_buffer_sizes).map_err(Error::BinRwWrite)?;
writer.write_le(&model_info.num_lods).map_err(Error::BinRwWrite)?;
writer.write_le(&model_info.index_buffer_streaming_enabled).map_err(Error::BinRwWrite)?;
writer.write_le(&model_info.edge_geometry_enabled).map_err(Error::BinRwWrite)?;
writer.write_le(&0u8).map_err(Error::BinRwWrite)?;
Ok(())
}
fn extract_texture_file<T: Read, W: Write>(info: &SqPackFileInfoHeader, mut reader: T, mut writer: W, buf: &mut [u8]) -> Result<()> {
let std_info: SqPackFileInfo = Self::read_struct(&mut reader, buf)?;
let blocks: Vec<LodBlock> = (0..std_info.number_of_blocks)
.map(|_| Self::read_struct(&mut reader, buf))
.collect::<Result<_>>()?;
let sub_block_count = blocks
.iter()
.fold(0, |acc, block| acc + block.block_count);
let sub_block_sizes: Vec<u16> = Self::read_vec(&mut reader, sub_block_count as usize, buf)?;
let skip_amt = info.size as usize
- std::mem::size_of::<SqPackFileInfoHeader>()
- std::mem::size_of::<SqPackFileInfo>()
- std::mem::size_of::<LodBlock>() * std_info.number_of_blocks as usize
- std::mem::size_of::<u16>() * sub_block_sizes.len();
Self::skip(&mut reader, buf, skip_amt)?;
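// the bytes before the first block are stored uncompressed and copied through
// verbatim; blocks[0].compressed_offset doubles as their length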
let mip_map_size = blocks[0].compressed_offset;
if mip_map_size > 0 {
let mut reader = (&mut reader).take(mip_map_size as u64);
std::io::copy(&mut reader, &mut writer).map_err(Error::Io)?;
}
let mut sub_block = 0;
for block in blocks {
for _ in 0..block.block_count {
Self::read_block_into(&mut reader, &mut writer, buf, sub_block_sizes[sub_block] as usize)?;
sub_block += 1;
}
}
Ok(())
}
fn read_struct<S: BinRead + ReadEndian, T: Read>(mut reader: T, buf: &mut [u8]) -> Result<S>
where S::Args: Default,
{
let size = std::mem::size_of::<S>();
if size > buf.len() {
panic!("struct of {} bytes exceeds the {}-byte scratch buffer", size, buf.len());
}
reader.read_exact(&mut buf[..size]).map_err(Error::Io)?;
S::read(&mut Cursor::new(&buf[..size]))
.map_err(Error::SqPackError)
}
fn read_block_into<T: Read, W: Write>(mut reader: T, mut writer: W, buf: &mut [u8], size: usize) -> Result<u64> {
let header: DatBlockHeader = Self::read_struct(&mut reader, buf)?;
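// SqPack marks a block as stored-uncompressed with a sentinel compressed_size
// of 32000; anything else is a DEFLATE stream of compressed_size bytes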
let (read, actual) = if header.compressed_size == 32_000 {
// uncompressed
let mut reader = (&mut reader).take(header.uncompressed_size as u64);
let read = std::io::copy(&mut reader, &mut writer).map_err(Error::Io)?;
(read, read)
} else {
// compressed
let reader = (&mut reader).take(header.compressed_size as u64);
let mut decoder = DeflateDecoder::new(reader);
let read = std::io::copy(&mut decoder, &mut writer).map_err(Error::Io)?;
(header.compressed_size as u64, read)
};
if (header.size as usize + read as usize) < size as usize {
let to_skip = size
- header.size as usize
- read as usize;
Self::skip(&mut reader, buf, to_skip)?;
}
Ok(actual)
}
fn read_blocks_into<T: Read, W: Write>(
block_count: u16,
block_index: u16,
_section_offset: u32,
block_sizes: &[u16],
mut reader: T,
mut writer: W,
buf: &mut [u8],
) -> Result<u32> {
let sizes = &block_sizes[block_index as usize..block_index as usize + block_count as usize];
sizes
.iter()
.try_fold(0, |acc, &size| {
let bytes_read = Self::read_block_into(&mut reader, &mut writer, buf, size as usize)?;
Ok(acc + bytes_read as u32)
})
}
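// read `amount` little-endian values, reusing the scratch buffer when it is
// large enough and falling back to a temporary allocation otherwise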
fn read_vec<S: BinRead<Args=()>, T: Read>(mut reader: T, amount: usize, buf: &mut [u8]) -> Result<Vec<S>> {
let size_needed = amount * std::mem::size_of::<S>();
if size_needed <= buf.len() {
reader.read_exact(&mut buf[..size_needed]).map_err(Error::Io)?;
<Vec<S>>::read_le_args(
&mut Cursor::new(&buf[..size_needed]),
VecArgs {
count: amount,
inner: (),
},
).map_err(Error::SqPackError)
} else {
let mut buf = vec![0; size_needed];
reader.read_exact(&mut buf).map_err(Error::Io)?;
<Vec<S>>::read_le_args(
&mut Cursor::new(buf),
VecArgs {
count: amount,
inner: (),
},
).map_err(Error::SqPackError)
}
}
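// consume and discard `amt` bytes through the scratch buffer; the data stream
// is only ever read forward, so unwanted padding cannot be seeked past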
fn skip<T: Read>(mut reader: T, buf: &mut [u8], amt: usize) -> Result<()> {
let mut left = amt;
while left > 0 {
let to_read = std::cmp::min(left, buf.len());
let read = reader.read(&mut buf[..to_read]).map_err(Error::Io)?;
if read == 0 {
// reading zero bytes means eof; erroring avoids looping forever on a truncated file
return Err(Error::Io(std::io::ErrorKind::UnexpectedEof.into()));
}
left -= read;
}
Ok(())
}
}
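/// A single manifest entry plus the group and option it came from (both `None`
/// for entries taken from the simple mods list).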
#[derive(Debug)]
pub struct ModFile<'a> {
pub group: Option<&'a str>,
pub option: Option<&'a str>,
pub file: &'a SimpleMod,
}