Merge pull request #24 from goaaats/old_streams

Use old SqPackStream
This commit is contained in:
Adam 2021-04-16 08:47:40 +10:00 committed by GitHub
commit 5cea061384
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 588 additions and 8 deletions

View file

@@ -1,10 +1,11 @@
using System; using System;
using System.IO; using System.IO;
using Lumina.Data; using Lumina.Data;
using Penumbra.Util;
namespace Penumbra.Importer namespace Penumbra.Importer
{ {
public class MagicTempFileStreamManagerAndDeleterFuckery : SqPackStream, IDisposable public class MagicTempFileStreamManagerAndDeleterFuckery : PenumbraSqPackStream, IDisposable
{ {
private readonly FileStream _fileStream; private readonly FileStream _fileStream;

View file

@@ -1,5 +1,6 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Text; using System.Text;
@@ -9,6 +10,7 @@ using Lumina.Data;
using Newtonsoft.Json; using Newtonsoft.Json;
using Penumbra.Importer.Models; using Penumbra.Importer.Models;
using Penumbra.Models; using Penumbra.Models;
using Penumbra.Util;
namespace Penumbra.Importer namespace Penumbra.Importer
{ {
@@ -62,12 +64,28 @@ namespace Penumbra.Importer
fs.Close(); fs.Close();
} }
private SqPackStream GetMagicSqPackDeleterStream( ZipFile file, string entryName ) // You can in no way rely on any file paths in TTMPs so we need to just do this, sorry
private ZipEntry FindZipEntry( ZipFile file, string fileName )
{
for( var i = 0; i < file.Count; i++ )
{
var entry = file[ i ];
if( entry.Name.Contains( fileName ) )
return entry;
}
return null;
}
private PenumbraSqPackStream GetMagicSqPackDeleterStream( ZipFile file, string entryName )
{ {
State = ImporterState.WritingPackToDisk; State = ImporterState.WritingPackToDisk;
// write shitty zip garbage to disk // write shitty zip garbage to disk
var entry = file.GetEntry( entryName ); var entry = FindZipEntry( file, entryName );
Debug.Assert( entry != null, $"Could not find in mod zip: {entryName}" );
using var s = file.GetInputStream( entry ); using var s = file.GetInputStream( entry );
WriteZipEntryToTempFile( s ); WriteZipEntryToTempFile( s );
@@ -80,8 +98,11 @@ namespace Penumbra.Importer
{ {
using var zfs = modPackFile.OpenRead(); using var zfs = modPackFile.OpenRead();
using var extractedModPack = new ZipFile( zfs ); using var extractedModPack = new ZipFile( zfs );
var mpl = extractedModPack.GetEntry( "TTMPL.mpl" );
var modRaw = GetStringFromZipEntry( extractedModPack, mpl, Encoding.UTF8 ); var mpl = FindZipEntry( extractedModPack, "TTMPL.mpl" );
Debug.Assert( mpl != null, "Could not find mod meta in ZIP." );
var modRaw = GetStringFromZipEntry( extractedModPack, mpl, Encoding.UTF8 );
// At least a better validation than going by the extension. // At least a better validation than going by the extension.
if( modRaw.Contains( "\"TTMPVersion\":" ) ) if( modRaw.Contains( "\"TTMPVersion\":" ) )
@@ -277,7 +298,7 @@ namespace Penumbra.Importer
throw new NotImplementedException(); throw new NotImplementedException();
} }
private void ExtractSimpleModList( DirectoryInfo outDirectory, IEnumerable< SimpleMod > mods, SqPackStream dataStream ) private void ExtractSimpleModList( DirectoryInfo outDirectory, IEnumerable< SimpleMod > mods, PenumbraSqPackStream dataStream )
{ {
State = ImporterState.ExtractingModFiles; State = ImporterState.ExtractingModFiles;
@@ -294,13 +315,13 @@ namespace Penumbra.Importer
} }
} }
private void ExtractMod( DirectoryInfo outDirectory, SimpleMod mod, SqPackStream dataStream ) private void ExtractMod( DirectoryInfo outDirectory, SimpleMod mod, PenumbraSqPackStream dataStream )
{ {
PluginLog.Log( " -> Extracting {0} at {1}", mod.FullPath, mod.ModOffset.ToString( "X" ) ); PluginLog.Log( " -> Extracting {0} at {1}", mod.FullPath, mod.ModOffset.ToString( "X" ) );
try try
{ {
var data = dataStream.ReadFile< FileResource >( mod.ModOffset ); var data = dataStream.ReadFile< PenumbraSqPackStream.PenumbraFileResource >( mod.ModOffset );
var extractedFile = new FileInfo( Path.Combine( outDirectory.FullName, mod.FullPath ) ); var extractedFile = new FileInfo( Path.Combine( outDirectory.FullName, mod.FullPath ) );
extractedFile.Directory?.Create(); extractedFile.Directory?.Create();

View file

@@ -0,0 +1,140 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
namespace Penumbra.Util
{
public static class BinaryReaderExtensions
{
    /// <summary>
    /// Reads a single structure of type <typeparamref name="T"/> from the current stream position.
    /// </summary>
    /// <param name="br"></param>
    /// <typeparam name="T">The structure to read in to</typeparam>
    /// <returns>The file data as a structure</returns>
    public static T ReadStructure< T >( this BinaryReader br ) where T : struct
    {
        ReadOnlySpan< byte > raw = br.ReadBytes( Unsafe.SizeOf< T >() );
        return MemoryMarshal.Read< T >( raw );
    }

    /// <summary>
    /// Reads many structures from the current stream position.
    /// </summary>
    /// <param name="br"></param>
    /// <param name="count">The number of T to read from the stream</param>
    /// <typeparam name="T">The structure to read in to</typeparam>
    /// <returns>A list containing the structures read from the stream</returns>
    public static List< T > ReadStructures< T >( this BinaryReader br, int count ) where T : struct
    {
        // NOTE: uses the marshaled size as the per-element stride, matching the array variant below.
        var stride = Marshal.SizeOf< T >();
        var raw    = br.ReadBytes( stride * count );

        var result = new List< T >( count );
        for( var index = 0; index < count; index++ )
        {
            result.Add( MemoryMarshal.Read< T >( raw.AsSpan( index * stride, stride ) ) );
        }

        return result;
    }

    /// <summary>
    /// Reads many structures from the current stream position into an array.
    /// </summary>
    /// <param name="br"></param>
    /// <param name="count">The number of T to read from the stream</param>
    /// <typeparam name="T">The structure to read in to</typeparam>
    /// <returns>An array containing the structures read from the stream</returns>
    public static T[] ReadStructuresAsArray< T >( this BinaryReader br, int count ) where T : struct
    {
        var stride = Marshal.SizeOf< T >();
        var raw    = br.ReadBytes( stride * count );

        // im a pirate arr
        var result = new T[ count ];
        for( var index = 0; index < count; index++ )
        {
            result[ index ] = MemoryMarshal.Read< T >( raw.AsSpan( index * stride, stride ) );
        }

        return result;
    }

    /// <summary>
    /// Moves the BinaryReader position to offset, reads a UTF-8 string, then
    /// sets the reader position back to where it was when it started
    /// </summary>
    /// <param name="br"></param>
    /// <param name="offset">The offset to read a string starting from.</param>
    /// <returns></returns>
    public static string ReadStringOffsetData( this BinaryReader br, long offset )
        => Encoding.UTF8.GetString( ReadRawOffsetData( br, offset ) );

    /// <summary>
    /// Moves the BinaryReader position to offset, reads raw bytes until a null byte, then
    /// sets the reader position back to where it was when it started
    /// </summary>
    /// <param name="br"></param>
    /// <param name="offset">The offset to read data starting from.</param>
    /// <returns>The bytes read, excluding the terminating null byte.</returns>
    public static byte[] ReadRawOffsetData( this BinaryReader br, long offset )
    {
        var restorePosition = br.BaseStream.Position;
        br.BaseStream.Position = offset;

        var collected = new List< byte >();
        for( var b = br.ReadByte(); b != 0; b = br.ReadByte() )
        {
            collected.Add( b );
        }

        br.BaseStream.Position = restorePosition;
        return collected.ToArray();
    }

    /// <summary>
    /// Seeks this BinaryReader's position to the given offset. Syntactic sugar.
    /// </summary>
    public static void Seek( this BinaryReader br, long offset )
        => br.BaseStream.Position = offset;

    /// <summary>
    /// Reads a byte and moves the stream position back to where it started before the operation
    /// </summary>
    /// <param name="br">The reader to use to read the byte</param>
    /// <returns>The byte that was read</returns>
    public static byte PeekByte( this BinaryReader br )
    {
        var value = br.ReadByte();
        br.BaseStream.Position--;
        return value;
    }

    /// <summary>
    /// Reads bytes and moves the stream position back to where it started before the operation
    /// </summary>
    /// <param name="br">The reader to use to read the bytes</param>
    /// <param name="count">The number of bytes to read</param>
    /// <returns>The read bytes</returns>
    public static byte[] PeekBytes( this BinaryReader br, int count )
    {
        var values = br.ReadBytes( count );
        br.BaseStream.Position -= count;
        return values;
    }
}
}

View file

@@ -0,0 +1,17 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Penumbra.Util
{
public static class MemoryStreamExtensions
{
    /// <summary>
    /// Convenience overload that writes an entire byte array to the stream.
    /// </summary>
    public static void Write( this MemoryStream stream, byte[] data )
        => stream.Write( data, 0, data.Length );
}
}

View file

@@ -0,0 +1,401 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
using Lumina;
using Lumina.Data;
using Lumina.Data.Structs;
namespace Penumbra.Util
{
// Local fork of Lumina's SqPackStream so mod extraction can use Penumbra's own
// PenumbraFileResource / PenumbraFileInfo types (this commit: "Use old SqPackStream").
public class PenumbraSqPackStream : IDisposable
{
// Underlying data stream (a dat file or the temp file written from a TTMP).
public Stream BaseStream { get; protected set; }
// Reader over BaseStream; all structure parsing and block reads go through this.
protected BinaryReader Reader { get; set; }
public PenumbraSqPackStream( FileInfo file ) : this( file.OpenRead() ) {}
public PenumbraSqPackStream( Stream stream )
{
BaseStream = stream;
Reader = new BinaryReader( BaseStream );
}
/// <summary>
/// Reads the SqPack header located at the very start of the stream.
/// </summary>
public SqPackHeader GetSqPackHeader()
{
BaseStream.Position = 0;
return Reader.ReadStructure< SqPackHeader >();
}
/// <summary>
/// Reads the per-file metadata header at the given absolute stream offset.
/// </summary>
public SqPackFileInfo GetFileMetadata( long offset )
{
BaseStream.Position = offset;
return Reader.ReadStructure< SqPackFileInfo >();
}
/// <summary>
/// Reads and decompresses the whole file entry starting at <paramref name="offset"/>,
/// dispatching on the entry type (standard/model/texture), then loads the bytes into
/// a new <typeparamref name="T"/> and calls its LoadFile hook.
/// </summary>
/// <exception cref="FileNotFoundException">The entry at the offset is of type Empty.</exception>
/// <exception cref="NotImplementedException">The entry type is not handled here.</exception>
public T ReadFile< T >( long offset ) where T : PenumbraFileResource
{
using var ms = new MemoryStream();
BaseStream.Position = offset;
var fileInfo = Reader.ReadStructure< SqPackFileInfo >();
var file = Activator.CreateInstance< T >();
// check if we need to read the extended model header or just default to the standard file header
if( fileInfo.Type == FileType.Model )
{
// re-read the header region as the larger ModelBlock structure
BaseStream.Position = offset;
var modelFileInfo = Reader.ReadStructure< ModelBlock >();
file.FileInfo = new PenumbraFileInfo
{
HeaderSize = modelFileInfo.Size,
Type = modelFileInfo.Type,
BlockCount = modelFileInfo.UsedNumberOfBlocks,
RawFileSize = modelFileInfo.RawFileSize,
Offset = offset,
// todo: is this useful?
ModelBlock = modelFileInfo
};
}
else
{
file.FileInfo = new PenumbraFileInfo
{
HeaderSize = fileInfo.Size,
Type = fileInfo.Type,
BlockCount = fileInfo.NumberOfBlocks,
RawFileSize = fileInfo.RawFileSize,
Offset = offset
};
}
switch( fileInfo.Type )
{
case FileType.Empty:
throw new FileNotFoundException( $"The file located at 0x{offset:x} is empty." );
case FileType.Standard:
ReadStandardFile( file, ms );
break;
case FileType.Model:
ReadModelFile( file, ms );
break;
case FileType.Texture:
ReadTextureFile( file, ms );
break;
default:
throw new NotImplementedException( $"File Type {(UInt32)fileInfo.Type} is not implemented." );
}
file.Data = ms.ToArray();
// non-fatal sanity check: decompressed size should match the header's raw size
if( file.Data.Length != file.FileInfo.RawFileSize )
Debug.WriteLine( "Read data size does not match file size." );
// expose the data through a read-only stream/reader pair for LoadFile implementations
file.FileStream = new MemoryStream( file.Data, false );
file.Reader = new BinaryReader( file.FileStream );
file.FileStream.Position = 0;
file.LoadFile();
return file;
}
/// <summary>
/// Reads all blocks of a standard file entry into <paramref name="ms"/>.
/// Assumes Reader is positioned right after the SqPackFileInfo header.
/// </summary>
private void ReadStandardFile( PenumbraFileResource resource, MemoryStream ms )
{
var blocks = Reader.ReadStructures< DatStdFileBlockInfos >( (int)resource.FileInfo.BlockCount );
foreach( var block in blocks )
{
// block offsets are relative to the end of the file header
ReadFileBlock( resource.FileInfo.Offset + resource.FileInfo.HeaderSize + block.Offset, ms );
}
// reset position ready for reading
ms.Position = 0;
}
/// <summary>
/// Reassembles a model file: decompresses the stack/runtime/vertex/edge-geometry/index
/// block groups into <paramref name="ms"/> starting at 0x44, then seeks back and writes
/// a fresh 0x44-byte model file header in front of the data.
/// </summary>
private unsafe void ReadModelFile( PenumbraFileResource resource, MemoryStream ms ) {
var mdlBlock = resource.FileInfo.ModelBlock;
long baseOffset = resource.FileInfo.Offset + resource.FileInfo.HeaderSize;
// 1/1/3/3/3 stack/runtime/vertex/egeo/index
// TODO: consider testing if this is more reliable than the Explorer method
// of adding mdlBlock.IndexBufferDataBlockIndex[2] + mdlBlock.IndexBufferDataBlockNum[2]
// i don't want to move this to that method right now, because i know sometimes the index is 0
// but it seems to work fine in explorer...
int totalBlocks = mdlBlock.StackBlockNum;
totalBlocks += mdlBlock.RuntimeBlockNum;
for( int i = 0; i < 3; i++ )
totalBlocks += mdlBlock.VertexBufferBlockNum[ i ];
for( int i = 0; i < 3; i++ )
totalBlocks += mdlBlock.EdgeGeometryVertexBufferBlockNum[ i ];
for( int i = 0; i < 3; i++ )
totalBlocks += mdlBlock.IndexBufferBlockNum[ i ];
// one compressed-size entry per block, in block order; used to step to the next block
var compressedBlockSizes = Reader.ReadStructures< UInt16 >( totalBlocks );
int currentBlock = 0;
int stackSize;
int runtimeSize;
int[] vertexDataOffsets = new int[3];
int[] indexDataOffsets = new int[3];
int[] vertexBufferSizes = new int[3];
int[] indexBufferSizes = new int[3];
// leave room for the 0x44-byte header that is written at the end of this method
ms.Seek( 0x44, SeekOrigin.Begin );
Reader.Seek( baseOffset + mdlBlock.StackOffset );
long stackStart = ms.Position;
for( int i = 0; i < mdlBlock.StackBlockNum; i++ ) {
long lastPos = Reader.BaseStream.Position;
ReadFileBlock( ms );
// advance by the recorded compressed size rather than trusting the block read
Reader.Seek( lastPos + compressedBlockSizes[ currentBlock ] );
currentBlock++;
}
long stackEnd = ms.Position;
stackSize = (int) ( stackEnd - stackStart );
Reader.Seek( baseOffset + mdlBlock.RuntimeOffset );
long runtimeStart = ms.Position;
for( int i = 0; i < mdlBlock.RuntimeBlockNum; i++ ) {
long lastPos = Reader.BaseStream.Position;
ReadFileBlock( ms );
Reader.Seek( lastPos + compressedBlockSizes[ currentBlock ] );
currentBlock++;
}
long runtimeEnd = ms.Position;
runtimeSize = (int) ( runtimeEnd - runtimeStart );
// three LOD levels; each may have vertex, edge-geometry and index block groups
for( int i = 0; i < 3; i++ ) {
if( mdlBlock.VertexBufferBlockNum[ i ] != 0 ) {
int currentVertexOffset = (int) ms.Position;
// record the output offset only when it differs from the previous LOD's offset
if( i == 0 || currentVertexOffset != vertexDataOffsets[ i - 1 ] )
vertexDataOffsets[ i ] = currentVertexOffset;
else
vertexDataOffsets[ i ] = 0;
Reader.Seek( baseOffset + mdlBlock.VertexBufferOffset[ i ] );
for( int j = 0; j < mdlBlock.VertexBufferBlockNum[ i ]; j++ ) {
long lastPos = Reader.BaseStream.Position;
vertexBufferSizes[ i ] += (int) ReadFileBlock( ms );
Reader.Seek( lastPos + compressedBlockSizes[ currentBlock ] );
currentBlock++;
}
}
if( mdlBlock.EdgeGeometryVertexBufferBlockNum[ i ] != 0 ) {
// edge geometry data is copied but its offsets/sizes are not recorded in the header
for( int j = 0; j < mdlBlock.EdgeGeometryVertexBufferBlockNum[ i ]; j++ ) {
long lastPos = Reader.BaseStream.Position;
ReadFileBlock( ms );
Reader.Seek( lastPos + compressedBlockSizes[ currentBlock ] );
currentBlock++;
}
}
if( mdlBlock.IndexBufferBlockNum[ i ] != 0 ) {
int currentIndexOffset = (int) ms.Position;
if( i == 0 || currentIndexOffset != indexDataOffsets[ i - 1 ] )
indexDataOffsets[ i ] = currentIndexOffset;
else
indexDataOffsets[ i ] = 0;
// i guess this is only needed in the vertex area, for i = 0
// Reader.Seek( baseOffset + mdlBlock.IndexBufferOffset[ i ] );
for( int j = 0; j < mdlBlock.IndexBufferBlockNum[ i ]; j++ ) {
long lastPos = Reader.BaseStream.Position;
indexBufferSizes[ i ] += (int) ReadFileBlock( ms );
Reader.Seek( lastPos + compressedBlockSizes[ currentBlock ] );
currentBlock++;
}
}
}
// write the reconstructed model header into the space reserved at the start
ms.Seek( 0, SeekOrigin.Begin );
ms.Write( BitConverter.GetBytes( mdlBlock.Version ) );
ms.Write( BitConverter.GetBytes( stackSize ) );
ms.Write( BitConverter.GetBytes( runtimeSize ) );
ms.Write( BitConverter.GetBytes( mdlBlock.VertexDeclarationNum ) );
ms.Write( BitConverter.GetBytes( mdlBlock.MaterialNum ) );
for( int i = 0; i < 3; i++ )
ms.Write( BitConverter.GetBytes( vertexDataOffsets[ i ] ) );
for( int i = 0; i < 3; i++ )
ms.Write( BitConverter.GetBytes( indexDataOffsets[ i ] ) );
for( int i = 0; i < 3; i++ )
ms.Write( BitConverter.GetBytes( vertexBufferSizes[ i ] ) );
for( int i = 0; i < 3; i++ )
ms.Write( BitConverter.GetBytes( indexBufferSizes[ i ] ) );
ms.Write( new [] {mdlBlock.NumLods} );
ms.Write( BitConverter.GetBytes( mdlBlock.IndexBufferStreamingEnabled ) );
ms.Write( BitConverter.GetBytes( mdlBlock.EdgeGeometryEnabled ) );
// single padding byte to fill the header out
ms.Write( new byte[] {0} );
}
/// <summary>
/// Reassembles a texture file: copies the optional mipmap header verbatim, then
/// decompresses each LOD's chain of data blocks into <paramref name="ms"/>.
/// </summary>
private void ReadTextureFile( PenumbraFileResource resource, MemoryStream ms )
{
var blocks = Reader.ReadStructures< LodBlock >( (int)resource.FileInfo.BlockCount );
// if there is a mipmap header, the comp_offset
// will not be 0
uint mipMapSize = blocks[ 0 ].CompressedOffset;
if( mipMapSize != 0 )
{
// copy the uncompressed texture header bytes straight through
long originalPos = BaseStream.Position;
BaseStream.Position = resource.FileInfo.Offset + resource.FileInfo.HeaderSize;
ms.Write( Reader.ReadBytes( (int)mipMapSize ) );
BaseStream.Position = originalPos;
}
// i is for texture blocks, j is 'data blocks'...
for( byte i = 0; i < blocks.Count; i++ )
{
// start from comp_offset
long runningBlockTotal = blocks[ i ].CompressedOffset + resource.FileInfo.Offset + resource.FileInfo.HeaderSize;
ReadFileBlock( runningBlockTotal, ms, true );
for( int j = 1; j < blocks[ i ].BlockCount; j++ )
{
// next block's offset delta is stored after the current block header region
runningBlockTotal += (UInt32)Reader.ReadInt16();
ReadFileBlock( runningBlockTotal, ms, true );
}
// unknown
Reader.ReadInt16();
}
}
/// <summary>
/// Reads one block at the current reader position. Returns the uncompressed byte count.
/// </summary>
protected uint ReadFileBlock( MemoryStream dest, bool resetPosition = false ) {
return ReadFileBlock( Reader.BaseStream.Position, dest, resetPosition );
}
/// <summary>
/// Reads one block at <paramref name="offset"/>, inflating it if compressed, and appends
/// the plain bytes to <paramref name="dest"/>. Returns the uncompressed byte count.
/// </summary>
/// <param name="resetPosition">When true, restores the stream position afterwards.</param>
protected uint ReadFileBlock( long offset, MemoryStream dest, bool resetPosition = false )
{
long originalPosition = BaseStream.Position;
BaseStream.Position = offset;
var blockHeader = Reader.ReadStructure< DatBlockHeader >();
// uncompressed block
// (CompressedSize == 32000 is the sentinel for "stored, not compressed")
if( blockHeader.CompressedSize == 32000 )
{
// NOTE(review): this early return skips the resetPosition handling below, so the
// stream position is not restored for uncompressed blocks — confirm intended.
dest.Write( Reader.ReadBytes( (int)blockHeader.UncompressedSize ) );
return blockHeader.UncompressedSize;
}
// NOTE(review): reads UncompressedSize bytes as the compressed payload; the Lumina
// equivalent reads CompressedSize here. DeflateStream stops at the stream terminator,
// so the over-read is tolerated, but verify this is intentional.
var data = Reader.ReadBytes( (int)blockHeader.UncompressedSize );
using( var compressedStream = new MemoryStream( data ) )
{
// raw deflate stream (no zlib wrapper)
using var zlibStream = new DeflateStream( compressedStream, CompressionMode.Decompress );
zlibStream.CopyTo( dest );
zlibStream.Close();
}
if( resetPosition )
BaseStream.Position = originalPosition;
return blockHeader.UncompressedSize;
}
public void Dispose()
{
// disposing the reader also disposes BaseStream (BinaryReader was built without leaveOpen)
Reader?.Dispose();
}
// Penumbra-local mirror of Lumina's file info header values.
public class PenumbraFileInfo
{
public UInt32 HeaderSize;
public FileType Type;
public UInt32 RawFileSize;
public UInt32 BlockCount;
// absolute offset of the file entry inside the pack stream
public long Offset { get; internal set; }
// only populated for FileType.Model entries
public ModelBlock ModelBlock { get; internal set; }
}
// Penumbra-local counterpart of Lumina's FileResource: holds the extracted bytes
// plus a reader over them; subclasses can parse further in LoadFile().
public class PenumbraFileResource
{
public PenumbraFileResource()
{
}
public PenumbraFileInfo FileInfo { get; internal set; }
public byte[] Data { get; internal set; }
public Span< byte > DataSpan => Data.AsSpan();
public MemoryStream FileStream { get; internal set; }
public BinaryReader Reader { get; internal set; }
public ParsedFilePath FilePath { get; internal set; }
/// <summary>
/// Called once the files are read out from the dats. Used to further parse the file into usable data structures.
/// </summary>
public virtual void LoadFile()
{
// this function is intentionally left blank
}
/// <summary>
/// Writes the raw extracted bytes to the given path.
/// </summary>
public virtual void SaveFile( string path )
{
File.WriteAllBytes( path, Data );
}
/// <summary>
/// Returns the lowercase hex SHA-256 of the extracted data.
/// </summary>
public string GetFileHash()
{
using var sha256 = System.Security.Cryptography.SHA256.Create();
var hash = sha256.ComputeHash( Data );
var sb = new StringBuilder();
foreach( var b in hash )
{
sb.Append( $"{b:x2}" );
}
return sb.ToString();
}
}
// On-disk header preceding each data block; CompressedSize == 32000 marks an uncompressed block.
[StructLayout( LayoutKind.Sequential )]
struct DatBlockHeader
{
public UInt32 Size;
public UInt32 unknown1;
public UInt32 CompressedSize;
public UInt32 UncompressedSize;
};
// Per-LOD block table entry for texture entries (offsets relative to the file header end).
[StructLayout( LayoutKind.Sequential )]
struct LodBlock
{
public UInt32 CompressedOffset;
public UInt32 CompressedSize;
public UInt32 DecompressedSize;
public UInt32 BlockOffset;
public UInt32 BlockCount;
}
}
}