Bulk import script early version + StuffIt 5 fixes.

elasota
2021-06-30 00:45:53 -04:00
parent 66cce6bcd4
commit c27d78d329
15 changed files with 2228 additions and 156 deletions

ASADTool/ASADTool.cpp (new file, 322 lines)

@@ -0,0 +1,322 @@
#include "WindowsUnicodeToolShim.h"
#include "PLBigEndian.h"
#include "MacFileInfo.h"
#include "CombinedTimestamp.h"
#include "CFileStream.h"
#include "PLCore.h"
#include <algorithm>
// https://tools.ietf.org/rfc/rfc1740
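// This tool splits an AppleSingle/AppleDouble file (RFC 1740) into separate files:
// the data fork becomes <output>.gpd, the resource fork <output>.gpr, the comment
// <output>.gpc, and the file dates/Finder/Macintosh info are packaged into <output>.gpf.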
int ProcessFork(FILE *f, uint32_t length, const char *basePath, const char *suffix)
{
const size_t kBufferSize = 4096;
uint8_t buffer[kBufferSize];
std::string combinedPath = std::string(basePath) + suffix;
FILE *outF = fopen_utf8(combinedPath.c_str(), "wb");
if (!outF)
{
fprintf(stderr, "Failed to open output file '%s'", combinedPath.c_str());
return -1;
}
while (length > 0)
{
const size_t amountToCopy = std::min<size_t>(length, kBufferSize);
if (fread(buffer, 1, amountToCopy, f) != amountToCopy)
{
fprintf(stderr, "Failed to copy data");
fclose(outF);
return -1;
}
if (fwrite(buffer, 1, amountToCopy, outF) != amountToCopy)
{
fprintf(stderr, "Failed to copy data");
fclose(outF);
return -1;
}
length -= static_cast<uint32_t>(amountToCopy);
}
fclose(outF);
return 0;
}
int ProcessFileDatesInfo(FILE *f, uint32_t length, PortabilityLayer::MacFileProperties &mfp, PortabilityLayer::CombinedTimestamp &ts)
{
struct ASFileDates
{
BEInt32_t m_created;
BEInt32_t m_modified;
BEInt32_t m_backup;
BEInt32_t m_access;
};
ASFileDates fileDates;
if (length < sizeof(fileDates))
{
fprintf(stderr, "File dates block was truncated");
return -1;
}
if (fread(&fileDates, 1, sizeof(fileDates), f) != sizeof(fileDates))
{
fprintf(stderr, "Failed to read file dates");
return -1;
}
const int64_t asEpochToMacEpoch = -3029547600LL;
// Mac epoch in Unix time: -2082844800
// ASAD epoch in Unix time: 946702800
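// (i.e. asEpochToMacEpoch = -2082844800 - 946702800)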
mfp.m_createdTimeMacEpoch = static_cast<int64_t>(fileDates.m_created) + asEpochToMacEpoch;
mfp.m_modifiedTimeMacEpoch = static_cast<int64_t>(fileDates.m_modified) + asEpochToMacEpoch;
ts.SetMacEpochTime(mfp.m_modifiedTimeMacEpoch);
return 0;
}
int ProcessFinderInfo(FILE *f, uint32_t length, PortabilityLayer::MacFileProperties &mfp)
{
struct ASFinderInfo
{
uint8_t m_type[4];
uint8_t m_creator[4];
BEUInt16_t m_finderFlags;
BEPoint m_location;
BEUInt16_t m_folder; // ???
};
struct ASExtendedFinderInfo
{
BEUInt16_t m_iconID;
uint8_t m_unused[6];
uint8_t m_scriptCode;
uint8_t m_xFlags;
BEUInt16_t m_commentID;
BEUInt32_t m_putAwayDirectoryID;
};
ASFinderInfo finderInfo;
if (length < sizeof(finderInfo))
{
fprintf(stderr, "Finder Info block was truncated");
return -1;
}
if (fread(&finderInfo, 1, sizeof(finderInfo), f) != sizeof(finderInfo))
{
fprintf(stderr, "Failed to read Finder info");
return -1;
}
memcpy(mfp.m_fileCreator, finderInfo.m_creator, 4);
memcpy(mfp.m_fileType, finderInfo.m_type, 4);
mfp.m_finderFlags = finderInfo.m_finderFlags;
mfp.m_xPos = finderInfo.m_location.h;
mfp.m_yPos = finderInfo.m_location.v;
return 0;
}
int ProcessMacintoshFileInfo(FILE *f, uint32_t length, PortabilityLayer::MacFileProperties &mfp)
{
struct ASMacInfo
{
uint8_t m_filler[3];
uint8_t m_protected;
};
ASMacInfo macInfo;
if (length < sizeof(macInfo))
{
fprintf(stderr, "File dates block was truncated");
return -1;
}
if (fread(&macInfo, 1, sizeof(macInfo), f) != sizeof(macInfo))
{
fprintf(stderr, "Failed to read file dates");
return -1;
}
mfp.m_protected = macInfo.m_protected;
return 0;
}
int ProcessFile(FILE *f, const char *outPath, PortabilityLayer::CombinedTimestamp ts, bool isDouble)
{
struct ASHeader
{
BEUInt32_t m_version;
uint8_t m_filler[16];
BEUInt16_t m_numEntries;
};
struct ASEntry
{
BEUInt32_t m_entryID;
BEUInt32_t m_offset;
BEUInt32_t m_length;
};
ASHeader header;
if (fread(&header, 1, sizeof(header), f) != sizeof(header))
{
fprintf(stderr, "Failed to read header");
return -1;
}
const uint32_t numEntries = header.m_numEntries;
if (numEntries > 0xffff)
{
fprintf(stderr, "Too many entries");
return -1;
}
if (numEntries == 0)
return 0;
std::vector<ASEntry> entries;
entries.resize(static_cast<uint32_t>(numEntries));
PortabilityLayer::MacFileProperties mfp;
if (fread(&entries[0], 1, sizeof(ASEntry) * numEntries, f) != sizeof(ASEntry) * numEntries)
{
fprintf(stderr, "Failed to read entries");
return -1;
}
for (const ASEntry &asEntry : entries)
{
int fseekResult = fseek(f, asEntry.m_offset, SEEK_SET);
if (fseekResult != 0)
return fseekResult;
int rc = 0;
switch (static_cast<uint32_t>(asEntry.m_entryID))
{
case 1: // Data fork
if (asEntry.m_length > 0)
rc = ProcessFork(f, asEntry.m_length, outPath, ".gpd");
break;
case 2: // Resource fork
if (asEntry.m_length > 0)
rc = ProcessFork(f, asEntry.m_length, outPath, ".gpr");
break;
case 4: // Comment
if (asEntry.m_length > 0)
rc = ProcessFork(f, asEntry.m_length, outPath, ".gpc");
break;
case 8:
rc = ProcessFileDatesInfo(f, asEntry.m_length, mfp, ts);
break;
case 9:
rc = ProcessFinderInfo(f, asEntry.m_length, mfp);
break;
case 10:
rc = ProcessMacintoshFileInfo(f, asEntry.m_length, mfp);
break;
case 3: // Real name
case 5: // B&W icon
case 6: // Color icon
case 11: // ProDOS file info
case 12: // MS-DOS file info
case 13: // AFP short name
case 14: // AFP file info
case 15: // AFP directory ID
break;
default:
fprintf(stderr, "Unknown entry type %i", static_cast<int>(static_cast<uint32_t>(asEntry.m_entryID)));
return -1;
}
if (rc != 0)
return rc;
}
PortabilityLayer::MacFilePropertiesSerialized mfps;
mfps.Serialize(mfp);
std::string gpfPath = std::string(outPath) + ".gpf";
FILE *gpfFile = fopen_utf8(gpfPath.c_str(), "wb");
if (!gpfFile)
{
fprintf(stderr, "Failed to open output gpf");
return -1;
}
PortabilityLayer::CFileStream gpfStream(gpfFile);
mfps.WriteAsPackage(gpfStream, ts);
gpfStream.Close();
return 0;
}
int toolMain(int argc, const char **argv)
{
BEUInt32_t magic;
if (argc != 4)
{
fprintf(stderr, "Usage: ASADTool <input> <timestamp.ts> <output>");
return -1;
}
PortabilityLayer::CombinedTimestamp ts;
FILE *tsFile = fopen_utf8(argv[2], "rb");
if (!tsFile)
{
fprintf(stderr, "Could not open timestamp file");
return -1;
}
if (fread(&ts, 1, sizeof(ts), tsFile) != sizeof(ts))
{
fprintf(stderr, "Could not read timestamp file");
return -1;
}
fclose(tsFile);
FILE *asadFile = fopen_utf8(argv[1], "rb");
if (!asadFile)
{
fprintf(stderr, "Could not open input file");
return -1;
}
if (fread(&magic, 1, 4, asadFile) != 4)
{
fprintf(stderr, "Could not read file magic");
return -1;
}
int returnCode = 0;
if (magic == 0x00051607) // AppleDouble magic
returnCode = ProcessFile(asadFile, argv[3], ts, true);
else if (magic == 0x00051600) // AppleSingle magic
returnCode = ProcessFile(asadFile, argv[3], ts, false);
else
{
fprintf(stderr, "Unknown file type %x", static_cast<int>(magic));
return -1;
}
fclose(asadFile);
return returnCode;
}

ASADTool/ASADTool.vcxproj (new file, 96 lines)

@@ -0,0 +1,96 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="15.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|x64">
<Configuration>Debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|x64">
<Configuration>Release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<VCProjectVersion>15.0</VCProjectVersion>
<ProjectGuid>{DF692F94-3A11-40E1-8846-9815B4DBBDB0}</ProjectGuid>
<RootNamespace>ASADTool</RootNamespace>
<WindowsTargetPlatformVersion>10.0.17763.0</WindowsTargetPlatformVersion>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v141</PlatformToolset>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<PlatformToolset>v141</PlatformToolset>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="Shared">
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\Debug.props" />
<Import Project="..\WindowsUnicodeToolShim.props" />
<Import Project="..\PortabilityLayer.props" />
<Import Project="..\GpCommon.props" />
<Import Project="..\Common.props" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\Release.props" />
<Import Project="..\WindowsUnicodeToolShim.props" />
<Import Project="..\PortabilityLayer.props" />
<Import Project="..\GpCommon.props" />
<Import Project="..\Common.props" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup />
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
<SDLCheck>true</SDLCheck>
<ConformanceMode>true</ConformanceMode>
</ClCompile>
<Link>
<SubSystem>Console</SubSystem>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<SDLCheck>true</SDLCheck>
<ConformanceMode>true</ConformanceMode>
</ClCompile>
<Link>
<SubSystem>Console</SubSystem>
</Link>
</ItemDefinitionGroup>
<ItemGroup>
<ProjectReference Include="..\PortabilityLayer\PortabilityLayer.vcxproj">
<Project>{6ec62b0f-9353-40a4-a510-3788f1368b33}</Project>
</ProjectReference>
<ProjectReference Include="..\WindowsUnicodeToolShim\WindowsUnicodeToolShim.vcxproj">
<Project>{15009625-1120-405e-8bba-69a16cd6713d}</Project>
</ProjectReference>
</ItemGroup>
<ItemGroup>
<ClCompile Include="ASADTool.cpp" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

View File

@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Filter Include="Source Files">
<UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
<Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
</Filter>
<Filter Include="Header Files">
<UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
<Extensions>h;hh;hpp;hxx;hm;inl;inc;ipp;xsd</Extensions>
</Filter>
<Filter Include="Resource Files">
<UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
<Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
</Filter>
</ItemGroup>
<ItemGroup>
<ClCompile Include="ASADTool.cpp">
<Filter>Source Files</Filter>
</ClCompile>
</ItemGroup>
</Project>

View File

@@ -71,6 +71,8 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "bin2h", "bin2h\bin2h.vcxpro
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "HouseTool", "HouseTool\HouseTool.vcxproj", "{B31BFF9D-2D14-4B1A-A625-8348CC3D8D67}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "ASADTool", "ASADTool\ASADTool.vcxproj", "{DF692F94-3A11-40E1-8846-9815B4DBBDB0}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|x64 = Debug|x64
@@ -195,6 +197,10 @@ Global
{B31BFF9D-2D14-4B1A-A625-8348CC3D8D67}.Debug|x64.Build.0 = Debug|x64
{B31BFF9D-2D14-4B1A-A625-8348CC3D8D67}.Release|x64.ActiveCfg = Release|x64
{B31BFF9D-2D14-4B1A-A625-8348CC3D8D67}.Release|x64.Build.0 = Release|x64
{DF692F94-3A11-40E1-8846-9815B4DBBDB0}.Debug|x64.ActiveCfg = Debug|x64
{DF692F94-3A11-40E1-8846-9815B4DBBDB0}.Debug|x64.Build.0 = Debug|x64
{DF692F94-3A11-40E1-8846-9815B4DBBDB0}.Release|x64.ActiveCfg = Release|x64
{DF692F94-3A11-40E1-8846-9815B4DBBDB0}.Release|x64.Build.0 = Release|x64
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE

View File

@@ -4,7 +4,7 @@ mkdir Packaged
mkdir Packaged\Houses
x64\Release\MiniRez.exe "GliderProData\Glider PRO.r" Packaged\ApplicationResources.gpr
x64\Release\gpr2gpa.exe "Packaged\ApplicationResources.gpr" "DefaultTimestamp.timestamp" "Packaged\ApplicationResources.gpa" "ApplicationResourcePatches\manifest.json"
x64\Release\gpr2gpa.exe "Packaged\ApplicationResources.gpr" "DefaultTimestamp.timestamp" "Packaged\ApplicationResources.gpa" -patch "ApplicationResourcePatches\manifest.json"
x64\Release\FTagData.exe "DefaultTimestamp.timestamp" "Packaged\ApplicationResources.gpf" data ozm5 0 0 locked
x64\Release\MergeGPF.exe "Packaged\ApplicationResources.gpf"
@@ -54,14 +54,14 @@ x64\Release\gpr2gpa.exe "Packaged\Houses\CD Demo House.gpr" "DefaultTimestamp.ti
x64\Release\gpr2gpa.exe "Packaged\Houses\Davis Station.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Davis Station.gpa"
x64\Release\gpr2gpa.exe "Packaged\Houses\Demo House.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Demo House.gpa"
x64\Release\gpr2gpa.exe "Packaged\Houses\Fun House.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Fun House.gpa"
x64\Release\gpr2gpa.exe "Packaged\Houses\Grand Prix.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Grand Prix.gpa" "HousePatches\GrandPrix.json"
x64\Release\gpr2gpa.exe "Packaged\Houses\ImagineHouse PRO II.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\ImagineHouse PRO II.gpa" "HousePatches\ImagineHousePROII.json"
x64\Release\gpr2gpa.exe "Packaged\Houses\In The Mirror.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\In The Mirror.gpa" "HousePatches\InTheMirror.json"
x64\Release\gpr2gpa.exe "Packaged\Houses\Grand Prix.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Grand Prix.gpa" -patch "HousePatches\GrandPrix.json"
x64\Release\gpr2gpa.exe "Packaged\Houses\ImagineHouse PRO II.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\ImagineHouse PRO II.gpa" -patch "HousePatches\ImagineHousePROII.json"
x64\Release\gpr2gpa.exe "Packaged\Houses\In The Mirror.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\In The Mirror.gpa" -patch "HousePatches\InTheMirror.json"
x64\Release\gpr2gpa.exe "Packaged\Houses\Land of Illusion.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Land of Illusion.gpa"
x64\Release\gpr2gpa.exe "Packaged\Houses\Leviathan.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Leviathan.gpa" "HousePatches\Leviathan.json"
x64\Release\gpr2gpa.exe "Packaged\Houses\Leviathan.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Leviathan.gpa" -patch "HousePatches\Leviathan.json"
x64\Release\gpr2gpa.exe "Packaged\Houses\Metropolis.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Metropolis.gpa"
x64\Release\gpr2gpa.exe "Packaged\Houses\Nemo's Market.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Nemo's Market.gpa"
x64\Release\gpr2gpa.exe "Packaged\Houses\Rainbow's End.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Rainbow's End.gpa" "HousePatches\RainbowsEnd.json"
x64\Release\gpr2gpa.exe "Packaged\Houses\Rainbow's End.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Rainbow's End.gpa" -patch "HousePatches\RainbowsEnd.json"
x64\Release\gpr2gpa.exe "Packaged\Houses\Slumberland.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Slumberland.gpa"
x64\Release\gpr2gpa.exe "Packaged\Houses\SpacePods.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\SpacePods.gpa"
x64\Release\gpr2gpa.exe "Packaged\Houses\Teddy World.gpr" "DefaultTimestamp.timestamp" "Packaged\Houses\Teddy World.gpa"

View File

@@ -2386,7 +2386,7 @@ static ExportHouseResult_t TryExportSound(GpVector<uint8_t> &resData, const THan
{
BEUInt32_t m_samplePtr;
BEUInt32_t m_length;
BEFixed32_t m_sampleRate;
BEUFixed32_t m_sampleRate;
BEUInt32_t m_loopStart;
BEUInt32_t m_loopEnd;
uint8_t m_encoding;
@@ -2784,10 +2784,10 @@ static ExportHouseResult_t TryExportPictFromSurface(GpVector<uint8_t> &resData,
BEUInt16_t m_headerOp;
BEInt16_t m_v2Version;
BEInt16_t m_reserved1;
BEFixed32_t m_top;
BEFixed32_t m_left;
BEFixed32_t m_bottom;
BEFixed32_t m_right;
BESFixed32_t m_top;
BESFixed32_t m_left;
BESFixed32_t m_bottom;
BESFixed32_t m_right;
BEUInt32_t m_reserved2;
};

View File

@@ -229,6 +229,7 @@ int toolMain(int argc, const char **argv)
std::vector<size_t> fileNameSizes;
FILE *resF = fopen_utf8(resName.c_str(), "rb");
if (resF)
{
PortabilityLayer::ZipEndOfCentralDirectoryRecord eocd;

View File

@@ -244,8 +244,26 @@ typedef BEInteger<int32_t> BEInt32_t;
typedef BEInteger<uint16_t> BEUInt16_t;
typedef BEInteger<uint32_t> BEUInt32_t;
struct BEFixed32_t
struct BESFixed32_t
{
BEInt16_t m_intPart;
BEUInt16_t m_fracPart;
};
struct BEUFixed32_t
{
BEUInt16_t m_intPart;
BEUInt16_t m_fracPart;
};
struct BESFixed16_t
{
int8_t m_intPart;
uint8_t m_fracPart;
};
struct BEUFixed16_t
{
uint8_t m_intPart;
uint8_t m_fracPart;
};
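// Big-endian fixed-point values stored as separate integer and fractional halves:
// the 32-bit forms are 16.16 fixed point, the 16-bit forms are 8.8.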

View File

@@ -1,9 +1,11 @@
#include <string>
#include "GpUnicode.h"
#include "WindowsUnicodeToolShim.h"
#include <Windows.h>
#include <vector>
#include <direct.h>
// This library provides front-ends and shims to make tools a bit more portable by handling all path strings as UTF-8,
// and providing a "main" entry point that is also UTF-8.
@@ -121,6 +123,58 @@ void ScanDirectoryForExtension(std::vector<std::string> &outPaths, const char *p
FindClose(h);
}
struct DirectoryScanContext
{
WIN32_FIND_DATAW m_findDataW;
HANDLE m_handle;
bool m_first;
std::string m_utf8Name;
DirectoryScanEntry m_currentEntry;
};
static void ParseDirEntry(DirectoryScanContext &context)
{
context.m_utf8Name = ConvertWStringToUTF8(context.m_findDataW.cFileName);
context.m_currentEntry.m_name = context.m_utf8Name.c_str();
}
DirectoryScanContext *opendir_utf8(const char *name)
{
DirectoryScanContext *context = new DirectoryScanContext();
std::wstring dirFilter = std::wstring(L"\\\\?\\") + ConvertUTF8ToWString(name) + L"\\*";
context->m_handle = FindFirstFileW(dirFilter.c_str(), &context->m_findDataW);
if (context->m_handle == INVALID_HANDLE_VALUE)
{
delete context;
return nullptr;
}
context->m_first = true;
return context;
}
DirectoryScanEntry *readdir_utf8(DirectoryScanContext *dir)
{
if (dir->m_first)
dir->m_first = false;
else
{
if (!FindNextFileW(dir->m_handle, &dir->m_findDataW))
return nullptr;
}
ParseDirEntry(*dir);
return &dir->m_currentEntry;
}
void closedir_utf8(DirectoryScanContext *context)
{
FindClose(context->m_handle);
delete context;
}
int toolMain(int argc, const char **argv);

View File

@@ -7,3 +7,13 @@ int fputs_utf8(const char *str, FILE *f);
int mkdir_utf8(const char *path);
void TerminateDirectoryPath(std::string &path);
void ScanDirectoryForExtension(std::vector<std::string>& outPaths, const char *path, const char *ending, bool recursive);
struct DirectoryScanContext;
struct DirectoryScanEntry
{
const char *m_name;
};
DirectoryScanContext *opendir_utf8(const char *name);
DirectoryScanEntry *readdir_utf8(DirectoryScanContext *dir);
void closedir_utf8(DirectoryScanContext *context);
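// A minimal usage sketch for the scan shims above (hypothetical dirPath):
// DirectoryScanContext *dir = opendir_utf8(dirPath);
// if (dir)
// {
//     while (DirectoryScanEntry *entry = readdir_utf8(dir))
//         fputs_utf8(entry->m_name, stdout);
//     closedir_utf8(dir);
// }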

bulkimport.py (new file, 417 lines)

@@ -0,0 +1,417 @@
import sys
import os
import subprocess
import zipfile
import shutil
import io
import json
debug_preserve_osx_dir = False
debug_preserve_temps = True
debug_preserve_resources = True
debug_preserve_qt = False
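# Rough pipeline (see run() and process_dir() below): unpack each source archive
# (.zip via zipfile, .sit/.cpt/.sea via unpacktool, .bin/.hqx via bin2gp/hqx2gp),
# rebuild __MACOSX AppleDouble metadata with ASADTool, flatten and transcode
# QuickTime movies with flattenmov/ffprobe/ffmpeg, convert resource forks to .gpa
# archives with gpr2gpa, requeue any nested archives, then merge finished houses
# with MergeGPF and move them to the output directory.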
def invoke_command(process_path, args, output_lines=None):
print("Running " + str(process_path) + " with " + str(args))
args_concatenated = [process_path + ".exe"] + args
if output_lines != None:
completed_process = subprocess.run(args_concatenated, capture_output=True)
else:
completed_process = subprocess.run(args_concatenated)
if completed_process.returncode != 0:
print("Process crashed/failed with return code " + str(completed_process.returncode))
return False
if output_lines != None:
output_lines.clear()
output_lines.extend(completed_process.stdout.decode("utf-8", "ignore").splitlines(False))
return True
def recursive_scan_dir(out_paths, dir_path):
dir = os.scandir(dir_path)
for entry in dir:
if entry.is_dir():
recursive_scan_dir(out_paths, entry.path)
if entry.is_file():
out_paths.append(entry.path)
def decompress_zip(ftagdata_path, source_path, ts_path, decompress_path):
with zipfile.ZipFile(source_path, "r") as zfile:
zfile.extractall(decompress_path)
file_names = []
recursive_scan_dir(file_names, decompress_path)
for path in file_names:
if not invoke_command(ftagdata_path, [ts_path, path + ".gpf", "DATA", "DATA", "0", "0"]):
return False
os.replace(path, path + ".gpd")
return True
def fixup_macos_dir(ftagdata_path, asadtool_path, ts_path, dir_path, osx_path):
contents = []
recursive_scan_dir(contents, osx_path)
print("recursive_scan_dir results: " + str(contents))
for content_path in contents:
osx_rel_path = os.path.relpath(content_path, osx_path)
osx_rel_dir, osx_rel_file = os.path.split(osx_rel_path)
if osx_rel_file.startswith("._") and osx_rel_file.endswith(".gpd"):
out_path = os.path.join(dir_path, osx_rel_dir, osx_rel_file[2:-4])
if not invoke_command(asadtool_path, [content_path, ts_path, out_path]):
return False
return True
def recursive_fixup_macosx_dir(ftagdata_path, asadtool_path, ts_path, dir_path):
osx_path = os.path.join(dir_path, "__MACOSX")
if os.path.isdir(osx_path):
if not fixup_macos_dir(ftagdata_path, asadtool_path, ts_path, dir_path, osx_path):
print("fixup_macos_dir failed?")
return False
if not debug_preserve_osx_dir:
shutil.rmtree(osx_path)
dir = os.scandir(dir_path)
for entry in dir:
if entry.is_dir():
if not recursive_fixup_macosx_dir(ftagdata_path, asadtool_path, ts_path, entry.path):
return False
return True
def convert_movies(tools_dir, dir_path):
contents = []
recursive_scan_dir(contents, dir_path)
for content_path in contents:
print("convert_movies content path: " + content_path)
if content_path.endswith(".mov.gpf"):
if not os.path.isfile(content_path[:-4] + ".gpd"):
# Res-only movie, probably only contains external references, a.k.a. unusable
os.remove(content_path)
if os.path.isfile(content_path[:-4] + ".gpr"):
os.remove(content_path[:-4] + ".gpr")
else:
content_dir = os.path.dirname(content_path)
mov_path = content_path[:-4]
res_path = mov_path + ".gpr"
data_path = mov_path + ".gpd"
if os.path.isfile(res_path):
if not invoke_command(os.path.join(tools_dir, "flattenmov"), [data_path, res_path, mov_path]):
return False
if not debug_preserve_qt:
os.remove(res_path)
os.remove(data_path)
else:
if os.path.isfile(mov_path):
os.remove(mov_path)
os.rename(data_path, mov_path)
probe_lines = []
if not invoke_command(os.path.join(tools_dir, "ffprobe"), ["-show_streams", mov_path], probe_lines):
return False
v_index = None
v_fps_num = None
v_fps_denom = None
a_index = None
a_nbframes = None
a_sample_rate = None
current_fps = None
current_index = None
current_type = None
current_nbframes = None
current_sample_rate = None
is_stream = False
for l in probe_lines:
if is_stream:
if l == "[/STREAM]":
print("Closing stream: " + str(current_type) + " " + str(current_index) + " " + str(current_fps) + " " + str(current_nbframes) + " " + str(current_sample_rate))
if current_type == "video" and current_index != None and current_fps != None:
fps_list = current_fps.split("/")
v_index = current_index
v_fps_num = fps_list[0]
v_fps_denom = fps_list[1]
if current_type == "audio" and current_index != None and current_nbframes != None and current_sample_rate != None:
a_index = current_index
a_nbframes = current_nbframes
a_sample_rate = current_sample_rate
current_fps = None
current_index = None
current_type = None
current_nbframes = None
current_sample_rate = None
is_stream = False
elif l.startswith("codec_type="):
current_type = l[11:]
elif l.startswith("index="):
current_index = l[6:]
elif l.startswith("r_frame_rate="):
current_fps = l[13:]
elif l.startswith("nb_frames="):
current_nbframes = l[10:]
elif l.startswith("sample_rate="):
current_sample_rate = l[12:]
elif l == "[STREAM]":
current_fps_num = None
current_fps_denom = None
current_index = None
current_type = None
is_stream = True
wav_path = None
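# Audio (if any) is extracted to mono 8-bit PCM. The target rate defaults to
# 22254 Hz (roughly the classic Mac 22.254 kHz rate); 11025/44100 Hz sources are
# mapped to 22050 Hz, and rates already far from 22 kHz are left unchanged.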
if a_index != None:
sample_rate_int = int(a_sample_rate)
target_sample_rate = "22254"
if sample_rate_int == 11025 or sample_rate_int == 44100:
target_sample_rate = "22050"
elif sample_rate_int < 22000 or sample_rate_int > 23000:
target_sample_rate = a_sample_rate
wav_path = os.path.join(content_dir, "0.wav")
if not invoke_command(os.path.join(tools_dir, "ffmpeg"), ["-y", "-i", mov_path, "-ac", "1", "-ar", target_sample_rate, "-c:a", "pcm_u8", wav_path]):
return False
if v_index != None:
if not invoke_command(os.path.join(tools_dir, "ffmpeg"), ["-y", "-i", mov_path, os.path.join(content_dir, "%d.bmp")]):
return False
if a_index != None or v_index != None:
with zipfile.ZipFile(mov_path + ".gpa", "w") as vid_archive:
metaf = io.StringIO()
if v_index != None:
metaf.write("{\n")
metaf.write("\t\"frameRateNumerator\" : " + v_fps_num + ",\n")
metaf.write("\t\"frameRateDenominator\" : " + v_fps_denom + "\n")
metaf.write("}\n")
else:
metaf.write("{\n")
metaf.write("\t\"frameRateNumerator\" : " + a_nbframes + ",\n")
metaf.write("\t\"frameRateDenominator\" : " + a_sample_rate + "\n")
metaf.write("}\n")
vid_archive.writestr("muvi/0.json", metaf.getvalue(), compress_type=zipfile.ZIP_DEFLATED, compresslevel=9)
if v_index != None:
frame_num = 1
bmp_name = str(frame_num) + ".bmp"
bmp_path = os.path.join(content_dir, bmp_name)
while os.path.isfile(bmp_path):
vid_archive.write(bmp_path, arcname=("PICT/" + bmp_name), compress_type=zipfile.ZIP_DEFLATED, compresslevel=9)
os.remove(bmp_path)
frame_num = frame_num + 1
bmp_name = str(frame_num) + ".bmp"
bmp_path = os.path.join(content_dir, bmp_name)
if a_index != None:
vid_archive.write(wav_path, arcname=("snd$20/0.wav"), compress_type=zipfile.ZIP_DEFLATED, compresslevel=9)
os.remove(wav_path)
if not debug_preserve_qt:
os.remove(mov_path)
return True
def reprocess_children(source_paths, dir_path):
reprocess_extensions = [ "sea", "bin", "hqx", "zip", "cpt", "sit" ]
contents = []
recursive_scan_dir(contents, dir_path)
for ext in reprocess_extensions:
full_ext = "." + ext + ".gpf"
for content_path in contents:
if content_path.endswith(full_ext):
truncated_path = content_path[:-4]
data_path = truncated_path + ".gpd"
if os.path.isfile(data_path):
os.rename(data_path, truncated_path)
source_paths.append(truncated_path)
print("Requeueing subpath " + truncated_path)
return True
def convert_resources(tools_dir, ts_path, qt_convert_dir, dir_path):
contents = []
recursive_scan_dir(contents, dir_path)
for content_path in contents:
if content_path.endswith(".gpr"):
if not invoke_command(os.path.join(tools_dir, "gpr2gpa"), [content_path, ts_path, content_path[:-4] + ".gpa", "-dumpqt", qt_convert_dir]):
return False
qt_convert_contents = []
recursive_scan_dir(qt_convert_contents, qt_convert_dir)
converted_pict_ids = []
# Convert inline QuickTime PICT resources
for convert_content_path in qt_convert_contents:
if convert_content_path.endswith(".mov"):
if not invoke_command(os.path.join(tools_dir, "ffmpeg"), ["-y", "-i", convert_content_path, convert_content_path[:-4] + ".bmp"]):
return False
os.remove(convert_content_path)
converted_pict_ids.append(os.path.basename(convert_content_path[:-4]))
if len(converted_pict_ids) > 0:
print("Reimporting converted QuickTime PICTs")
qt_convert_json_path = os.path.join(dir_path, "qt_convert.json")
convert_dict = { }
convert_dict["delete"] = []
convert_dict["add"] = { }
for pict_id in converted_pict_ids:
convert_dict["add"]["PICT/" + pict_id + ".bmp"] = os.path.join(qt_convert_dir, pict_id + ".bmp")
with open(qt_convert_json_path, "w") as f:
json.dump(convert_dict, f)
if not invoke_command(os.path.join(tools_dir, "gpr2gpa"), [content_path, ts_path, content_path[:-4] + ".gpa", "-patch", qt_convert_json_path]):
return False
for pict_id in converted_pict_ids:
os.remove(os.path.join(qt_convert_dir, pict_id + ".bmp"))
os.remove(qt_convert_json_path)
if not debug_preserve_resources:
os.remove(content_path)
return True
def scoop_files(tools_dir, output_dir, dir_path):
mergegpf_path = os.path.join(tools_dir, "MergeGPF")
contents = []
recursive_scan_dir(contents, dir_path)
scooped_files = []
for content_path in contents:
if content_path.endswith(".gpf"):
is_house = False
with zipfile.ZipFile(content_path, "r") as zfile:
meta_contents = None
with zfile.open("!!meta", "r") as metafile:
meta_contents = metafile.read()
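# The check below treats the first four bytes of !!meta (103/108/105/72 == 'gliH',
# presumably the Glider PRO house file type) as the marker for a house, so only
# houses and their companion .mov.gpf files get merged and scooped.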
if meta_contents[0] == 103 and meta_contents[1] == 108 and meta_contents[2] == 105 and meta_contents[3] == 72:
is_house = True
if is_house:
if not invoke_command(mergegpf_path, [content_path]):
return False
scooped_files.append(content_path)
mov_path = content_path[:-4] + ".mov.gpf"
if os.path.isfile(mov_path):
if not invoke_command(mergegpf_path, [mov_path]):
return False
scooped_files.append(mov_path)
for scoop_path in scooped_files:
os.replace(scoop_path, os.path.join(output_dir, os.path.basename(scoop_path)))
return True
class ImportContext:
def __init__(self):
pass
def run(self):
os.makedirs(self.qt_convert_dir, exist_ok=True)
os.makedirs(self.output_dir, exist_ok=True)
invoke_command(self.make_timestamp_path, [self.ts_path])
print("Looking for input files in " + self.source_dir)
source_paths = []
recursive_scan_dir(source_paths, self.source_dir)
pending_result_directories = []
result_dir_index = 0
while len(source_paths) > 0:
source_path = source_paths[0]
source_paths = source_paths[1:]
unpack_dir = os.path.join(self.output_dir, str(len(pending_result_directories)))
try:
os.mkdir(unpack_dir)
except FileExistsError as error:
pass
print("Attempting to unpack " + source_path)
decompressed_ok = False
should_decompress = True
if source_path.endswith(".zip"):
decompressed_ok = decompress_zip(self.ftagdata_path, source_path, self.ts_path, unpack_dir)
elif source_path.endswith(".sit") or source_path.endswith(".cpt") or source_path.endswith(".sea"):
decompressed_ok = invoke_command(os.path.join(self.tools_dir, "unpacktool"), [source_path, self.ts_path, unpack_dir, "-paranoid"])
elif source_path.endswith(".bin"):
decompressed_ok = invoke_command(os.path.join(self.tools_dir, "bin2gp"), [source_path, self.ts_path, os.path.join(unpack_dir, os.path.basename(source_path[:-4]))])
elif source_path.endswith(".hqx"):
decompressed_ok = invoke_command(os.path.join(self.tools_dir, "hqx2gp"), [source_path, self.ts_path, os.path.join(unpack_dir, os.path.basename(source_path[:-4]))])
else:
should_decompress = False
if should_decompress and not decompressed_ok:
return
if decompressed_ok:
pending_result_directories.append(unpack_dir)
while result_dir_index < len(pending_result_directories):
if not self.process_dir(pending_result_directories, result_dir_index, source_paths):
return
result_dir_index = result_dir_index + 1
# Clear temporaries
if not debug_preserve_temps:
for dir_path in pending_result_directories:
shutil.rmtree(dir_path)
def process_dir(self, all_dirs, dir_index, source_paths):
root = all_dirs[dir_index]
print("Processing directory " + root)
if not recursive_fixup_macosx_dir(self.ftagdata_path, os.path.join(self.tools_dir, "ASADTool"), self.ts_path, root):
return False
if not convert_movies(self.tools_dir, root):
return False
if not convert_resources(self.tools_dir, self.ts_path, self.qt_convert_dir, root):
return False
if not reprocess_children(source_paths, root):
return False
if not scoop_files(self.tools_dir, self.output_dir, root):
return False
return True
def main():
import_context = ImportContext()
#script_dir = sys.argv[0]
#source_dir = sys.argv[1]
import_context.source_dir = "C:\\Users\\Eric\\Downloads\\gliderfiles\\archives"
#output_dir = sys.argv[2]
import_context.output_dir = "C:\\Users\\Eric\\Downloads\\gliderfiles\\converted"
import_context.qt_convert_dir = os.path.join(import_context.output_dir, "qtconvert")
import_context.tools_dir = "D:\\src\\GlidePort\\x64\\Release"
import_context.make_timestamp_path = os.path.join(import_context.tools_dir, "MakeTimestamp")
import_context.ts_path = os.path.join(import_context.output_dir, "Timestamp.ts")
import_context.ftagdata_path = os.path.join(import_context.tools_dir, "FTagData")
import_context.run()
main()

View File

@@ -2,6 +2,8 @@
#include "MacFileMem.h"
#include "CFileStream.h"
#include "MemReaderStream.h"
#include "PLDrivers.h"
#include "PLBigEndian.h"
#include "ResourceCompiledTypeList.h"
#include "ResourceFile.h"
#include "ScopedPtr.h"
@@ -50,29 +52,71 @@ int main(int argc, const char **argv)
mfi.m_resourceForkSize = resSize;
mfi.m_commentSize = 0;
GpDriverCollection *drivers = PLDrivers::GetDriverCollection();
drivers->SetDriver<GpDriverIDs::kAlloc>(GpAllocator_C::GetInstance());
PortabilityLayer::ScopedPtr<PortabilityLayer::MacFileMem> memFile = PortabilityLayer::MacFileMem::Create(GpAllocator_C::GetInstance(), dataFork, resFork, nullptr, mfi);
delete[] dataFork;
delete[] resFork;
const uint8_t *dataBytes = memFile->DataFork();
if (dataBytes[0] == 0 && dataBytes[1] == 0 && dataBytes[2] == 0 && dataBytes[3] == 0)
{
uint32_t mdatSize = memFile->FileInfo().m_dataForkSize;
uint8_t mdatSizeEncoded[4];
mdatSizeEncoded[0] = ((mdatSize >> 24) & 0xff);
mdatSizeEncoded[1] = ((mdatSize >> 16) & 0xff);
mdatSizeEncoded[2] = ((mdatSize >> 8) & 0xff);
mdatSizeEncoded[3] = ((mdatSize >> 0) & 0xff);
PortabilityLayer::ResourceFile *rf = PortabilityLayer::ResourceFile::Create();
size_t terminalAtomPos = 0;
const size_t dataForkSize = memFile->FileInfo().m_dataForkSize;
bool ignoreAndCopy = false;
if (dataBytes[0] == 'F' && dataBytes[1] == 'O' && dataBytes[2] == 'R' && dataBytes[3] == 'M')
{
fprintf(stderr, "File appears to actually be an AIFF file\n");
ignoreAndCopy = true;
}
const uint8_t *moovResBytes = nullptr;
uint32_t moovResSize = 0;
PortabilityLayer::ResourceFile *rf = nullptr;
if (ignoreAndCopy)
terminalAtomPos = dataForkSize;
else
{
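// Walk the data fork as a chain of QuickTime atoms: each atom begins with a
// 32-bit big-endian length, and the scan stops at a zero-length (terminal)
// atom or at the end of the fork.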
while (terminalAtomPos < dataForkSize)
{
size_t szAvailable = dataForkSize - terminalAtomPos;
if (szAvailable < 4)
{
fprintf(stderr, "Error looking for terminal atom");
return -1;
}
BEUInt32_t atomSize;
memcpy(&atomSize, dataBytes + terminalAtomPos, 4);
if (atomSize == 0)
break;
if (szAvailable < atomSize)
{
fprintf(stderr, "Error looking for terminal atom");
return -1;
}
terminalAtomPos += atomSize;
}
rf = PortabilityLayer::ResourceFile::Create();
if (rf)
{
PortabilityLayer::MemReaderStream resStream(memFile->ResourceFork(), memFile->FileInfo().m_resourceForkSize);
rf->Load(&resStream);
const PortabilityLayer::ResourceCompiledTypeList *typeList = rf->GetResourceTypeList(PortabilityLayer::ResTypeID('moov'));
const uint8_t *moovResBytes = nullptr;
uint32_t moovResSize = 0;
if (typeList != nullptr)
{
for (size_t refIndex = 0; refIndex < typeList->m_numRefs; refIndex++)
{
const PortabilityLayer::ResourceCompiledRef &ref = typeList->m_firstRef[refIndex];
@@ -80,6 +124,9 @@ int main(int argc, const char **argv)
moovResBytes = ref.m_resData;
break;
}
}
}
}
FILE *outF = fopen(argv[3], "wb");
if (!outF)
@@ -88,25 +135,23 @@ int main(int argc, const char **argv)
return -1;
}
fwrite(mdatSizeEncoded, 1, 4, outF);
fwrite(dataBytes + 4, 1, mdatSize - 4, outF);
if (terminalAtomPos > 0)
fwrite(dataBytes, 1, terminalAtomPos, outF);
if (terminalAtomPos < dataForkSize)
{
BEUInt32_t atomSize(static_cast<uint32_t>(dataForkSize - terminalAtomPos));
fwrite(&atomSize, 1, 4, outF);
fwrite(dataBytes + terminalAtomPos + 4, 1, dataForkSize - terminalAtomPos - 4, outF);
}
if (moovResBytes)
fwrite(moovResBytes, 1, moovResSize, outF);
fclose(outF);
rf->Destroy();
}
else
{
FILE *outF = fopen(argv[3], "wb");
if (!outF)
{
fprintf(stderr, "Could not open output file '%s'", argv[3]);
return -1;
}
fwrite(dataBytes, 1, memFile->FileInfo().m_dataForkSize, outF);
fclose(outF);
}
if (rf)
rf->Destroy();
return 0;
}

File diff suppressed because it is too large

View File

@@ -6,6 +6,7 @@
#include "PLBigEndian.h"
#include <vector>
#include <unordered_map>
#include "CSInputBuffer.h"
@@ -101,9 +102,15 @@ struct StuffIt5Block
std::vector<uint8_t> m_filename;
std::vector<StuffIt5Block> m_children;
int m_numChildren;
bool Read(IFileReader &reader)
int64_t m_endPos;
bool Read(IFileReader &reader, bool &outIsDirectoryAppendage)
{
outIsDirectoryAppendage = false;
int64_t headerPos = reader.GetPosition();
if (!reader.ReadExact(&m_header, sizeof(m_header)))
return false;
@@ -145,13 +152,12 @@ struct StuffIt5Block
if (commentLength > m_header.m_headerSize - sizeWithOnlyNameAndPasswordInfo - 4)
return false;
m_commentSize = commentLength;
m_commentPos = reader.GetPosition();
if (commentLength)
{
if (reader.SeekCurrent(commentLength))
if (!reader.SeekCurrent(commentLength))
return false;
}
@@ -166,6 +172,13 @@ struct StuffIt5Block
if (!reader.SeekCurrent(m_header.m_headerSize - sizeWithCommentData))
return false;
if (m_header.m_dataForkDesc.m_uncompressedSize == static_cast<uint32_t>(0xffffffff))
{
outIsDirectoryAppendage = true;
m_endPos = reader.GetPosition();
return true;
}
if (!reader.ReadExact(&m_annex1, sizeof(m_annex1)))
return false;
@@ -199,21 +212,13 @@ struct StuffIt5Block
{
int numFiles = (m_header.m_dataForkDesc.m_algorithm_dirNumFilesHigh << 8) | (m_header.m_dataForkDesc.m_passwordDataLength_dirNumFilesLow);
m_children.resize(numFiles);
for (int i = 0; i < numFiles; i++)
{
if (i != 0)
{
if (!reader.SeekStart(m_children[i - 1].m_header.m_nextEntryOffset))
return false;
}
if (!m_children[i].Read(reader))
return false;
}
m_numChildren = numFiles;
m_endPos = reader.GetPosition();
}
else
{
m_numChildren = 0;
if (m_hasResourceFork)
{
m_resForkPos = reader.GetPosition();
@@ -221,6 +226,8 @@ struct StuffIt5Block
}
else
m_dataForkPos = reader.GetPosition();
m_endPos = m_dataForkPos + m_header.m_dataForkDesc.m_compressedSize;
}
return true;
@@ -304,6 +311,34 @@ bool StuffIt5Parser::Check(IFileReader &reader)
return (*match) == '\0';
}
static bool RecursiveBuildTree(std::vector<StuffIt5Block> &dirBlocks, uint32_t dirPos, const std::vector<StuffIt5Block> &flatBlocks, const std::unordered_map<uint32_t, size_t> &filePosToDirectoryBlock, const std::unordered_map<size_t, uint32_t> &directoryBlockToFilePos, const std::unordered_map<uint32_t, std::vector<size_t>> &entryChildren, int depth)
{
if (depth == 16)
return false;
std::unordered_map<uint32_t, std::vector<size_t>>::const_iterator children = entryChildren.find(dirPos);
if (children == entryChildren.end())
return true;
for (size_t childIndex : children->second)
{
StuffIt5Block block = flatBlocks[childIndex];
if (block.m_isDirectory)
{
std::unordered_map<size_t, uint32_t>::const_iterator directoryFilePosIt = directoryBlockToFilePos.find(childIndex);
if (directoryFilePosIt == directoryBlockToFilePos.end())
return false;
if (!RecursiveBuildTree(block.m_children, directoryFilePosIt->second, flatBlocks, filePosToDirectoryBlock, directoryBlockToFilePos, entryChildren, depth + 1))
return false;
}
dirBlocks.push_back(static_cast<StuffIt5Block&&>(block));
}
return true;
}
ArchiveItemList *StuffIt5Parser::Parse(IFileReader &reader)
{
reader.SeekStart(0);
@@ -317,17 +352,52 @@ ArchiveItemList *StuffIt5Parser::Parse(IFileReader &reader)
if (!reader.SeekStart(header.m_rootDirFirstEntryOffset))
return nullptr;
std::vector<StuffIt5Block> rootDirBlocks;
rootDirBlocks.resize(numRootDirEntries);
size_t totalBlocks = numRootDirEntries;
std::vector<StuffIt5Block> flatBlocks;
for (int i = 0; i < numRootDirEntries; i++)
std::unordered_map<size_t, uint32_t> directoryBlockToFilePos;
std::unordered_map<uint32_t, size_t> filePosToDirectoryBlock;
// Unfortunately StuffIt 5 archive next/prev entry chains seem to be meaningless.
// The only real way to determine directory structure is after the fact.
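// So entries are read sequentially in file order into flatBlocks, and the
// directory tree is rebuilt afterwards by grouping each entry under its
// m_dirEntryOffset parent via RecursiveBuildTree.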
for (int i = 0; i < totalBlocks; i++)
{
if (i != 0)
reader.SeekStart(rootDirBlocks[i - 1].m_header.m_nextEntryOffset);
int64_t fpos = reader.GetPosition();
if (!rootDirBlocks[i].Read(reader))
bool isAppendage = false;
StuffIt5Block flatBlock;
if (!flatBlock.Read(reader, isAppendage))
return nullptr;
if (isAppendage)
{
totalBlocks++;
continue;
}
if (flatBlock.m_isDirectory)
{
totalBlocks += flatBlock.m_numChildren;
directoryBlockToFilePos[flatBlocks.size()] = static_cast<uint32_t>(fpos);
filePosToDirectoryBlock[static_cast<uint32_t>(fpos)] = flatBlocks.size();
}
if (i != totalBlocks - 1)
{
if (!reader.SeekStart(flatBlock.m_endPos))
return nullptr;
}
flatBlocks.push_back(flatBlock);
}
std::unordered_map<uint32_t, std::vector<size_t>> entryChildren;
for (size_t i = 0; i < flatBlocks.size(); i++)
entryChildren[flatBlocks[i].m_header.m_dirEntryOffset].push_back(i);
std::vector<StuffIt5Block> rootDirBlocks;
RecursiveBuildTree(rootDirBlocks, 0, flatBlocks, filePosToDirectoryBlock, directoryBlockToFilePos, entryChildren, 0);
return ConvertToItemList(rootDirBlocks);
}

View File

@@ -95,7 +95,12 @@ StuffItParser g_stuffItParser;
StuffIt5Parser g_stuffIt5Parser;
CompactProParser g_compactProParser;
std::string LegalizeWindowsFileName(const std::string &path)
static bool IsSeparator(char c)
{
return c == '/' || c == '\\';
}
std::string LegalizeWindowsFileName(const std::string &path, bool paranoid)
{
const size_t length = path.length();
@@ -115,6 +120,9 @@ std::string LegalizeWindowsFileName(const std::string &path)
isLegalChar = false;
}
if (paranoid && isLegalChar)
isLegalChar = c == '_' || c == ' ' || c == '.' || c == ',' || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9');
if (isLegalChar)
legalizedPath.append(&c, 1);
else
@@ -209,7 +217,7 @@ void MakeIntermediateDirectories(const std::string &path)
}
}
int RecursiveExtractFiles(int depth, ArchiveItemList *itemList, const std::string &path, IFileReader &reader, const PortabilityLayer::CombinedTimestamp &ts);
int RecursiveExtractFiles(int depth, ArchiveItemList *itemList, const std::string &path, bool pathParanoid, IFileReader &reader, const PortabilityLayer::CombinedTimestamp &ts);
int ExtractSingleFork(const ArchiveCompressedChunkDesc &chunkDesc, const std::string &path, IFileReader &reader)
{
@@ -361,7 +369,7 @@ int ExtractFile(const ArchiveItem &item, const std::string &path, IFileReader &r
return 0;
}
int ExtractItem(int depth, const ArchiveItem &item, const std::string &dirPath, IFileReader &reader, const PortabilityLayer::CombinedTimestamp &ts)
int ExtractItem(int depth, const ArchiveItem &item, const std::string &dirPath, bool pathParanoid, IFileReader &reader, const PortabilityLayer::CombinedTimestamp &ts)
{
std::string path(reinterpret_cast<const char*>(item.m_fileNameUTF8.data()), item.m_fileNameUTF8.size());
@@ -371,7 +379,7 @@ int ExtractItem(int depth, const ArchiveItem &item, const std::string &dirPath,
fputs_utf8(path.c_str(), stdout);
printf("\n");
path = LegalizeWindowsFileName(path);
path = LegalizeWindowsFileName(path, pathParanoid);
path = dirPath + path;
@@ -381,7 +389,7 @@ int ExtractItem(int depth, const ArchiveItem &item, const std::string &dirPath,
path.append("\\");
int returnCode = RecursiveExtractFiles(depth + 1, item.m_children, path, reader, ts);
int returnCode = RecursiveExtractFiles(depth + 1, item.m_children, path, pathParanoid, reader, ts);
if (returnCode)
return returnCode;
@@ -391,14 +399,14 @@ int ExtractItem(int depth, const ArchiveItem &item, const std::string &dirPath,
return ExtractFile(item, path, reader, ts);
}
int RecursiveExtractFiles(int depth, ArchiveItemList *itemList, const std::string &path, IFileReader &reader, const PortabilityLayer::CombinedTimestamp &ts)
int RecursiveExtractFiles(int depth, ArchiveItemList *itemList, const std::string &path, bool pathParanoid, IFileReader &reader, const PortabilityLayer::CombinedTimestamp &ts)
{
const std::vector<ArchiveItem> &items = itemList->m_items;
const size_t numChildren = items.size();
for (size_t i = 0; i < numChildren; i++)
{
int returnCode = ExtractItem(depth, items[i], path, reader, ts);
int returnCode = ExtractItem(depth, items[i], path, pathParanoid, reader, ts);
if (returnCode)
return returnCode;
}
@@ -406,22 +414,25 @@ int RecursiveExtractFiles(int depth, ArchiveItemList *itemList, const std::strin
return 0;
}
int toolMain(int argc, const char **argv)
int PrintUsage()
{
if (argc != 4)
{
fprintf(stderr, "Usage: unpacktool <archive file> <timestamp.ts> <destination>");
fprintf(stderr, "Usage: unpacktool <archive file> <timestamp.ts> <destination> [options]");
fprintf(stderr, "Usage: unpacktool -bulk <timestamp.ts> <archive files>");
return -1;
}
}
FILE *inputArchive = fopen_utf8(argv[1], "rb");
int decompMain(int argc, const char **argv)
{
for (int i = 0; i < argc; i++)
printf("%s\n", argv[i]);
if (!inputArchive)
{
fprintf(stderr, "Could not open input archive");
return -1;
}
if (argc < 4)
return PrintUsage();
bool isBulkMode = !strcmp(argv[1], "-bulk");
if (!isBulkMode && argc < 4)
return PrintUsage();
FILE *tsFile = fopen_utf8(argv[2], "rb");
@@ -440,6 +451,60 @@ int toolMain(int argc, const char **argv)
fclose(tsFile);
int arcArg = 1;
int numArgArcs = 1;
if (isBulkMode)
{
arcArg = 3;
numArgArcs = argc - 3;
}
bool pathParanoid = false;
if (!isBulkMode)
{
for (int optArgIndex = 4; optArgIndex < argc; )
{
const char *optArg = argv[optArgIndex++];
if (!strcmp(optArg, "-paranoid"))
pathParanoid = true;
else
{
fprintf(stderr, "Unknown option %s\n", optArg);
return -1;
}
}
}
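// In bulk mode each archive is extracted alongside itself: the destination is
// the archive path with its final path component stripped.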
for (int arcArgIndex = 0; arcArgIndex < numArgArcs; arcArgIndex++)
{
const char *arcPath = argv[arcArg + arcArgIndex];
FILE *inputArchive = fopen_utf8(arcPath, "rb");
std::string destPath;
if (isBulkMode)
{
destPath = arcPath;
size_t lastSepIndex = 0;
for (size_t i = 1; i < destPath.size(); i++)
{
if (destPath[i] == '/' || destPath[i] == '\\')
lastSepIndex = i;
}
destPath = destPath.substr(0, lastSepIndex);
}
else
destPath = argv[3];
if (!inputArchive)
{
fprintf(stderr, "Could not open input archive");
return -1;
}
CFileReader reader(inputArchive);
IArchiveParser *parsers[] =
@@ -451,7 +516,7 @@ int toolMain(int argc, const char **argv)
ArchiveItemList *archiveItemList = nullptr;
printf("Reading archive...\n");
printf("Reading archive '%s'...\n", arcPath);
for (IArchiveParser *parser : parsers)
{
@@ -470,15 +535,27 @@ int toolMain(int argc, const char **argv)
printf("Decompressing files...\n");
std::string currentPath = argv[3];
std::string currentPath = destPath;
TerminateDirectoryPath(currentPath);
MakeIntermediateDirectories(currentPath);
int returnCode = RecursiveExtractFiles(0, archiveItemList, currentPath, reader, ts);
int returnCode = RecursiveExtractFiles(0, archiveItemList, currentPath, pathParanoid, reader, ts);
if (returnCode != 0)
{
fprintf(stderr, "Error decompressing archive");
return returnCode;
}
delete archiveItemList;
}
return returnCode;
return 0;
}
int toolMain(int argc, const char **argv)
{
int returnCode = decompMain(argc, argv);
return returnCode;
}