From aaab2115d14f7562118fd727dbff6d2db2b5f5e2 Mon Sep 17 00:00:00 2001 From: Kelsi Date: Mon, 23 Feb 2026 19:58:38 -0800 Subject: [PATCH 01/10] Fix all remaining build warnings and eliminate UB in binary parsers Resolve 57 compiler warnings (unused params/vars, ignored return values, enum mismatch) and replace undefined-behavior reinterpret_cast with memcpy in DBC, BLP, and Warden module loaders for ARM64 portability. --- include/platform/process.hpp | 4 +- src/audio/activity_sound_manager.cpp | 4 +- src/audio/ambient_sound_manager.cpp | 30 ++++++------ src/audio/combat_sound_manager.cpp | 2 +- src/audio/ui_sound_manager.cpp | 22 ++++----- src/game/warden_emulator.cpp | 20 ++++---- src/game/warden_module.cpp | 24 +++++----- src/pipeline/blp_loader.cpp | 72 ++++++++++++++++------------ src/pipeline/dbc_loader.cpp | 27 ++++++----- src/pipeline/m2_loader.cpp | 4 +- src/rendering/character_renderer.cpp | 2 +- src/rendering/vk_render_target.cpp | 2 +- 12 files changed, 112 insertions(+), 101 deletions(-) diff --git a/include/platform/process.hpp b/include/platform/process.hpp index 0fa9e981..5356b04b 100644 --- a/include/platform/process.hpp +++ b/include/platform/process.hpp @@ -92,8 +92,8 @@ inline ProcessHandle spawnProcess(const std::vector& args) { if (pid == 0) { // Child process setpgid(0, 0); - freopen("/dev/null", "w", stdout); - freopen("/dev/null", "w", stderr); + if (!freopen("/dev/null", "w", stdout)) { _exit(1); } + if (!freopen("/dev/null", "w", stderr)) { _exit(1); } // Build argv for exec std::vector argv; diff --git a/src/audio/activity_sound_manager.cpp b/src/audio/activity_sound_manager.cpp index 9ab37a2d..5b28d5c8 100644 --- a/src/audio/activity_sound_manager.cpp +++ b/src/audio/activity_sound_manager.cpp @@ -91,7 +91,7 @@ void ActivitySoundManager::shutdown() { assetManager = nullptr; } -void ActivitySoundManager::update(float deltaTime) { +void ActivitySoundManager::update([[maybe_unused]] float deltaTime) { reapProcesses(); // Play swimming 
stroke sounds periodically when swimming and moving @@ -168,7 +168,7 @@ void ActivitySoundManager::rebuildJumpClipsForProfile(const std::string& raceFol } } -void ActivitySoundManager::rebuildSwimLoopClipsForProfile(const std::string& raceFolder, const std::string& raceBase, bool male) { +void ActivitySoundManager::rebuildSwimLoopClipsForProfile([[maybe_unused]] const std::string& raceFolder, [[maybe_unused]] const std::string& raceBase, [[maybe_unused]] bool male) { swimLoopClips.clear(); // WoW 3.3.5a doesn't have dedicated swim loop sounds diff --git a/src/audio/ambient_sound_manager.cpp b/src/audio/ambient_sound_manager.cpp index f976cbdf..7ab88689 100644 --- a/src/audio/ambient_sound_manager.cpp +++ b/src/audio/ambient_sound_manager.cpp @@ -117,10 +117,10 @@ bool AmbientSoundManager::initialize(pipeline::AssetManager* assets) { bool forestNightLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\ForestNormalNight.wav", forestNormalNightSounds_[0], assets); forestSnowDaySounds_.resize(1); - bool forestSnowDayLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\ForestSnowDay.wav", forestSnowDaySounds_[0], assets); + loadSound("Sound\\Ambience\\ZoneAmbience\\ForestSnowDay.wav", forestSnowDaySounds_[0], assets); forestSnowNightSounds_.resize(1); - bool forestSnowNightLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\ForestSnowNight.wav", forestSnowNightSounds_[0], assets); + loadSound("Sound\\Ambience\\ZoneAmbience\\ForestSnowNight.wav", forestSnowNightSounds_[0], assets); beachDaySounds_.resize(1); bool beachDayLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\BeachDay.wav", beachDaySounds_[0], assets); @@ -129,34 +129,34 @@ bool AmbientSoundManager::initialize(pipeline::AssetManager* assets) { bool beachNightLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\BeachNight.wav", beachNightSounds_[0], assets); grasslandsDaySounds_.resize(1); - bool grasslandsDayLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\GrasslandsDay.wav", grasslandsDaySounds_[0], assets); + 
loadSound("Sound\\Ambience\\ZoneAmbience\\GrasslandsDay.wav", grasslandsDaySounds_[0], assets); grasslandsNightSounds_.resize(1); - bool grasslandsNightLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\GrassLandsNight.wav", grasslandsNightSounds_[0], assets); + loadSound("Sound\\Ambience\\ZoneAmbience\\GrassLandsNight.wav", grasslandsNightSounds_[0], assets); jungleDaySounds_.resize(1); - bool jungleDayLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\JungleDay.wav", jungleDaySounds_[0], assets); + loadSound("Sound\\Ambience\\ZoneAmbience\\JungleDay.wav", jungleDaySounds_[0], assets); jungleNightSounds_.resize(1); - bool jungleNightLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\JungleNight.wav", jungleNightSounds_[0], assets); + loadSound("Sound\\Ambience\\ZoneAmbience\\JungleNight.wav", jungleNightSounds_[0], assets); marshDaySounds_.resize(1); - bool marshDayLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\MarshDay.wav", marshDaySounds_[0], assets); + loadSound("Sound\\Ambience\\ZoneAmbience\\MarshDay.wav", marshDaySounds_[0], assets); marshNightSounds_.resize(1); - bool marshNightLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\MarshNight.wav", marshNightSounds_[0], assets); + loadSound("Sound\\Ambience\\ZoneAmbience\\MarshNight.wav", marshNightSounds_[0], assets); desertCanyonDaySounds_.resize(1); bool desertCanyonDayLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\CanyonDesertDay.wav", desertCanyonDaySounds_[0], assets); desertCanyonNightSounds_.resize(1); - bool desertCanyonNightLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\CanyonDesertNight.wav", desertCanyonNightSounds_[0], assets); + loadSound("Sound\\Ambience\\ZoneAmbience\\CanyonDesertNight.wav", desertCanyonNightSounds_[0], assets); desertPlainsDaySounds_.resize(1); bool desertPlainsDayLoaded = loadSound("Sound\\Ambience\\ZoneAmbience\\PlainsDesertDay.wav", desertPlainsDaySounds_[0], assets); desertPlainsNightSounds_.resize(1); - bool desertPlainsNightLoaded = 
loadSound("Sound\\Ambience\\ZoneAmbience\\PlainsDesertNight.wav", desertPlainsNightSounds_[0], assets); + loadSound("Sound\\Ambience\\ZoneAmbience\\PlainsDesertNight.wav", desertPlainsNightSounds_[0], assets); // Load city ambience sounds (day and night where available) stormwindDaySounds_.resize(1); @@ -169,10 +169,10 @@ bool AmbientSoundManager::initialize(pipeline::AssetManager* assets) { bool ironforgeLoaded = loadSound("Sound\\Ambience\\WMOAmbience\\Ironforge.wav", ironforgeSounds_[0], assets); darnassusDaySounds_.resize(1); - bool darnassusDayLoaded = loadSound("Sound\\Ambience\\WMOAmbience\\DarnassusDay.wav", darnassusDaySounds_[0], assets); + loadSound("Sound\\Ambience\\WMOAmbience\\DarnassusDay.wav", darnassusDaySounds_[0], assets); darnassusNightSounds_.resize(1); - bool darnassusNightLoaded = loadSound("Sound\\Ambience\\WMOAmbience\\DarnassusNight.wav", darnassusNightSounds_[0], assets); + loadSound("Sound\\Ambience\\WMOAmbience\\DarnassusNight.wav", darnassusNightSounds_[0], assets); orgrimmarDaySounds_.resize(1); bool orgrimmarDayLoaded = loadSound("Sound\\Ambience\\WMOAmbience\\OrgrimmarDay.wav", orgrimmarDaySounds_[0], assets); @@ -181,13 +181,13 @@ bool AmbientSoundManager::initialize(pipeline::AssetManager* assets) { bool orgrimmarNightLoaded = loadSound("Sound\\Ambience\\WMOAmbience\\OrgrimmarNight.wav", orgrimmarNightSounds_[0], assets); undercitySounds_.resize(1); - bool undercityLoaded = loadSound("Sound\\Ambience\\WMOAmbience\\Undercity.wav", undercitySounds_[0], assets); + loadSound("Sound\\Ambience\\WMOAmbience\\Undercity.wav", undercitySounds_[0], assets); thunderbluffDaySounds_.resize(1); - bool thunderbluffDayLoaded = loadSound("Sound\\Ambience\\WMOAmbience\\ThunderBluffDay.wav", thunderbluffDaySounds_[0], assets); + loadSound("Sound\\Ambience\\WMOAmbience\\ThunderBluffDay.wav", thunderbluffDaySounds_[0], assets); thunderbluffNightSounds_.resize(1); - bool thunderbluffNightLoaded = 
loadSound("Sound\\Ambience\\WMOAmbience\\ThunderBluffNight.wav", thunderbluffNightSounds_[0], assets); + loadSound("Sound\\Ambience\\WMOAmbience\\ThunderBluffNight.wav", thunderbluffNightSounds_[0], assets); // Load bell toll sounds bellAllianceSounds_.resize(1); diff --git a/src/audio/combat_sound_manager.cpp b/src/audio/combat_sound_manager.cpp index d433a40c..55352459 100644 --- a/src/audio/combat_sound_manager.cpp +++ b/src/audio/combat_sound_manager.cpp @@ -295,7 +295,7 @@ void CombatSoundManager::playWeaponMiss(bool twoHanded) { } } -void CombatSoundManager::playImpact(WeaponSize weaponSize, ImpactType impactType, bool isCrit) { +void CombatSoundManager::playImpact([[maybe_unused]] WeaponSize weaponSize, ImpactType impactType, bool isCrit) { // Select appropriate impact sound library const std::vector* normalLibrary = nullptr; const std::vector* critLibrary = nullptr; diff --git a/src/audio/ui_sound_manager.cpp b/src/audio/ui_sound_manager.cpp index 626263d3..f50f1d6f 100644 --- a/src/audio/ui_sound_manager.cpp +++ b/src/audio/ui_sound_manager.cpp @@ -34,16 +34,16 @@ bool UiSoundManager::initialize(pipeline::AssetManager* assets) { bool charSheetCloseLoaded = loadSound("Sound\\Interface\\iAbilitiesCloseA.wav", characterSheetCloseSounds_[0], assets); auctionOpenSounds_.resize(1); - bool auctionOpenLoaded = loadSound("Sound\\Interface\\AuctionWindowOpen.wav", auctionOpenSounds_[0], assets); + loadSound("Sound\\Interface\\AuctionWindowOpen.wav", auctionOpenSounds_[0], assets); auctionCloseSounds_.resize(1); - bool auctionCloseLoaded = loadSound("Sound\\Interface\\AuctionWindowClose.wav", auctionCloseSounds_[0], assets); + loadSound("Sound\\Interface\\AuctionWindowClose.wav", auctionCloseSounds_[0], assets); guildBankOpenSounds_.resize(1); - bool guildBankOpenLoaded = loadSound("Sound\\Interface\\GuildVaultOpen.wav", guildBankOpenSounds_[0], assets); + loadSound("Sound\\Interface\\GuildVaultOpen.wav", guildBankOpenSounds_[0], assets); 
guildBankCloseSounds_.resize(1); - bool guildBankCloseLoaded = loadSound("Sound\\Interface\\GuildVaultClose.wav", guildBankCloseSounds_[0], assets); + loadSound("Sound\\Interface\\GuildVaultClose.wav", guildBankCloseSounds_[0], assets); // Load button sounds buttonClickSounds_.resize(1); @@ -63,7 +63,7 @@ bool UiSoundManager::initialize(pipeline::AssetManager* assets) { bool questFailedLoaded = loadSound("Sound\\Interface\\igQuestFailed.wav", questFailedSounds_[0], assets); questUpdateSounds_.resize(1); - bool questUpdateLoaded = loadSound("Sound\\Interface\\iQuestUpdate.wav", questUpdateSounds_[0], assets); + loadSound("Sound\\Interface\\iQuestUpdate.wav", questUpdateSounds_[0], assets); // Load loot sounds lootCoinSmallSounds_.resize(1); @@ -86,13 +86,13 @@ bool UiSoundManager::initialize(pipeline::AssetManager* assets) { bool pickupBookLoaded = loadSound("Sound\\Interface\\PickUp\\PickUpBook.wav", pickupBookSounds_[0], assets); pickupClothSounds_.resize(1); - bool pickupClothLoaded = loadSound("Sound\\Interface\\PickUp\\PickUpCloth_Leather01.wav", pickupClothSounds_[0], assets); + loadSound("Sound\\Interface\\PickUp\\PickUpCloth_Leather01.wav", pickupClothSounds_[0], assets); pickupFoodSounds_.resize(1); - bool pickupFoodLoaded = loadSound("Sound\\Interface\\PickUp\\PickUpFoodGeneric.wav", pickupFoodSounds_[0], assets); + loadSound("Sound\\Interface\\PickUp\\PickUpFoodGeneric.wav", pickupFoodSounds_[0], assets); pickupGemSounds_.resize(1); - bool pickupGemLoaded = loadSound("Sound\\Interface\\PickUp\\PickUpGems.wav", pickupGemSounds_[0], assets); + loadSound("Sound\\Interface\\PickUp\\PickUpGems.wav", pickupGemSounds_[0], assets); // Load eating/drinking sounds eatingSounds_.resize(1); @@ -107,13 +107,13 @@ bool UiSoundManager::initialize(pipeline::AssetManager* assets) { // Load error/feedback sounds errorSounds_.resize(1); - bool errorLoaded = loadSound("Sound\\Interface\\Error.wav", errorSounds_[0], assets); + loadSound("Sound\\Interface\\Error.wav", 
errorSounds_[0], assets); selectTargetSounds_.resize(1); - bool selectTargetLoaded = loadSound("Sound\\Interface\\iSelectTarget.wav", selectTargetSounds_[0], assets); + loadSound("Sound\\Interface\\iSelectTarget.wav", selectTargetSounds_[0], assets); deselectTargetSounds_.resize(1); - bool deselectTargetLoaded = loadSound("Sound\\Interface\\iDeselectTarget.wav", deselectTargetSounds_[0], assets); + loadSound("Sound\\Interface\\iDeselectTarget.wav", deselectTargetSounds_[0], assets); LOG_INFO("UISoundManager: Window sounds - Bag: ", (bagOpenLoaded && bagCloseLoaded) ? "YES" : "NO", ", QuestLog: ", (questLogOpenLoaded && questLogCloseLoaded) ? "YES" : "NO", diff --git a/src/game/warden_emulator.cpp b/src/game/warden_emulator.cpp index 11ff861f..1d43768b 100644 --- a/src/game/warden_emulator.cpp +++ b/src/game/warden_emulator.cpp @@ -122,7 +122,7 @@ bool WardenEmulator::initialize(const void* moduleCode, size_t moduleSize, uint3 uint32_t WardenEmulator::hookAPI(const std::string& dllName, const std::string& functionName, - std::function&)> handler) { + [[maybe_unused]] std::function&)> handler) { // Allocate address for this API stub static uint32_t nextStubAddr = API_STUB_BASE; uint32_t stubAddr = nextStubAddr; @@ -239,7 +239,7 @@ std::string WardenEmulator::readString(uint32_t address, size_t maxLen) { return std::string(buffer.data()); } -uint32_t WardenEmulator::allocateMemory(size_t size, uint32_t protection) { +uint32_t WardenEmulator::allocateMemory(size_t size, [[maybe_unused]] uint32_t protection) { // Align to 4KB size = (size + 0xFFF) & ~0xFFF; @@ -315,7 +315,7 @@ uint32_t WardenEmulator::apiVirtualFree(WardenEmulator& emu, const std::vector& args) { +uint32_t WardenEmulator::apiGetTickCount([[maybe_unused]] WardenEmulator& emu, [[maybe_unused]] const std::vector& args) { auto now = std::chrono::steady_clock::now(); auto ms = std::chrono::duration_cast(now.time_since_epoch()).count(); uint32_t ticks = static_cast(ms & 0xFFFFFFFF); @@ -324,7 +324,7 @@ 
uint32_t WardenEmulator::apiGetTickCount(WardenEmulator& emu, const std::vector< return ticks; } -uint32_t WardenEmulator::apiSleep(WardenEmulator& emu, const std::vector& args) { +uint32_t WardenEmulator::apiSleep([[maybe_unused]] WardenEmulator& emu, const std::vector& args) { if (args.size() < 1) return 0; uint32_t dwMilliseconds = args[0]; @@ -333,12 +333,12 @@ uint32_t WardenEmulator::apiSleep(WardenEmulator& emu, const std::vector& args) { +uint32_t WardenEmulator::apiGetCurrentThreadId([[maybe_unused]] WardenEmulator& emu, [[maybe_unused]] const std::vector& args) { std::cout << "[WinAPI] GetCurrentThreadId() = 1234" << '\n'; return 1234; // Fake thread ID } -uint32_t WardenEmulator::apiGetCurrentProcessId(WardenEmulator& emu, const std::vector& args) { +uint32_t WardenEmulator::apiGetCurrentProcessId([[maybe_unused]] WardenEmulator& emu, [[maybe_unused]] const std::vector& args) { std::cout << "[WinAPI] GetCurrentProcessId() = 5678" << '\n'; return 5678; // Fake process ID } @@ -347,7 +347,7 @@ uint32_t WardenEmulator::apiReadProcessMemory(WardenEmulator& emu, const std::ve // ReadProcessMemory(hProcess, lpBaseAddress, lpBuffer, nSize, lpNumberOfBytesRead) if (args.size() < 5) return 0; - uint32_t hProcess = args[0]; + [[maybe_unused]] uint32_t hProcess = args[0]; uint32_t lpBaseAddress = args[1]; uint32_t lpBuffer = args[2]; uint32_t nSize = args[3]; @@ -377,13 +377,11 @@ uint32_t WardenEmulator::apiReadProcessMemory(WardenEmulator& emu, const std::ve // Unicorn Callbacks // ============================================================================ -void WardenEmulator::hookCode(uc_engine* uc, uint64_t address, uint32_t size, void* userData) { - WardenEmulator* emu = static_cast(userData); +void WardenEmulator::hookCode([[maybe_unused]] uc_engine* uc, uint64_t address, [[maybe_unused]] uint32_t size, [[maybe_unused]] void* userData) { std::cout << "[Trace] 0x" << std::hex << address << std::dec << '\n'; } -void WardenEmulator::hookMemInvalid(uc_engine* 
uc, int type, uint64_t address, int size, int64_t value, void* userData) { - WardenEmulator* emu = static_cast(userData); +void WardenEmulator::hookMemInvalid([[maybe_unused]] uc_engine* uc, int type, uint64_t address, int size, [[maybe_unused]] int64_t value, [[maybe_unused]] void* userData) { const char* typeStr = "UNKNOWN"; switch (type) { diff --git a/src/game/warden_module.cpp b/src/game/warden_module.cpp index fba39960..bad36430 100644 --- a/src/game/warden_module.cpp +++ b/src/game/warden_module.cpp @@ -129,7 +129,7 @@ bool WardenModule::load(const std::vector& moduleData, } bool WardenModule::processCheckRequest(const std::vector& checkData, - std::vector& responseOut) { + [[maybe_unused]] std::vector& responseOut) { if (!loaded_) { std::cerr << "[WardenModule] Module not loaded, cannot process checks" << '\n'; return false; @@ -198,7 +198,7 @@ bool WardenModule::processCheckRequest(const std::vector& checkData, return false; } -uint32_t WardenModule::tick(uint32_t deltaMs) { +uint32_t WardenModule::tick([[maybe_unused]] uint32_t deltaMs) { if (!loaded_ || !funcList_.tick) { return 0; // No tick needed } @@ -209,7 +209,7 @@ uint32_t WardenModule::tick(uint32_t deltaMs) { return 0; } -void WardenModule::generateRC4Keys(uint8_t* packet) { +void WardenModule::generateRC4Keys([[maybe_unused]] uint8_t* packet) { if (!loaded_ || !funcList_.generateRC4Keys) { return; } @@ -633,9 +633,11 @@ bool WardenModule::applyRelocations() { currentOffset += delta; if (currentOffset + 4 <= moduleSize_) { - uint32_t* ptr = reinterpret_cast( - static_cast(moduleMemory_) + currentOffset); - *ptr += moduleBase_; + uint8_t* addr = static_cast(moduleMemory_) + currentOffset; + uint32_t val; + std::memcpy(&val, addr, sizeof(uint32_t)); + val += moduleBase_; + std::memcpy(addr, &val, sizeof(uint32_t)); relocCount++; } else { std::cerr << "[WardenModule] Relocation offset " << currentOffset @@ -755,16 +757,16 @@ bool WardenModule::initializeModule() { void (*logMessage)(const char* 
msg); }; - // Setup client callbacks - ClientCallbacks callbacks = {}; + // Setup client callbacks (used when calling module entry point below) + [[maybe_unused]] ClientCallbacks callbacks = {}; // Stub callbacks (would need real implementations) - callbacks.sendPacket = [](uint8_t* data, size_t len) { + callbacks.sendPacket = []([[maybe_unused]] uint8_t* data, size_t len) { std::cout << "[WardenModule Callback] sendPacket(" << len << " bytes)" << '\n'; // TODO: Send CMSG_WARDEN_DATA packet }; - callbacks.validateModule = [](uint8_t* hash) { + callbacks.validateModule = []([[maybe_unused]] uint8_t* hash) { std::cout << "[WardenModule Callback] validateModule()" << '\n'; // TODO: Validate module hash }; @@ -779,7 +781,7 @@ bool WardenModule::initializeModule() { free(ptr); }; - callbacks.generateRC4 = [](uint8_t* seed) { + callbacks.generateRC4 = []([[maybe_unused]] uint8_t* seed) { std::cout << "[WardenModule Callback] generateRC4()" << '\n'; // TODO: Re-key RC4 cipher }; diff --git a/src/pipeline/blp_loader.cpp b/src/pipeline/blp_loader.cpp index e0df4639..8c817890 100644 --- a/src/pipeline/blp_loader.cpp +++ b/src/pipeline/blp_loader.cpp @@ -30,34 +30,39 @@ BLPImage BLPLoader::load(const std::vector& blpData) { } BLPImage BLPLoader::loadBLP1(const uint8_t* data, size_t size) { - // BLP1 header has all uint32 fields (different layout from BLP2) - const BLP1Header* header = reinterpret_cast(data); + // Copy header to stack to avoid unaligned reinterpret_cast (UB on strict platforms) + if (size < sizeof(BLP1Header)) { + LOG_ERROR("BLP1 data too small for header"); + return BLPImage(); + } + BLP1Header header; + std::memcpy(&header, data, sizeof(BLP1Header)); BLPImage image; image.format = BLPFormat::BLP1; - image.width = header->width; - image.height = header->height; + image.width = header.width; + image.height = header.height; image.channels = 4; - image.mipLevels = header->hasMips ? 16 : 1; + image.mipLevels = header.hasMips ? 
16 : 1; // BLP1 compression: 0=JPEG (not used in WoW), 1=palette/indexed // BLP1 does NOT support DXT — only palette with optional alpha - if (header->compression == 1) { + if (header.compression == 1) { image.compression = BLPCompression::PALETTE; - } else if (header->compression == 0) { + } else if (header.compression == 0) { LOG_WARNING("BLP1 JPEG compression not supported"); return BLPImage(); } else { - LOG_WARNING("BLP1 unknown compression: ", header->compression); + LOG_WARNING("BLP1 unknown compression: ", header.compression); return BLPImage(); } LOG_DEBUG("Loading BLP1: ", image.width, "x", image.height, " ", - getCompressionName(image.compression), " alpha=", header->alphaBits); + getCompressionName(image.compression), " alpha=", header.alphaBits); // Get first mipmap (full resolution) - uint32_t offset = header->mipOffsets[0]; - uint32_t mipSize = header->mipSizes[0]; + uint32_t offset = header.mipOffsets[0]; + uint32_t mipSize = header.mipSizes[0]; if (offset + mipSize > size) { LOG_ERROR("BLP1 mipmap data out of bounds (offset=", offset, " size=", mipSize, " fileSize=", size, ")"); @@ -70,45 +75,50 @@ BLPImage BLPLoader::loadBLP1(const uint8_t* data, size_t size) { int pixelCount = image.width * image.height; image.data.resize(pixelCount * 4); // RGBA8 - decompressPalette(mipData, image.data.data(), header->palette, - image.width, image.height, static_cast(header->alphaBits)); + decompressPalette(mipData, image.data.data(), header.palette, + image.width, image.height, static_cast(header.alphaBits)); return image; } BLPImage BLPLoader::loadBLP2(const uint8_t* data, size_t size) { - // BLP2 header has uint8 fields for compression/alpha/encoding - const BLP2Header* header = reinterpret_cast(data); + // Copy header to stack to avoid unaligned reinterpret_cast (UB on strict platforms) + if (size < sizeof(BLP2Header)) { + LOG_ERROR("BLP2 data too small for header"); + return BLPImage(); + } + BLP2Header header; + std::memcpy(&header, data, 
sizeof(BLP2Header)); BLPImage image; image.format = BLPFormat::BLP2; - image.width = header->width; - image.height = header->height; + image.width = header.width; + image.height = header.height; image.channels = 4; - image.mipLevels = header->hasMips ? 16 : 1; + image.mipLevels = header.hasMips ? 16 : 1; // BLP2 compression types: // 1 = palette/uncompressed // 2 = DXTC (DXT1/DXT3/DXT5 based on alphaDepth + alphaEncoding) // 3 = plain A8R8G8B8 - if (header->compression == 1) { + if (header.compression == 1) { image.compression = BLPCompression::PALETTE; - } else if (header->compression == 2) { + } else if (header.compression == 2) { // BLP2 DXTC format selection based on alphaDepth + alphaEncoding: // alphaDepth=0 → DXT1 (no alpha) // alphaDepth>0, alphaEncoding=0 → DXT1 (1-bit alpha) // alphaDepth>0, alphaEncoding=1 → DXT3 (explicit 4-bit alpha) // alphaDepth>0, alphaEncoding=7 → DXT5 (interpolated alpha) - if (header->alphaDepth == 0 || header->alphaEncoding == 0) { + if (header.alphaDepth == 0 || header.alphaEncoding == 0) { image.compression = BLPCompression::DXT1; - } else if (header->alphaEncoding == 1) { + } else if (header.alphaEncoding == 1) { image.compression = BLPCompression::DXT3; - } else if (header->alphaEncoding == 7) { + } else if (header.alphaEncoding == 7) { image.compression = BLPCompression::DXT5; } else { image.compression = BLPCompression::DXT1; } - } else if (header->compression == 3) { + } else if (header.compression == 3) { image.compression = BLPCompression::ARGB8888; } else { image.compression = BLPCompression::ARGB8888; @@ -116,13 +126,13 @@ BLPImage BLPLoader::loadBLP2(const uint8_t* data, size_t size) { LOG_DEBUG("Loading BLP2: ", image.width, "x", image.height, " ", getCompressionName(image.compression), - " (comp=", (int)header->compression, " alphaDepth=", (int)header->alphaDepth, - " alphaEnc=", (int)header->alphaEncoding, " mipOfs=", header->mipOffsets[0], - " mipSize=", header->mipSizes[0], ")"); + " (comp=", 
(int)header.compression, " alphaDepth=", (int)header.alphaDepth, + " alphaEnc=", (int)header.alphaEncoding, " mipOfs=", header.mipOffsets[0], + " mipSize=", header.mipSizes[0], ")"); // Get first mipmap (full resolution) - uint32_t offset = header->mipOffsets[0]; - uint32_t mipSize = header->mipSizes[0]; + uint32_t offset = header.mipOffsets[0]; + uint32_t mipSize = header.mipSizes[0]; if (offset + mipSize > size) { LOG_ERROR("BLP2 mipmap data out of bounds"); @@ -149,8 +159,8 @@ BLPImage BLPLoader::loadBLP2(const uint8_t* data, size_t size) { break; case BLPCompression::PALETTE: - decompressPalette(mipData, image.data.data(), header->palette, - image.width, image.height, header->alphaDepth); + decompressPalette(mipData, image.data.data(), header.palette, + image.width, image.height, header.alphaDepth); break; case BLPCompression::ARGB8888: diff --git a/src/pipeline/dbc_loader.cpp b/src/pipeline/dbc_loader.cpp index dd1d6f52..f7e040da 100644 --- a/src/pipeline/dbc_loader.cpp +++ b/src/pipeline/dbc_loader.cpp @@ -42,19 +42,20 @@ bool DBCFile::load(const std::vector& dbcData) { return false; } - // Read header - const DBCHeader* header = reinterpret_cast(dbcData.data()); + // Read header safely (avoid unaligned reinterpret_cast — UB on strict platforms) + DBCHeader header; + std::memcpy(&header, dbcData.data(), sizeof(DBCHeader)); // Verify magic - if (std::memcmp(header->magic, "WDBC", 4) != 0) { - LOG_ERROR("Invalid DBC magic: ", std::string(header->magic, 4)); + if (std::memcmp(header.magic, "WDBC", 4) != 0) { + LOG_ERROR("Invalid DBC magic: ", std::string(header.magic, 4)); return false; } - recordCount = header->recordCount; - fieldCount = header->fieldCount; - recordSize = header->recordSize; - stringBlockSize = header->stringBlockSize; + recordCount = header.recordCount; + fieldCount = header.fieldCount; + recordSize = header.recordSize; + stringBlockSize = header.stringBlockSize; // Validate sizes uint32_t expectedSize = sizeof(DBCHeader) + (recordCount * 
recordSize) + stringBlockSize; @@ -111,8 +112,9 @@ uint32_t DBCFile::getUInt32(uint32_t recordIndex, uint32_t fieldIndex) const { return 0; } - const uint32_t* field = reinterpret_cast(record + (fieldIndex * 4)); - return *field; + uint32_t value; + std::memcpy(&value, record + (fieldIndex * 4), sizeof(uint32_t)); + return value; } int32_t DBCFile::getInt32(uint32_t recordIndex, uint32_t fieldIndex) const { @@ -129,8 +131,9 @@ float DBCFile::getFloat(uint32_t recordIndex, uint32_t fieldIndex) const { return 0.0f; } - const float* field = reinterpret_cast(record + (fieldIndex * 4)); - return *field; + float value; + std::memcpy(&value, record + (fieldIndex * 4), sizeof(float)); + return value; } std::string DBCFile::getString(uint32_t recordIndex, uint32_t fieldIndex) const { diff --git a/src/pipeline/m2_loader.cpp b/src/pipeline/m2_loader.cpp index fc4cf6ae..b3d057d6 100644 --- a/src/pipeline/m2_loader.cpp +++ b/src/pipeline/m2_loader.cpp @@ -1456,9 +1456,7 @@ bool M2Loader::loadSkin(const std::vector& skinData, M2Model& model) { if (header.nSubmeshes > 0 && header.ofsSubmeshes > 0) { submeshes = readArray(skinData, header.ofsSubmeshes, header.nSubmeshes); core::Logger::getInstance().debug(" Submeshes: ", submeshes.size()); - for (size_t i = 0; i < submeshes.size(); i++) { - const auto& sm = submeshes[i]; - } + (void)submeshes; } // Read batches with proper submesh references diff --git a/src/rendering/character_renderer.cpp b/src/rendering/character_renderer.cpp index 522f6a48..33ff425a 100644 --- a/src/rendering/character_renderer.cpp +++ b/src/rendering/character_renderer.cpp @@ -1610,7 +1610,7 @@ glm::mat4 CharacterRenderer::getBoneTransform(const pipeline::M2Bone& bone, floa // --- Rendering --- -void CharacterRenderer::render(VkCommandBuffer cmd, VkDescriptorSet perFrameSet, const Camera& camera) { +void CharacterRenderer::render(VkCommandBuffer cmd, VkDescriptorSet perFrameSet, [[maybe_unused]] const Camera& camera) { if (instances.empty() || 
!opaquePipeline_) { return; } diff --git a/src/rendering/vk_render_target.cpp b/src/rendering/vk_render_target.cpp index 48e3a50e..f2099bbf 100644 --- a/src/rendering/vk_render_target.cpp +++ b/src/rendering/vk_render_target.cpp @@ -19,7 +19,7 @@ bool VkRenderTarget::create(VkContext& ctx, uint32_t width, uint32_t height, // Create color image (multisampled if MSAA) colorImage_ = createImage(device, allocator, width, height, format, - VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | (useMSAA ? VkImageUsageFlags(0) : VK_IMAGE_USAGE_SAMPLED_BIT), + VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | (useMSAA ? static_cast(0) : static_cast(VK_IMAGE_USAGE_SAMPLED_BIT)), msaaSamples); if (!colorImage_.image) { From 20dd5ed63b051e28ee5d8d90d7fcefa79c0b354c Mon Sep 17 00:00:00 2001 From: Kelsi Date: Mon, 23 Feb 2026 19:11:29 -0800 Subject: [PATCH 02/10] Add cross-platform Python asset pipeline GUI --- README.md | 4 + docs/asset-pipeline-gui.md | 56 +++ tools/asset_pipeline_gui.py | 782 ++++++++++++++++++++++++++++++++++++ 3 files changed, 842 insertions(+) create mode 100644 docs/asset-pipeline-gui.md create mode 100644 tools/asset_pipeline_gui.py diff --git a/README.md b/README.md index 8a21add7..6a91237c 100644 --- a/README.md +++ b/README.md @@ -106,6 +106,9 @@ This project requires WoW client data that you extract from your own legally obt Wowee loads assets via an extracted loose-file tree indexed by `manifest.json` (it does not read MPQs at runtime). 
+For a cross-platform GUI workflow (extraction + texture pack management + active override state), see: +- [Asset Pipeline GUI](docs/asset-pipeline-gui.md) + #### 1) Extract MPQs into `./Data/` ```bash @@ -196,6 +199,7 @@ make -j$(nproc) - [Project Status](docs/status.md) -- Current code state, limitations, and near-term direction - [Quick Start](docs/quickstart.md) -- Installation and first steps - [Build Instructions](BUILD_INSTRUCTIONS.md) -- Detailed dependency, build, and run guide +- [Asset Pipeline GUI](docs/asset-pipeline-gui.md) -- Python GUI for extraction, pack installs, ordering, and override rebuilds ### Technical Documentation - [Architecture](docs/architecture.md) -- System design and module overview diff --git a/docs/asset-pipeline-gui.md b/docs/asset-pipeline-gui.md new file mode 100644 index 00000000..708bb93c --- /dev/null +++ b/docs/asset-pipeline-gui.md @@ -0,0 +1,56 @@ +# Asset Pipeline GUI + +WoWee includes a Python GUI for extraction and texture-pack management: + +```bash +python3 tools/asset_pipeline_gui.py +``` + +## Supported Platforms + +- Linux +- macOS +- Windows + +The app uses Python's built-in `tkinter` module. If `tkinter` is missing, install the platform package: + +- Linux (Debian/Ubuntu): `sudo apt install python3-tk` +- Fedora: `sudo dnf install python3-tkinter` +- Arch: `sudo pacman -S tk` +- macOS: use the official Python.org installer (includes Tk) +- Windows: use the official Python installer and enable Tcl/Tk support + +## What It Does + +- Runs `asset_extract` (or `extract_assets.sh` fallback on non-Windows) +- Saves extraction config in `asset_pipeline/state.json` +- Installs texture packs from ZIP or folders +- Lets users activate/deactivate packs and reorder active pack priority +- Rebuilds `Data/override` from active pack order +- Shows current data state (`manifest.json`, entry count, override file count, last runs) + +## Pack Format + +Supported pack layouts: + +1. `PackName/Data/...` +2. `PackName/data/...` +3. 
`PackName/...` where top folders include game folders (`Interface`, `World`, `Character`, `Textures`, `Sound`) + +When multiple active packs contain the same file path, **later packs in active order win**. + +## State Files and Folders + +- Pipeline state: `asset_pipeline/state.json` +- Installed packs: `asset_pipeline/packs//` +- Active merged override output: `/override/` + +## Typical Workflow + +1. Open the GUI. +2. Set WoW MPQ Data source and output Data path. +3. Run extraction. +4. Install texture packs. +5. Activate and order packs. +6. Click **Rebuild Override**. +7. Launch wowee with `WOW_DATA_PATH` pointing at your output Data path if needed. diff --git a/tools/asset_pipeline_gui.py b/tools/asset_pipeline_gui.py new file mode 100644 index 00000000..194ff7bd --- /dev/null +++ b/tools/asset_pipeline_gui.py @@ -0,0 +1,782 @@ +#!/usr/bin/env python3 +"""WoWee Asset Pipeline GUI. + +Cross-platform Tkinter app for running asset extraction and managing texture packs +that are merged into Data/override in deterministic order. 
+""" + +from __future__ import annotations + +import json +import platform +import queue +import shutil +import subprocess +import threading +import time +import zipfile +from dataclasses import asdict, dataclass, field +from datetime import datetime +from pathlib import Path +from typing import Any + +import tkinter as tk +from tkinter import filedialog, messagebox, ttk +from tkinter.scrolledtext import ScrolledText + + +ROOT_DIR = Path(__file__).resolve().parents[1] +PIPELINE_DIR = ROOT_DIR / "asset_pipeline" +STATE_FILE = PIPELINE_DIR / "state.json" + + +@dataclass +class PackInfo: + pack_id: str + name: str + source: str + installed_dir: str + installed_at: str + file_count: int = 0 + + +@dataclass +class AppState: + wow_data_dir: str = "" + output_data_dir: str = str(ROOT_DIR / "Data") + extractor_path: str = "" + expansion: str = "auto" + locale: str = "auto" + skip_dbc: bool = False + dbc_csv: bool = False + verify: bool = False + verbose: bool = False + threads: int = 0 + packs: list[PackInfo] = field(default_factory=list) + active_pack_ids: list[str] = field(default_factory=list) + last_extract_at: str = "" + last_extract_ok: bool = False + last_extract_command: str = "" + last_override_build_at: str = "" + + +class PipelineManager: + def __init__(self) -> None: + PIPELINE_DIR.mkdir(parents=True, exist_ok=True) + (PIPELINE_DIR / "packs").mkdir(parents=True, exist_ok=True) + self.state = self._load_state() + + def _default_state(self) -> AppState: + return AppState() + + def _load_state(self) -> AppState: + if not STATE_FILE.exists(): + return self._default_state() + try: + doc = json.loads(STATE_FILE.read_text(encoding="utf-8")) + packs = [PackInfo(**item) for item in doc.get("packs", [])] + doc["packs"] = packs + state = AppState(**doc) + return state + except (OSError, ValueError, TypeError): + return self._default_state() + + def save_state(self) -> None: + serializable = asdict(self.state) + STATE_FILE.write_text(json.dumps(serializable, indent=2), 
encoding="utf-8") + + def now_str(self) -> str: + return datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + def _normalize_id(self, name: str) -> str: + raw = "".join(ch.lower() if ch.isalnum() else "-" for ch in name).strip("-") + base = raw or "pack" + return f"{base}-{int(time.time())}" + + def _pack_dir(self, pack_id: str) -> Path: + return PIPELINE_DIR / "packs" / pack_id + + def _looks_like_data_root(self, path: Path) -> bool: + markers = {"interface", "world", "character", "textures", "sound"} + names = {p.name.lower() for p in path.iterdir() if p.is_dir()} if path.is_dir() else set() + return bool(markers.intersection(names)) + + def find_data_root(self, pack_path: Path) -> Path: + direct_data = pack_path / "Data" + if direct_data.is_dir(): + return direct_data + + lower_data = pack_path / "data" + if lower_data.is_dir(): + return lower_data + + if self._looks_like_data_root(pack_path): + return pack_path + + # Common zip layout: one wrapper directory. + children = [p for p in pack_path.iterdir() if p.is_dir()] if pack_path.is_dir() else [] + if len(children) == 1: + child = children[0] + child_data = child / "Data" + if child_data.is_dir(): + return child_data + if self._looks_like_data_root(child): + return child + + return pack_path + + def _count_files(self, root: Path) -> int: + if not root.exists(): + return 0 + return sum(1 for p in root.rglob("*") if p.is_file()) + + def install_pack_from_zip(self, zip_path: Path) -> PackInfo: + pack_name = zip_path.stem + pack_id = self._normalize_id(pack_name) + target = self._pack_dir(pack_id) + target.mkdir(parents=True, exist_ok=False) + + with zipfile.ZipFile(zip_path, "r") as zf: + zf.extractall(target) + + data_root = self.find_data_root(target) + info = PackInfo( + pack_id=pack_id, + name=pack_name, + source=str(zip_path), + installed_dir=str(target), + installed_at=self.now_str(), + file_count=self._count_files(data_root), + ) + self.state.packs.append(info) + self.save_state() + return info + + def 
install_pack_from_folder(self, folder_path: Path) -> PackInfo: + pack_name = folder_path.name + pack_id = self._normalize_id(pack_name) + target = self._pack_dir(pack_id) + shutil.copytree(folder_path, target) + + data_root = self.find_data_root(target) + info = PackInfo( + pack_id=pack_id, + name=pack_name, + source=str(folder_path), + installed_dir=str(target), + installed_at=self.now_str(), + file_count=self._count_files(data_root), + ) + self.state.packs.append(info) + self.save_state() + return info + + def uninstall_pack(self, pack_id: str) -> None: + self.state.packs = [p for p in self.state.packs if p.pack_id != pack_id] + self.state.active_pack_ids = [pid for pid in self.state.active_pack_ids if pid != pack_id] + target = self._pack_dir(pack_id) + if target.exists(): + shutil.rmtree(target) + self.save_state() + + def set_pack_active(self, pack_id: str, active: bool) -> None: + if active: + if pack_id not in self.state.active_pack_ids: + self.state.active_pack_ids.append(pack_id) + else: + self.state.active_pack_ids = [pid for pid in self.state.active_pack_ids if pid != pack_id] + self.save_state() + + def move_active_pack(self, pack_id: str, delta: int) -> None: + ids = self.state.active_pack_ids + if pack_id not in ids: + return + idx = ids.index(pack_id) + nidx = idx + delta + if nidx < 0 or nidx >= len(ids): + return + ids[idx], ids[nidx] = ids[nidx], ids[idx] + self.state.active_pack_ids = ids + self.save_state() + + def rebuild_override(self) -> dict[str, int]: + out_dir = Path(self.state.output_data_dir) + override_dir = out_dir / "override" + override_dir.mkdir(parents=True, exist_ok=True) + + if override_dir.exists(): + shutil.rmtree(override_dir) + override_dir.mkdir(parents=True, exist_ok=True) + + copied = 0 + replaced = 0 + + active_map = {p.pack_id: p for p in self.state.packs} + for pack_id in self.state.active_pack_ids: + info = active_map.get(pack_id) + if info is None: + continue + pack_dir = Path(info.installed_dir) + if not 
pack_dir.exists(): + continue + + data_root = self.find_data_root(pack_dir) + for source in data_root.rglob("*"): + if not source.is_file(): + continue + rel = source.relative_to(data_root) + target = override_dir / rel + target.parent.mkdir(parents=True, exist_ok=True) + if target.exists(): + replaced += 1 + shutil.copy2(source, target) + copied += 1 + + self.state.last_override_build_at = self.now_str() + self.save_state() + return {"copied": copied, "replaced": replaced} + + def _resolve_extractor(self) -> list[str] | None: + configured = self.state.extractor_path.strip() + if configured: + path = Path(configured) + if path.exists() and path.is_file(): + return [str(path)] + + ext = ".exe" if platform.system().lower().startswith("win") else "" + for candidate in [ + ROOT_DIR / "build" / "bin" / f"asset_extract{ext}", + ROOT_DIR / "bin" / f"asset_extract{ext}", + ]: + if candidate.exists(): + return [str(candidate)] + + if platform.system().lower().startswith("win"): + return None + + shell_script = ROOT_DIR / "extract_assets.sh" + if shell_script.exists(): + return ["bash", str(shell_script)] + + return None + + def build_extract_command(self) -> list[str]: + mpq_dir = self.state.wow_data_dir.strip() + output_dir = self.state.output_data_dir.strip() + if not mpq_dir or not output_dir: + raise ValueError("Both WoW Data directory and output directory are required.") + + extractor = self._resolve_extractor() + if extractor is None: + raise ValueError( + "No extractor found. Build asset_extract first or set the extractor path in Configuration." 
+ ) + + if extractor[0].endswith("extract_assets.sh") or extractor[-1].endswith("extract_assets.sh"): + cmd = [*extractor, mpq_dir] + if self.state.expansion and self.state.expansion != "auto": + cmd.append(self.state.expansion) + return cmd + + cmd = [*extractor, "--mpq-dir", mpq_dir, "--output", output_dir] + if self.state.expansion and self.state.expansion != "auto": + cmd.extend(["--expansion", self.state.expansion]) + if self.state.locale and self.state.locale != "auto": + cmd.extend(["--locale", self.state.locale]) + if self.state.skip_dbc: + cmd.append("--skip-dbc") + if self.state.dbc_csv: + cmd.append("--dbc-csv") + if self.state.verify: + cmd.append("--verify") + if self.state.verbose: + cmd.append("--verbose") + if self.state.threads > 0: + cmd.extend(["--threads", str(self.state.threads)]) + return cmd + + def summarize_state(self) -> dict[str, Any]: + output_dir = Path(self.state.output_data_dir) + manifest_path = output_dir / "manifest.json" + override_dir = output_dir / "override" + + summary: dict[str, Any] = { + "output_dir": str(output_dir), + "output_exists": output_dir.exists(), + "manifest_exists": manifest_path.exists(), + "manifest_entries": 0, + "override_exists": override_dir.exists(), + "override_files": self._count_files(override_dir), + "packs_installed": len(self.state.packs), + "packs_active": len(self.state.active_pack_ids), + "last_extract_at": self.state.last_extract_at or "never", + "last_extract_ok": self.state.last_extract_ok, + "last_override_build_at": self.state.last_override_build_at or "never", + } + + if manifest_path.exists(): + try: + doc = json.loads(manifest_path.read_text(encoding="utf-8")) + entries = doc.get("entries", {}) + if isinstance(entries, dict): + summary["manifest_entries"] = len(entries) + except (OSError, ValueError, TypeError): + summary["manifest_entries"] = -1 + + return summary + + +class AssetPipelineGUI: + def __init__(self, root: tk.Tk) -> None: + self.root = root + self.manager = PipelineManager() 
+ + self.log_queue: queue.Queue[str] = queue.Queue() + self.proc_thread: threading.Thread | None = None + self.proc_running = False + + self.root.title("WoWee Asset Pipeline") + self.root.geometry("1120x760") + + self.status_var = tk.StringVar(value="Ready") + self._build_ui() + self._load_vars_from_state() + self.refresh_pack_list() + self.refresh_state_view() + self.root.after(120, self._poll_logs) + + def _build_ui(self) -> None: + top = ttk.Frame(self.root, padding=10) + top.pack(fill="both", expand=True) + + status = ttk.Label(top, textvariable=self.status_var, anchor="w") + status.pack(fill="x", pady=(0, 8)) + + self.notebook = ttk.Notebook(top) + self.notebook.pack(fill="both", expand=True) + + self.cfg_tab = ttk.Frame(self.notebook, padding=10) + self.packs_tab = ttk.Frame(self.notebook, padding=10) + self.state_tab = ttk.Frame(self.notebook, padding=10) + self.logs_tab = ttk.Frame(self.notebook, padding=10) + + self.notebook.add(self.cfg_tab, text="Configuration") + self.notebook.add(self.packs_tab, text="Texture Packs") + self.notebook.add(self.state_tab, text="Current State") + self.notebook.add(self.logs_tab, text="Logs") + + self._build_config_tab() + self._build_packs_tab() + self._build_state_tab() + self._build_logs_tab() + + def _build_config_tab(self) -> None: + self.var_wow_data = tk.StringVar() + self.var_output_data = tk.StringVar() + self.var_extractor = tk.StringVar() + self.var_expansion = tk.StringVar(value="auto") + self.var_locale = tk.StringVar(value="auto") + self.var_skip_dbc = tk.BooleanVar(value=False) + self.var_dbc_csv = tk.BooleanVar(value=False) + self.var_verify = tk.BooleanVar(value=False) + self.var_verbose = tk.BooleanVar(value=False) + self.var_threads = tk.IntVar(value=0) + + frame = self.cfg_tab + + self._path_row(frame, 0, "WoW Data (MPQ source)", self.var_wow_data, self._pick_wow_data_dir) + self._path_row(frame, 1, "Output Data directory", self.var_output_data, self._pick_output_dir) + self._path_row(frame, 2, 
"Extractor binary/script (optional)", self.var_extractor, self._pick_extractor) + + ttk.Label(frame, text="Expansion").grid(row=3, column=0, sticky="w", pady=6) + exp_combo = ttk.Combobox( + frame, + textvariable=self.var_expansion, + values=["auto", "classic", "turtle", "tbc", "wotlk"], + state="readonly", + width=18, + ) + exp_combo.grid(row=3, column=1, sticky="w", pady=6) + + ttk.Label(frame, text="Locale").grid(row=3, column=2, sticky="w", pady=6) + loc_combo = ttk.Combobox( + frame, + textvariable=self.var_locale, + values=["auto", "enUS", "enGB", "deDE", "frFR", "esES", "esMX", "ruRU", "koKR", "zhCN", "zhTW"], + state="normal", + width=12, + ) + loc_combo.grid(row=3, column=3, sticky="w", pady=6) + + ttk.Label(frame, text="Threads (0 = auto)").grid(row=4, column=0, sticky="w", pady=6) + ttk.Spinbox(frame, from_=0, to=256, textvariable=self.var_threads, width=8).grid( + row=4, column=1, sticky="w", pady=6 + ) + + opts = ttk.Frame(frame) + opts.grid(row=5, column=0, columnspan=4, sticky="w", pady=6) + ttk.Checkbutton(opts, text="Skip DBC extraction", variable=self.var_skip_dbc).pack(side="left", padx=(0, 12)) + ttk.Checkbutton(opts, text="Generate DBC CSV", variable=self.var_dbc_csv).pack(side="left", padx=(0, 12)) + ttk.Checkbutton(opts, text="Verify CRC", variable=self.var_verify).pack(side="left", padx=(0, 12)) + ttk.Checkbutton(opts, text="Verbose output", variable=self.var_verbose).pack(side="left", padx=(0, 12)) + + buttons = ttk.Frame(frame) + buttons.grid(row=6, column=0, columnspan=4, sticky="w", pady=12) + ttk.Button(buttons, text="Save Configuration", command=self.save_config).pack(side="left", padx=(0, 8)) + ttk.Button(buttons, text="Run Extraction", command=self.run_extraction).pack(side="left", padx=(0, 8)) + ttk.Button(buttons, text="Refresh State", command=self.refresh_state_view).pack(side="left") + + tip = ( + "Texture packs are merged into /override in active order. " + "Later packs override earlier packs file-by-file." 
+ ) + ttk.Label(frame, text=tip, foreground="#444").grid(row=7, column=0, columnspan=4, sticky="w", pady=(8, 0)) + + frame.columnconfigure(1, weight=1) + + def _build_packs_tab(self) -> None: + left = ttk.Frame(self.packs_tab) + left.pack(side="left", fill="both", expand=True) + + right = ttk.Frame(self.packs_tab) + right.pack(side="right", fill="y", padx=(12, 0)) + + self.pack_list = tk.Listbox(left, height=22) + self.pack_list.pack(fill="both", expand=True) + self.pack_list.bind("<<ListboxSelect>>", lambda _evt: self._refresh_pack_detail()) + + self.pack_detail = ScrolledText(left, height=10, wrap="word", state="disabled") + self.pack_detail.pack(fill="both", expand=False, pady=(10, 0)) + + ttk.Button(right, text="Install ZIP", width=22, command=self.install_zip).pack(pady=4) + ttk.Button(right, text="Install Folder", width=22, command=self.install_folder).pack(pady=4) + ttk.Separator(right, orient="horizontal").pack(fill="x", pady=8) + ttk.Button(right, text="Activate", width=22, command=self.activate_selected_pack).pack(pady=4) + ttk.Button(right, text="Deactivate", width=22, command=self.deactivate_selected_pack).pack(pady=4) + ttk.Button(right, text="Move Up", width=22, command=lambda: self.move_selected_pack(-1)).pack(pady=4) + ttk.Button(right, text="Move Down", width=22, command=lambda: self.move_selected_pack(1)).pack(pady=4) + ttk.Separator(right, orient="horizontal").pack(fill="x", pady=8) + ttk.Button(right, text="Rebuild Override", width=22, command=self.rebuild_override).pack(pady=4) + ttk.Button(right, text="Uninstall", width=22, command=self.uninstall_selected_pack).pack(pady=4) + + def _build_state_tab(self) -> None: + actions = ttk.Frame(self.state_tab) + actions.pack(fill="x") + ttk.Button(actions, text="Refresh", command=self.refresh_state_view).pack(side="left") + + self.state_text = ScrolledText(self.state_tab, wrap="word", state="disabled") + self.state_text.pack(fill="both", expand=True, pady=(10, 0)) + + def _build_logs_tab(self) -> None: + actions = 
ttk.Frame(self.logs_tab) + actions.pack(fill="x") + ttk.Button(actions, text="Clear Logs", command=self.clear_logs).pack(side="left") + + self.log_text = ScrolledText(self.logs_tab, wrap="none", state="disabled") + self.log_text.pack(fill="both", expand=True, pady=(10, 0)) + + def _path_row(self, frame: ttk.Frame, row: int, label: str, variable: tk.StringVar, browse_cmd) -> None: + ttk.Label(frame, text=label).grid(row=row, column=0, sticky="w", pady=6) + ttk.Entry(frame, textvariable=variable).grid(row=row, column=1, columnspan=2, sticky="ew", pady=6) + ttk.Button(frame, text="Browse", command=browse_cmd).grid(row=row, column=3, sticky="e", pady=6) + + def _pick_wow_data_dir(self) -> None: + picked = filedialog.askdirectory(title="Select WoW Data directory") + if picked: + self.var_wow_data.set(picked) + + def _pick_output_dir(self) -> None: + picked = filedialog.askdirectory(title="Select output Data directory") + if picked: + self.var_output_data.set(picked) + + def _pick_extractor(self) -> None: + picked = filedialog.askopenfilename(title="Select extractor binary or script") + if picked: + self.var_extractor.set(picked) + + def _load_vars_from_state(self) -> None: + st = self.manager.state + self.var_wow_data.set(st.wow_data_dir) + self.var_output_data.set(st.output_data_dir) + self.var_extractor.set(st.extractor_path) + self.var_expansion.set(st.expansion) + self.var_locale.set(st.locale) + self.var_skip_dbc.set(st.skip_dbc) + self.var_dbc_csv.set(st.dbc_csv) + self.var_verify.set(st.verify) + self.var_verbose.set(st.verbose) + self.var_threads.set(st.threads) + + def save_config(self) -> None: + st = self.manager.state + st.wow_data_dir = self.var_wow_data.get().strip() + st.output_data_dir = self.var_output_data.get().strip() + st.extractor_path = self.var_extractor.get().strip() + st.expansion = self.var_expansion.get().strip() or "auto" + st.locale = self.var_locale.get().strip() or "auto" + st.skip_dbc = bool(self.var_skip_dbc.get()) + st.dbc_csv = 
bool(self.var_dbc_csv.get()) + st.verify = bool(self.var_verify.get()) + st.verbose = bool(self.var_verbose.get()) + st.threads = int(self.var_threads.get()) + self.manager.save_state() + self.status_var.set("Configuration saved") + + def _selected_pack(self) -> PackInfo | None: + sel = self.pack_list.curselection() + if not sel: + return None + idx = int(sel[0]) + if idx < 0 or idx >= len(self.manager.state.packs): + return None + return self.manager.state.packs[idx] + + def refresh_pack_list(self) -> None: + active = self.manager.state.active_pack_ids + self.pack_list.delete(0, tk.END) + for pack in self.manager.state.packs: + marker = "" + if pack.pack_id in active: + marker = f"[active #{active.index(pack.pack_id) + 1}] " + self.pack_list.insert(tk.END, f"{marker}{pack.name}") + self._refresh_pack_detail() + + def _refresh_pack_detail(self) -> None: + pack = self._selected_pack() + self.pack_detail.configure(state="normal") + self.pack_detail.delete("1.0", tk.END) + if pack is None: + self.pack_detail.insert(tk.END, "Select a texture pack to see details.") + self.pack_detail.configure(state="disabled") + return + + active = "yes" if pack.pack_id in self.manager.state.active_pack_ids else "no" + order = "-" + if pack.pack_id in self.manager.state.active_pack_ids: + order = str(self.manager.state.active_pack_ids.index(pack.pack_id) + 1) + lines = [ + f"Name: {pack.name}", + f"Active: {active}", + f"Order: {order}", + f"Files: {pack.file_count}", + f"Installed at: {pack.installed_at}", + f"Installed dir: {pack.installed_dir}", + f"Source: {pack.source}", + ] + self.pack_detail.insert(tk.END, "\n".join(lines)) + self.pack_detail.configure(state="disabled") + + def install_zip(self) -> None: + zip_path = filedialog.askopenfilename( + title="Choose texture pack ZIP", + filetypes=[("ZIP archives", "*.zip"), ("All files", "*.*")], + ) + if not zip_path: + return + try: + info = self.manager.install_pack_from_zip(Path(zip_path)) + except Exception as exc: # pylint: 
disable=broad-except + messagebox.showerror("Install failed", str(exc)) + return + + self.refresh_pack_list() + self.refresh_state_view() + self.status_var.set(f"Installed pack: {info.name}") + + def install_folder(self) -> None: + folder = filedialog.askdirectory(title="Choose texture pack folder") + if not folder: + return + try: + info = self.manager.install_pack_from_folder(Path(folder)) + except Exception as exc: # pylint: disable=broad-except + messagebox.showerror("Install failed", str(exc)) + return + + self.refresh_pack_list() + self.refresh_state_view() + self.status_var.set(f"Installed pack: {info.name}") + + def activate_selected_pack(self) -> None: + pack = self._selected_pack() + if pack is None: + return + self.manager.set_pack_active(pack.pack_id, True) + self.refresh_pack_list() + self.refresh_state_view() + self.status_var.set(f"Activated pack: {pack.name}") + + def deactivate_selected_pack(self) -> None: + pack = self._selected_pack() + if pack is None: + return + self.manager.set_pack_active(pack.pack_id, False) + self.refresh_pack_list() + self.refresh_state_view() + self.status_var.set(f"Deactivated pack: {pack.name}") + + def move_selected_pack(self, delta: int) -> None: + pack = self._selected_pack() + if pack is None: + return + self.manager.move_active_pack(pack.pack_id, delta) + self.refresh_pack_list() + self.refresh_state_view() + self.status_var.set(f"Reordered active pack: {pack.name}") + + def uninstall_selected_pack(self) -> None: + pack = self._selected_pack() + if pack is None: + return + ok = messagebox.askyesno("Confirm uninstall", f"Uninstall texture pack '{pack.name}'?") + if not ok: + return + self.manager.uninstall_pack(pack.pack_id) + self.refresh_pack_list() + self.refresh_state_view() + self.status_var.set(f"Uninstalled pack: {pack.name}") + + def rebuild_override(self) -> None: + try: + report = self.manager.rebuild_override() + except Exception as exc: # pylint: disable=broad-except + messagebox.showerror("Override 
rebuild failed", str(exc)) + return + self.refresh_state_view() + self.status_var.set( + f"Override rebuilt: {report['copied']} files copied, {report['replaced']} replaced" + ) + self._append_log( + f"[{self.manager.now_str()}] Override rebuild complete: {report['copied']} copied, {report['replaced']} replaced" + ) + + def clear_logs(self) -> None: + self.log_text.configure(state="normal") + self.log_text.delete("1.0", tk.END) + self.log_text.configure(state="disabled") + + def _append_log(self, line: str) -> None: + self.log_text.configure(state="normal") + self.log_text.insert(tk.END, line + "\n") + self.log_text.see(tk.END) + self.log_text.configure(state="disabled") + + def _poll_logs(self) -> None: + while True: + try: + line = self.log_queue.get_nowait() + except queue.Empty: + break + self._append_log(line) + self.root.after(120, self._poll_logs) + + def run_extraction(self) -> None: + if self.proc_running: + messagebox.showinfo("Extraction running", "An extraction is already running.") + return + + self.save_config() + + try: + cmd = self.manager.build_extract_command() + except ValueError as exc: + messagebox.showerror("Cannot run extraction", str(exc)) + return + + def worker() -> None: + self.proc_running = True + started = self.manager.now_str() + self.log_queue.put(f"[{started}] Running: {' '.join(cmd)}") + self.root.after(0, lambda: self.status_var.set("Extraction running...")) + + ok = False + try: + process = subprocess.Popen( + cmd, + cwd=str(ROOT_DIR), + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + bufsize=1, + ) + assert process.stdout is not None + for line in process.stdout: + self.log_queue.put(line.rstrip()) + rc = process.wait() + ok = rc == 0 + if not ok: + self.log_queue.put(f"Extractor exited with status {rc}") + except Exception as exc: # pylint: disable=broad-except + self.log_queue.put(f"Extraction error: {exc}") + finally: + self.manager.state.last_extract_at = self.manager.now_str() + 
self.manager.state.last_extract_ok = ok + self.manager.state.last_extract_command = " ".join(cmd) + self.manager.save_state() + self.proc_running = False + self.root.after(0, self.refresh_state_view) + self.root.after( + 0, lambda: self.status_var.set("Extraction complete" if ok else "Extraction failed") + ) + + self.proc_thread = threading.Thread(target=worker, daemon=True) + self.proc_thread.start() + + def refresh_state_view(self) -> None: + summary = self.manager.summarize_state() + + active_names = [] + pack_map = {p.pack_id: p.name for p in self.manager.state.packs} + for pid in self.manager.state.active_pack_ids: + active_names.append(pack_map.get(pid, f"<missing:{pid}>")) + + lines = [ + "WoWee Asset Pipeline State", + "", + f"Output directory: {summary['output_dir']}", + f"Output exists: {summary['output_exists']}", + f"manifest.json present: {summary['manifest_exists']}", + f"Manifest entries: {summary['manifest_entries']}", + "", + f"Override folder present: {summary['override_exists']}", + f"Override file count: {summary['override_files']}", + f"Last override build: {summary['last_override_build_at']}", + "", + f"Installed texture packs: {summary['packs_installed']}", + f"Active texture packs: {summary['packs_active']}", + "Active order:", + ] + if active_names: + for i, name in enumerate(active_names, start=1): + lines.append(f" {i}. 
{name}") + else: + lines.append(" (none)") + + lines.extend( + [ + "", + f"Last extraction time: {summary['last_extract_at']}", + f"Last extraction success: {summary['last_extract_ok']}", + f"Last extraction command: {self.manager.state.last_extract_command or '(none)'}", + "", + "Pipeline files:", + f" State file: {STATE_FILE}", + f" Packs dir: {PIPELINE_DIR / 'packs'}", + ] + ) + + self.state_text.configure(state="normal") + self.state_text.delete("1.0", tk.END) + self.state_text.insert(tk.END, "\n".join(lines)) + self.state_text.configure(state="disabled") + + +def main() -> None: + root = tk.Tk() + AssetPipelineGUI(root) + root.mainloop() + + +if __name__ == "__main__": + main() From f95770720f6541a3f47e68fbe4dbcdc4360fa9e1 Mon Sep 17 00:00:00 2001 From: Kelsi Date: Mon, 23 Feb 2026 20:06:41 -0800 Subject: [PATCH 03/10] Fix security, bugs, and UX in asset pipeline GUI - Fix zip slip vulnerability: validate extracted paths stay within target - Fix redundant mkdir before rmtree in rebuild_override() - Add build/asset_extract and Windows .ps1 fallback to extractor search - Preserve pack list selection across refreshes - Add Cancel Extraction button with process.terminate() - Run override rebuild in background thread to avoid UI freeze - Fix locale combobox state to readonly - Add asset_pipeline/ to .gitignore - Make script executable --- .gitignore | 3 ++ tools/asset_pipeline_gui.py | 67 +++++++++++++++++++++++++++---------- 2 files changed, 52 insertions(+), 18 deletions(-) mode change 100644 => 100755 tools/asset_pipeline_gui.py diff --git a/.gitignore b/.gitignore index 101f6f98..f95c18ed 100644 --- a/.gitignore +++ b/.gitignore @@ -89,6 +89,9 @@ Data/expansions/*/overlay/ Data/hd/ ingest/ +# Asset pipeline state and texture packs +asset_pipeline/ + # Local texture dumps / extracted art should never be committed assets/textures/ node_modules/ diff --git a/tools/asset_pipeline_gui.py b/tools/asset_pipeline_gui.py old mode 100644 new mode 100755 index 
194ff7bd..a3a3c3f7 --- a/tools/asset_pipeline_gui.py +++ b/tools/asset_pipeline_gui.py @@ -137,7 +137,11 @@ class PipelineManager: target.mkdir(parents=True, exist_ok=False) with zipfile.ZipFile(zip_path, "r") as zf: - zf.extractall(target) + for member in zf.infolist(): + member_path = (target / member.filename).resolve() + if not str(member_path).startswith(str(target.resolve()) + "/") and member_path != target.resolve(): + raise ValueError(f"Zip slip detected: {member.filename!r} escapes target directory") + zf.extract(member, target) data_root = self.find_data_root(target) info = PackInfo( @@ -202,8 +206,6 @@ class PipelineManager: def rebuild_override(self) -> dict[str, int]: out_dir = Path(self.state.output_data_dir) override_dir = out_dir / "override" - override_dir.mkdir(parents=True, exist_ok=True) - if override_dir.exists(): shutil.rmtree(override_dir) override_dir.mkdir(parents=True, exist_ok=True) @@ -243,15 +245,20 @@ class PipelineManager: if path.exists() and path.is_file(): return [str(path)] - ext = ".exe" if platform.system().lower().startswith("win") else "" + is_win = platform.system().lower().startswith("win") + ext = ".exe" if is_win else "" for candidate in [ ROOT_DIR / "build" / "bin" / f"asset_extract{ext}", + ROOT_DIR / "build" / f"asset_extract{ext}", ROOT_DIR / "bin" / f"asset_extract{ext}", ]: if candidate.exists(): return [str(candidate)] - if platform.system().lower().startswith("win"): + if is_win: + ps_script = ROOT_DIR / "extract_assets.ps1" + if ps_script.exists(): + return ["powershell", "-ExecutionPolicy", "Bypass", "-File", str(ps_script)] return None shell_script = ROOT_DIR / "extract_assets.sh" @@ -333,6 +340,7 @@ class AssetPipelineGUI: self.log_queue: queue.Queue[str] = queue.Queue() self.proc_thread: threading.Thread | None = None + self.proc_process: subprocess.Popen | None = None self.proc_running = False self.root.title("WoWee Asset Pipeline") @@ -403,7 +411,7 @@ class AssetPipelineGUI: frame, 
textvariable=self.var_locale, values=["auto", "enUS", "enGB", "deDE", "frFR", "esES", "esMX", "ruRU", "koKR", "zhCN", "zhTW"], - state="normal", + state="readonly", width=12, ) loc_combo.grid(row=3, column=3, sticky="w", pady=6) @@ -424,6 +432,8 @@ class AssetPipelineGUI: buttons.grid(row=6, column=0, columnspan=4, sticky="w", pady=12) ttk.Button(buttons, text="Save Configuration", command=self.save_config).pack(side="left", padx=(0, 8)) ttk.Button(buttons, text="Run Extraction", command=self.run_extraction).pack(side="left", padx=(0, 8)) + self.cancel_btn = ttk.Button(buttons, text="Cancel Extraction", command=self.cancel_extraction, state="disabled") + self.cancel_btn.pack(side="left", padx=(0, 8)) ttk.Button(buttons, text="Refresh State", command=self.refresh_state_view).pack(side="left") tip = ( @@ -533,6 +543,7 @@ class AssetPipelineGUI: return self.manager.state.packs[idx] def refresh_pack_list(self) -> None: + prev_sel = self.pack_list.curselection() active = self.manager.state.active_pack_ids self.pack_list.delete(0, tk.END) for pack in self.manager.state.packs: @@ -540,6 +551,11 @@ class AssetPipelineGUI: if pack.pack_id in active: marker = f"[active #{active.index(pack.pack_id) + 1}] " self.pack_list.insert(tk.END, f"{marker}{pack.name}") + # Restore previous selection if still valid. 
+ for idx in prev_sel: + if 0 <= idx < self.pack_list.size(): + self.pack_list.selection_set(idx) + self.pack_list.see(idx) self._refresh_pack_detail() def _refresh_pack_detail(self) -> None: @@ -638,18 +654,22 @@ class AssetPipelineGUI: self.status_var.set(f"Uninstalled pack: {pack.name}") def rebuild_override(self) -> None: - try: - report = self.manager.rebuild_override() - except Exception as exc: # pylint: disable=broad-except - messagebox.showerror("Override rebuild failed", str(exc)) - return - self.refresh_state_view() - self.status_var.set( - f"Override rebuilt: {report['copied']} files copied, {report['replaced']} replaced" - ) - self._append_log( - f"[{self.manager.now_str()}] Override rebuild complete: {report['copied']} copied, {report['replaced']} replaced" - ) + self.status_var.set("Rebuilding override...") + self.log_queue.put(f"[{self.manager.now_str()}] Starting override rebuild...") + + def worker() -> None: + try: + report = self.manager.rebuild_override() + msg = f"Override rebuilt: {report['copied']} files copied, {report['replaced']} replaced" + self.log_queue.put(f"[{self.manager.now_str()}] Override rebuild complete: {report['copied']} copied, {report['replaced']} replaced") + self.root.after(0, lambda: self.status_var.set(msg)) + except Exception as exc: # pylint: disable=broad-except + self.log_queue.put(f"[{self.manager.now_str()}] Override rebuild failed: {exc}") + self.root.after(0, lambda: self.status_var.set("Override rebuild failed")) + finally: + self.root.after(0, self.refresh_state_view) + + threading.Thread(target=worker, daemon=True).start() def clear_logs(self) -> None: self.log_text.configure(state="normal") @@ -671,6 +691,12 @@ class AssetPipelineGUI: self._append_log(line) self.root.after(120, self._poll_logs) + def cancel_extraction(self) -> None: + if self.proc_process is not None: + self.proc_process.terminate() + self.log_queue.put(f"[{self.manager.now_str()}] Extraction cancelled by user") + 
self.status_var.set("Extraction cancelled") + def run_extraction(self) -> None: if self.proc_running: messagebox.showinfo("Extraction running", "An extraction is already running.") @@ -684,6 +710,8 @@ class AssetPipelineGUI: messagebox.showerror("Cannot run extraction", str(exc)) return + self.cancel_btn.configure(state="normal") + def worker() -> None: self.proc_running = True started = self.manager.now_str() @@ -700,6 +728,7 @@ class AssetPipelineGUI: text=True, bufsize=1, ) + self.proc_process = process assert process.stdout is not None for line in process.stdout: self.log_queue.put(line.rstrip()) @@ -710,12 +739,14 @@ class AssetPipelineGUI: except Exception as exc: # pylint: disable=broad-except self.log_queue.put(f"Extraction error: {exc}") finally: + self.proc_process = None self.manager.state.last_extract_at = self.manager.now_str() self.manager.state.last_extract_ok = ok self.manager.state.last_extract_command = " ".join(cmd) self.manager.save_state() self.proc_running = False self.root.after(0, self.refresh_state_view) + self.root.after(0, lambda: self.cancel_btn.configure(state="disabled")) self.root.after( 0, lambda: self.status_var.set("Extraction complete" if ok else "Extraction failed") ) From ef04dde2fe0b9b7a341b20b356b37b17013bc898 Mon Sep 17 00:00:00 2001 From: Kelsi Date: Mon, 23 Feb 2026 20:09:26 -0800 Subject: [PATCH 04/10] Update asset pipeline GUI documentation Cover all new features: cancel extraction, background override rebuild, zip-slip protection, extractor auto-detection order, readonly dropdowns, and selection preservation. Reorganize into per-tab sections with tables. 
--- docs/asset-pipeline-gui.md | 112 ++++++++++++++++++++++++++++++++----- 1 file changed, 99 insertions(+), 13 deletions(-) diff --git a/docs/asset-pipeline-gui.md b/docs/asset-pipeline-gui.md index 708bb93c..d82137ca 100644 --- a/docs/asset-pipeline-gui.md +++ b/docs/asset-pipeline-gui.md @@ -6,6 +6,8 @@ WoWee includes a Python GUI for extraction and texture-pack management: python3 tools/asset_pipeline_gui.py ``` +The script is also executable directly: `./tools/asset_pipeline_gui.py` + ## Supported Platforms - Linux @@ -22,13 +24,75 @@ The app uses Python's built-in `tkinter` module. If `tkinter` is missing, instal ## What It Does -- Runs `asset_extract` (or `extract_assets.sh` fallback on non-Windows) +- Runs `asset_extract` (or shell/PowerShell script fallback) to extract MPQ data - Saves extraction config in `asset_pipeline/state.json` -- Installs texture packs from ZIP or folders +- Installs texture packs from ZIP or folders (with zip-slip protection) - Lets users activate/deactivate packs and reorder active pack priority -- Rebuilds `Data/override` from active pack order +- Rebuilds `Data/override` from active pack order (runs in background thread) - Shows current data state (`manifest.json`, entry count, override file count, last runs) +## Configuration Tab + +### Path Settings + +| Field | Description | +|-------|-------------| +| **WoW Data (MPQ source)** | Path to your WoW client's `Data/` folder containing `.MPQ` files | +| **Output Data directory** | Where extracted assets land. Defaults to `/Data` | +| **Extractor binary/script** | Optional. Leave blank for auto-detection (see below) | + +### Extractor Auto-Detection + +When no extractor path is configured, the GUI searches in order: + +1. `build/bin/asset_extract` — CMake build with bin subdirectory +2. `build/asset_extract` — CMake build without bin subdirectory +3. `bin/asset_extract` — standalone binary +4. 
**Windows only**: `extract_assets.ps1` — invoked via `powershell -ExecutionPolicy Bypass -File` +5. **Linux/macOS only**: `extract_assets.sh` — invoked via `bash` + +On Windows, `.exe` is appended to binary candidates automatically. + +### Extraction Options + +| Option | Description | +|--------|-------------| +| **Expansion** | `auto`, `classic`, `turtle`, `tbc`, or `wotlk`. Read-only dropdown. | +| **Locale** | `auto`, `enUS`, `enGB`, `deDE`, `frFR`, etc. Read-only dropdown. | +| **Threads** | Worker thread count. 0 = auto (uses all cores). | +| **Skip DBC extraction** | Skip database client files (faster if you only want textures). | +| **Generate DBC CSV** | Output human-readable CSV alongside binary DBC files. | +| **Verify CRC** | Check file integrity during extraction (slower but safer). | +| **Verbose output** | More detail in the Logs tab. | + +### Buttons + +| Button | Action | +|--------|--------| +| **Save Configuration** | Writes all settings to `asset_pipeline/state.json`. | +| **Run Extraction** | Starts the extractor in a background thread. Output streams to the Logs tab. | +| **Cancel Extraction** | Terminates a running extraction. Grayed out when idle, active during extraction. | +| **Refresh State** | Reloads the Current State tab. | + +## Texture Packs Tab + +### Installing Packs + +- **Install ZIP**: Opens a file picker for `.zip` archives. Each member path is validated against zip-slip attacks before extraction. +- **Install Folder**: Opens a folder picker and copies the entire folder into the pipeline's internal pack storage. + +### Managing Packs + +| Button | Action | +|--------|--------| +| **Activate** | Adds the selected pack to the active override list. | +| **Deactivate** | Removes the selected pack from the active list (stays installed). | +| **Move Up / Move Down** | Changes priority order. Pack #1 is the base layer; higher numbers override lower. 
| +| **Rebuild Override** | Merges all active packs into `Data/override/` in a background thread. UI stays responsive. | +| **Uninstall** | Removes the pack from disk after confirmation. | + +Pack list selection is preserved across refreshes — you can activate a pack and immediately reorder it without re-selecting. + ## Pack Format Supported pack layouts: @@ -36,21 +100,43 @@ Supported pack layouts: 1. `PackName/Data/...` 2. `PackName/data/...` 3. `PackName/...` where top folders include game folders (`Interface`, `World`, `Character`, `Textures`, `Sound`) +4. Single wrapper directory containing any of the above When multiple active packs contain the same file path, **later packs in active order win**. +## Current State Tab + +Shows a summary of pipeline state: + +- Output directory existence and `manifest.json` entry count +- Override folder file count and last build timestamp +- Installed and active pack counts with priority order +- Last extraction time, success/failure, and the exact command used +- Paths to the state file and packs directory + +Click **Refresh** to reload, or it auto-refreshes after operations. + +## Logs Tab + +All extraction output, override rebuild messages, cancellations, and errors stream here in real time via a log queue polled every 120ms. Click **Clear Logs** to reset. + ## State Files and Folders -- Pipeline state: `asset_pipeline/state.json` -- Installed packs: `asset_pipeline/packs//` -- Active merged override output: `/override/` +| Path | Description | +|------|-------------| +| `asset_pipeline/state.json` | All configuration, pack metadata, and extraction history | +| `asset_pipeline/packs//` | Installed pack contents (one directory per pack) | +| `/override/` | Merged output from active packs | + +The `asset_pipeline/` directory is gitignored. ## Typical Workflow -1. Open the GUI. -2. Set WoW MPQ Data source and output Data path. -3. Run extraction. -4. Install texture packs. -5. Activate and order packs. -6. 
Click **Rebuild Override**. -7. Launch wowee with `WOW_DATA_PATH` pointing at your output Data path if needed. +1. Launch: `python3 tools/asset_pipeline_gui.py` +2. **Configuration tab**: Browse to your WoW `Data/` folder, pick expansion, click **Save Configuration**. +3. Click **Run Extraction** — watch progress in the **Logs** tab. Cancel with **Cancel Extraction** if needed. +4. Switch to **Texture Packs** tab. Click **Install ZIP** and pick a texture pack. +5. Select the pack and click **Activate**. +6. (Optional) Install more packs, activate them, and use **Move Up/Down** to set priority. +7. Click **Rebuild Override** — the status bar shows progress, and the result appears in Logs. +8. Run wowee — it loads override textures on top of the extracted base assets. From 739ae5b56967c6ed97f23a9c9ff302aa243e94f3 Mon Sep 17 00:00:00 2001 From: Kelsi Date: Mon, 23 Feb 2026 20:35:32 -0800 Subject: [PATCH 05/10] Add Asset Browser tab to pipeline GUI with inline previews New tab lets users explore extracted assets visually: BLP textures rendered via blp_convert+Pillow, M2/WMO wireframes with mouse-drag rotation and zoom, DBC/CSV tables with named columns from dbc_layouts.json, ADT heightmap grids, text file viewer, audio metadata, and hex dumps. Directory tree lazy-loads from manifest.json with search and file-type filtering. --- docs/asset-pipeline-gui.md | 54 +- tools/asset_pipeline_gui.py | 968 ++++++++++++++++++++++++++++++++++++ 2 files changed, 1021 insertions(+), 1 deletion(-) diff --git a/docs/asset-pipeline-gui.md b/docs/asset-pipeline-gui.md index d82137ca..08c66711 100644 --- a/docs/asset-pipeline-gui.md +++ b/docs/asset-pipeline-gui.md @@ -30,6 +30,7 @@ The app uses Python's built-in `tkinter` module. 
If `tkinter` is missing, instal - Lets users activate/deactivate packs and reorder active pack priority - Rebuilds `Data/override` from active pack order (runs in background thread) - Shows current data state (`manifest.json`, entry count, override file count, last runs) +- Browses extracted assets with inline previews (images, 3D wireframes, data tables, text, hex dumps) ## Configuration Tab @@ -104,6 +105,55 @@ Supported pack layouts: When multiple active packs contain the same file path, **later packs in active order win**. +## Asset Browser Tab + +Browse and preview every extracted asset visually. Requires a completed extraction with a `manifest.json` in the output directory. + +### Layout + +- **Top bar**: Search entry, file type filter dropdown, Search/Reset buttons, result count +- **Left panel** (~30%): Directory tree built lazily from `manifest.json` +- **Right panel** (~70%): Preview area that adapts to the selected file type +- **Bottom bar**: File path, size, and CRC from manifest + +### Search and Filtering + +Type a substring into the search box and/or pick a file type from the dropdown, then click **Search**. The tree repopulates with matching results (capped at 5000 entries). Click **Reset** to restore the full tree. + +File type filters: All, BLP, M2, WMO, DBC, ADT, Audio, Text. + +### Preview Types + +| Type | What You See | +|------|--------------| +| **BLP** | Converted to PNG via `blp_convert --to-png`, displayed as an image. Cached in `asset_pipeline/preview_cache/`. | +| **M2** | Wireframe rendering of model vertices and triangles on a Canvas. Drag to rotate, scroll to zoom. | +| **WMO** | Wireframe of group geometry (MOVT/MOVI chunks). Root WMOs auto-load the `_000` group file. | +| **CSV** (DBC exports) | Scrollable table with column names from `dbc_layouts.json`. First 500 rows loaded, click "Load more" for the rest. | +| **ADT** | Colored heightmap grid parsed from MCNK chunks. 
| +| **Text** (XML, LUA, JSON, HTML, TOC) | Syntax-highlighted scrollable text view. | +| **Audio** (WAV, MP3, OGG) | Metadata display — format, channels, sample rate, duration (WAV). | +| **Other** | Hex dump of the first 512 bytes. | + +### Wireframe Controls + +- **Left-click drag**: Rotate the model (azimuth + elevation) +- **Scroll wheel**: Zoom in/out +- Depth coloring: closer geometry renders brighter + +### Optional Dependencies + +| Dependency | Required For | Fallback | +|------------|-------------|----------| +| [Pillow](https://pypi.org/project/Pillow/) (`pip install Pillow`) | BLP image preview | Shows install instructions | +| `blp_convert` (built with project) | BLP → PNG conversion | Shows "not found" message | + +All other previews (wireframe, table, text, hex) work without any extra dependencies. + +### Cache + +BLP previews are cached as PNG files in `asset_pipeline/preview_cache/` keyed by path and file size. Delete this directory to clear the cache. + ## Current State Tab Shows a summary of pipeline state: @@ -126,6 +176,7 @@ All extraction output, override rebuild messages, cancellations, and errors stre |------|-------------| | `asset_pipeline/state.json` | All configuration, pack metadata, and extraction history | | `asset_pipeline/packs//` | Installed pack contents (one directory per pack) | +| `asset_pipeline/preview_cache/` | Cached BLP → PNG conversions for the Asset Browser | | `/override/` | Merged output from active packs | The `asset_pipeline/` directory is gitignored. @@ -139,4 +190,5 @@ The `asset_pipeline/` directory is gitignored. 5. Select the pack and click **Activate**. 6. (Optional) Install more packs, activate them, and use **Move Up/Down** to set priority. 7. Click **Rebuild Override** — the status bar shows progress, and the result appears in Logs. -8. Run wowee — it loads override textures on top of the extracted base assets. +8. 
(Optional) Switch to **Asset Browser** to explore extracted files — preview textures, inspect models, browse DBC tables. +9. Run wowee — it loads override textures on top of the extracted base assets. diff --git a/tools/asset_pipeline_gui.py b/tools/asset_pipeline_gui.py index a3a3c3f7..0e87a7d8 100755 --- a/tools/asset_pipeline_gui.py +++ b/tools/asset_pipeline_gui.py @@ -7,11 +7,16 @@ that are merged into Data/override in deterministic order. from __future__ import annotations +import hashlib import json +import math +import os import platform import queue import shutil +import struct import subprocess +import tempfile import threading import time import zipfile @@ -24,6 +29,12 @@ import tkinter as tk from tkinter import filedialog, messagebox, ttk from tkinter.scrolledtext import ScrolledText +try: + from PIL import Image, ImageTk + HAS_PILLOW = True +except ImportError: + HAS_PILLOW = False + ROOT_DIR = Path(__file__).resolve().parents[1] PIPELINE_DIR = ROOT_DIR / "asset_pipeline" @@ -365,16 +376,19 @@ class AssetPipelineGUI: self.cfg_tab = ttk.Frame(self.notebook, padding=10) self.packs_tab = ttk.Frame(self.notebook, padding=10) + self.browser_tab = ttk.Frame(self.notebook, padding=4) self.state_tab = ttk.Frame(self.notebook, padding=10) self.logs_tab = ttk.Frame(self.notebook, padding=10) self.notebook.add(self.cfg_tab, text="Configuration") self.notebook.add(self.packs_tab, text="Texture Packs") + self.notebook.add(self.browser_tab, text="Asset Browser") self.notebook.add(self.state_tab, text="Current State") self.notebook.add(self.logs_tab, text="Logs") self._build_config_tab() self._build_packs_tab() + self._build_browser_tab() self._build_state_tab() self._build_logs_tab() @@ -469,6 +483,960 @@ class AssetPipelineGUI: ttk.Button(right, text="Rebuild Override", width=22, command=self.rebuild_override).pack(pady=4) ttk.Button(right, text="Uninstall", width=22, command=self.uninstall_selected_pack).pack(pady=4) + # ── Asset Browser Tab 
────────────────────────────────────────────── + + def _build_browser_tab(self) -> None: + self._browser_manifest: dict[str, dict] = {} + self._browser_manifest_list: list[str] = [] + self._browser_tree_populated: set[str] = set() + self._browser_photo: Any = None # prevent GC of PhotoImage + self._browser_wireframe_verts: list[tuple[float, float, float]] = [] + self._browser_wireframe_tris: list[tuple[int, int, int]] = [] + self._browser_az = 0.0 + self._browser_el = 0.3 + self._browser_zoom = 1.0 + self._browser_drag_start: tuple[int, int] | None = None + self._browser_dbc_rows: list[list[str]] = [] + self._browser_dbc_shown = 0 + + # Top bar: search + filter + top_bar = ttk.Frame(self.browser_tab) + top_bar.pack(fill="x", pady=(0, 4)) + + ttk.Label(top_bar, text="Search:").pack(side="left") + self._browser_search_var = tk.StringVar() + search_entry = ttk.Entry(top_bar, textvariable=self._browser_search_var, width=40) + search_entry.pack(side="left", padx=(4, 8)) + search_entry.bind("", lambda _: self._browser_do_search()) + + ttk.Label(top_bar, text="Type:").pack(side="left") + self._browser_type_var = tk.StringVar(value="All") + type_combo = ttk.Combobox( + top_bar, + textvariable=self._browser_type_var, + values=["All", "BLP", "M2", "WMO", "DBC", "ADT", "Audio", "Text"], + state="readonly", + width=8, + ) + type_combo.pack(side="left", padx=(4, 8)) + + ttk.Button(top_bar, text="Search", command=self._browser_do_search).pack(side="left", padx=(0, 4)) + ttk.Button(top_bar, text="Reset", command=self._browser_reset_search).pack(side="left") + + self._browser_count_var = tk.StringVar(value="") + ttk.Label(top_bar, textvariable=self._browser_count_var).pack(side="right") + + # Main paned: left tree + right preview + paned = ttk.PanedWindow(self.browser_tab, orient="horizontal") + paned.pack(fill="both", expand=True) + + # Left: directory tree + left_frame = ttk.Frame(paned) + paned.add(left_frame, weight=1) + + tree_scroll = ttk.Scrollbar(left_frame, 
orient="vertical") + self._browser_tree = ttk.Treeview(left_frame, show="tree", yscrollcommand=tree_scroll.set) + tree_scroll.config(command=self._browser_tree.yview) + self._browser_tree.pack(side="left", fill="both", expand=True) + tree_scroll.pack(side="right", fill="y") + + self._browser_tree.bind("<>", self._browser_on_expand) + self._browser_tree.bind("<>", self._browser_on_select) + + # Right: preview area + right_frame = ttk.Frame(paned) + paned.add(right_frame, weight=3) + + self._browser_preview_frame = ttk.Frame(right_frame) + self._browser_preview_frame.pack(fill="both", expand=True) + + # Bottom bar: file info + self._browser_info_var = tk.StringVar(value="Select a file to preview") + info_bar = ttk.Label(self.browser_tab, textvariable=self._browser_info_var, anchor="w", relief="sunken") + info_bar.pack(fill="x", pady=(4, 0)) + + # Load manifest + self._browser_load_manifest() + + def _browser_load_manifest(self) -> None: + output_dir = Path(self.manager.state.output_data_dir) + manifest_path = output_dir / "manifest.json" + if not manifest_path.exists(): + self._browser_count_var.set("No manifest.json found") + return + + try: + doc = json.loads(manifest_path.read_text(encoding="utf-8")) + entries = doc.get("entries", {}) + if isinstance(entries, dict): + self._browser_manifest = entries + self._browser_manifest_list = sorted(entries.keys(), key=str.lower) + self._browser_count_var.set(f"{len(entries)} entries") + except (OSError, ValueError, TypeError) as exc: + self._browser_count_var.set(f"Manifest error: {exc}") + return + + self._browser_populate_tree_root() + + def _browser_populate_tree_root(self) -> None: + self._browser_tree.delete(*self._browser_tree.get_children()) + self._browser_tree_populated.clear() + + # Build top-level directories + top_dirs: set[str] = set() + for path in self._browser_manifest_list: + parts = path.split("/") + if len(parts) > 1: + top_dirs.add(parts[0]) + else: + top_dirs.add(path) + + for name in sorted(top_dirs, 
key=str.lower): + # Check if this is a directory (has children) or a file + is_dir = any(p.startswith(name + "/") for p in self._browser_manifest_list) + if is_dir: + node = self._browser_tree.insert("", "end", iid=name, text=name, open=False) + # Insert dummy child for lazy loading + self._browser_tree.insert(node, "end", iid=name + "/__dummy__", text="") + else: + self._browser_tree.insert("", "end", iid=name, text=name) + + def _browser_on_expand(self, event: Any) -> None: + node = self._browser_tree.focus() + if not node or node in self._browser_tree_populated: + return + self._browser_tree_populated.add(node) + + # Remove dummy child + dummy = node + "/__dummy__" + if self._browser_tree.exists(dummy): + self._browser_tree.delete(dummy) + + prefix = node + "/" + # Collect immediate children + child_dirs: set[str] = set() + child_files: list[str] = [] + + for path in self._browser_manifest_list: + if not path.startswith(prefix): + continue + remainder = path[len(prefix):] + parts = remainder.split("/") + if len(parts) > 1: + child_dirs.add(parts[0]) + else: + child_files.append(parts[0]) + + for d in sorted(child_dirs, key=str.lower): + child_id = prefix + d + if not self._browser_tree.exists(child_id): + n = self._browser_tree.insert(node, "end", iid=child_id, text=d, open=False) + self._browser_tree.insert(n, "end", iid=child_id + "/__dummy__", text="") + + for f in sorted(child_files, key=str.lower): + child_id = prefix + f + if not self._browser_tree.exists(child_id): + self._browser_tree.insert(node, "end", iid=child_id, text=f) + + def _browser_on_select(self, event: Any) -> None: + sel = self._browser_tree.selection() + if not sel: + return + path = sel[0] + entry = self._browser_manifest.get(path) + if entry is None: + # It's a directory node + self._browser_info_var.set(f"Directory: {path}") + return + self._browser_preview_file(path, entry) + + def _browser_do_search(self) -> None: + query = self._browser_search_var.get().strip().lower() + type_filter 
= self._browser_type_var.get() + + type_exts: dict[str, set[str]] = { + "BLP": {".blp"}, + "M2": {".m2"}, + "WMO": {".wmo"}, + "DBC": {".dbc", ".csv"}, + "ADT": {".adt"}, + "Audio": {".wav", ".mp3", ".ogg"}, + "Text": {".xml", ".lua", ".json", ".html", ".toc", ".txt", ".wtf"}, + } + + results: list[str] = [] + exts = type_exts.get(type_filter) + for path in self._browser_manifest_list: + if exts: + ext = os.path.splitext(path)[1].lower() + if ext not in exts: + continue + if query and query not in path.lower(): + continue + results.append(path) + + # Repopulate tree with filtered results + self._browser_tree.delete(*self._browser_tree.get_children()) + self._browser_tree_populated.clear() + + if len(results) > 5000: + # Too many results — show directory structure + self._browser_count_var.set(f"{len(results)} results (showing first 5000)") + results = results[:5000] + else: + self._browser_count_var.set(f"{len(results)} results") + + # Build tree from filtered results + dirs_added: set[str] = set() + for path in results: + parts = path.split("/") + # Ensure parent directories exist + for i in range(1, len(parts)): + dir_id = "/".join(parts[:i]) + if dir_id not in dirs_added: + dirs_added.add(dir_id) + parent_id = "/".join(parts[:i - 1]) if i > 1 else "" + if not self._browser_tree.exists(dir_id): + self._browser_tree.insert(parent_id, "end", iid=dir_id, text=parts[i - 1], open=True) + # Insert file + parent_id = "/".join(parts[:-1]) if len(parts) > 1 else "" + if not self._browser_tree.exists(path): + self._browser_tree.insert(parent_id, "end", iid=path, text=parts[-1]) + self._browser_tree_populated.add(parent_id) + + def _browser_reset_search(self) -> None: + self._browser_search_var.set("") + self._browser_type_var.set("All") + self._browser_populate_tree_root() + self._browser_count_var.set(f"{len(self._browser_manifest)} entries") + + def _browser_clear_preview(self) -> None: + for widget in self._browser_preview_frame.winfo_children(): + widget.destroy() + 
self._browser_photo = None + + def _browser_file_ext(self, path: str) -> str: + return os.path.splitext(path)[1].lower() + + def _browser_resolve_path(self, manifest_path: str) -> Path | None: + entry = self._browser_manifest.get(manifest_path) + if entry is None: + return None + rel = entry.get("p", manifest_path) + output_dir = Path(self.manager.state.output_data_dir) + full = output_dir / rel + if full.exists(): + return full + return None + + def _browser_preview_file(self, path: str, entry: dict) -> None: + self._browser_clear_preview() + + size = entry.get("s", 0) + crc = entry.get("h", "") + ext = self._browser_file_ext(path) + + self._browser_info_var.set(f"{path} | Size: {self._format_size(size)} | CRC: {crc}") + + if ext == ".blp": + self._browser_preview_blp(path, entry) + elif ext == ".m2": + self._browser_preview_m2(path, entry) + elif ext == ".wmo": + self._browser_preview_wmo(path, entry) + elif ext in (".csv",): + self._browser_preview_dbc(path, entry) + elif ext == ".adt": + self._browser_preview_adt(path, entry) + elif ext in (".xml", ".lua", ".json", ".html", ".toc", ".txt", ".wtf", ".ini"): + self._browser_preview_text(path, entry) + elif ext in (".wav", ".mp3", ".ogg"): + self._browser_preview_audio(path, entry) + else: + self._browser_preview_hex(path, entry) + + def _format_size(self, size: int) -> str: + if size < 1024: + return f"{size} B" + elif size < 1024 * 1024: + return f"{size / 1024:.1f} KB" + else: + return f"{size / (1024 * 1024):.1f} MB" + + # ── BLP Preview ── + + def _browser_preview_blp(self, path: str, entry: dict) -> None: + if not HAS_PILLOW: + lbl = ttk.Label(self._browser_preview_frame, text="Install Pillow for image preview:\n pip install Pillow", anchor="center") + lbl.pack(expand=True) + return + + file_path = self._browser_resolve_path(path) + if file_path is None: + ttk.Label(self._browser_preview_frame, text="File not found on disk").pack(expand=True) + return + + # Check for blp_convert + blp_convert = ROOT_DIR / 
"build" / "bin" / "blp_convert" + if not blp_convert.exists(): + ttk.Label(self._browser_preview_frame, text="blp_convert not found in build/bin/\nBuild the project first.").pack(expand=True) + return + + # Cache directory + cache_dir = PIPELINE_DIR / "preview_cache" + cache_dir.mkdir(parents=True, exist_ok=True) + + cache_key = hashlib.md5(f"{path}:{entry.get('s', 0)}".encode()).hexdigest() + cached_png = cache_dir / f"{cache_key}.png" + + if not cached_png.exists(): + # Convert BLP to PNG + try: + with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp: + tmp_path = tmp.name + result = subprocess.run( + [str(blp_convert), "--to-png", str(file_path), tmp_path], + capture_output=True, text=True, timeout=10 + ) + if result.returncode != 0: + ttk.Label(self._browser_preview_frame, text=f"blp_convert failed:\n{result.stderr[:500]}").pack(expand=True) + try: + os.unlink(tmp_path) + except OSError: + pass + return + shutil.move(tmp_path, cached_png) + except Exception as exc: + ttk.Label(self._browser_preview_frame, text=f"Conversion error: {exc}").pack(expand=True) + return + + # Load and display + try: + img = Image.open(cached_png) + orig_w, orig_h = img.size + + # Fit to preview area + max_w = self._browser_preview_frame.winfo_width() or 600 + max_h = self._browser_preview_frame.winfo_height() or 500 + max_w = max(max_w - 20, 200) + max_h = max(max_h - 40, 200) + + scale = min(max_w / orig_w, max_h / orig_h, 1.0) + if scale < 1.0: + new_w = int(orig_w * scale) + new_h = int(orig_h * scale) + img = img.resize((new_w, new_h), Image.LANCZOS) + + self._browser_photo = ImageTk.PhotoImage(img) + info_text = f"{orig_w} x {orig_h}" + ttk.Label(self._browser_preview_frame, text=info_text).pack(pady=(4, 2)) + lbl = ttk.Label(self._browser_preview_frame, image=self._browser_photo) + lbl.pack(expand=True) + except Exception as exc: + ttk.Label(self._browser_preview_frame, text=f"Image load error: {exc}").pack(expand=True) + + # ── M2 Wireframe Preview ── + + def 
_browser_preview_m2(self, path: str, entry: dict) -> None: + file_path = self._browser_resolve_path(path) + if file_path is None: + ttk.Label(self._browser_preview_frame, text="File not found on disk").pack(expand=True) + return + + try: + data = file_path.read_bytes() + if len(data) < 108: + ttk.Label(self._browser_preview_frame, text="M2 file too small").pack(expand=True) + return + + magic = data[:4] + if magic != b"MD20": + ttk.Label(self._browser_preview_frame, text=f"Not an M2 file (magic: {magic!r})").pack(expand=True) + return + + version = struct.unpack_from(" 500000 or ofs_verts + n_verts * 48 > len(data): + ttk.Label(self._browser_preview_frame, text=f"M2: {n_verts} vertices (no preview)").pack(expand=True) + return + + verts: list[tuple[float, float, float]] = [] + for i in range(n_verts): + off = ofs_verts + i * 48 + x, y, z = struct.unpack_from(" list[tuple[int, int, int]]: + if len(data) < 48: + return [] + + # Check for SKIN magic + off = 0 + if data[:4] == b"SKIN": + off = 4 + + n_indices, ofs_indices = struct.unpack_from(" 500000: + return [] + if n_tris == 0 or n_tris > 500000: + return [] + + # Indices are uint16 vertex lookup + if ofs_indices + n_indices * 2 > len(data): + return [] + indices = list(struct.unpack_from(f"<{n_indices}H", data, ofs_indices)) + + # Triangles are uint16 index-into-indices + if ofs_tris + n_tris * 2 > len(data): + return [] + tri_idx = list(struct.unpack_from(f"<{n_tris}H", data, ofs_tris)) + + tris: list[tuple[int, int, int]] = [] + for i in range(0, len(tri_idx) - 2, 3): + a, b, c = tri_idx[i], tri_idx[i + 1], tri_idx[i + 2] + if a < n_indices and b < n_indices and c < n_indices: + tris.append((indices[a], indices[b], indices[c])) + + return tris + + # ── WMO Preview ── + + def _browser_preview_wmo(self, path: str, entry: dict) -> None: + file_path = self._browser_resolve_path(path) + if file_path is None: + ttk.Label(self._browser_preview_frame, text="File not found on disk").pack(expand=True) + return + + # Check 
if this is a root WMO or group WMO + name = file_path.name.lower() + # Group WMOs typically end with _NNN.wmo + is_group = len(name) > 8 and name[-8:-4].isdigit() and name[-9] == "_" + + try: + if is_group: + verts, tris = self._parse_wmo_group(file_path) + else: + # Root WMO — try to load first group + verts, tris = self._parse_wmo_root_first_group(file_path) + + if not verts: + data = file_path.read_bytes() + if len(data) >= 24 and data[:4] in (b"MVER", b"REVM"): + n_groups = 0 + # Try to find nGroups in MOHD chunk + pos = 0 + while pos < len(data) - 8: + chunk_id = data[pos:pos + 4] + chunk_size = struct.unpack_from("= 16: + n_groups = struct.unpack_from(" tuple[list[tuple[float, float, float]], list[tuple[int, int, int]]]: + data = file_path.read_bytes() + verts: list[tuple[float, float, float]] = [] + tris: list[tuple[int, int, int]] = [] + + pos = 0 + while pos < len(data) - 8: + chunk_id = data[pos:pos + 4] + chunk_size = struct.unpack_from(" tuple[list[tuple[float, float, float]], list[tuple[int, int, int]]]: + # Try _000.wmo + stem = file_path.stem + group_path = file_path.parent / f"{stem}_000.wmo" + if group_path.exists(): + return self._parse_wmo_group(group_path) + return [], [] + + # ── Wireframe Canvas (shared M2/WMO) ── + + def _browser_create_wireframe_canvas(self) -> None: + canvas = tk.Canvas(self._browser_preview_frame, bg="#1a1a2e", highlightthickness=0) + canvas.pack(fill="both", expand=True) + self._browser_canvas = canvas + + canvas.bind("", self._browser_wf_mouse_down) + canvas.bind("", self._browser_wf_mouse_drag) + canvas.bind("", self._browser_wf_scroll) + canvas.bind("", lambda e: self._browser_wf_scroll_linux(e, 1)) + canvas.bind("", lambda e: self._browser_wf_scroll_linux(e, -1)) + canvas.bind("", lambda e: self._browser_wf_render()) + + self.root.after(50, self._browser_wf_render) + + def _browser_wf_mouse_down(self, event: Any) -> None: + self._browser_drag_start = (event.x, event.y) + + def _browser_wf_mouse_drag(self, event: Any) 
-> None: + if self._browser_drag_start is None: + return + dx = event.x - self._browser_drag_start[0] + dy = event.y - self._browser_drag_start[1] + self._browser_az += dx * 0.01 + self._browser_el += dy * 0.01 + self._browser_el = max(-math.pi / 2, min(math.pi / 2, self._browser_el)) + self._browser_drag_start = (event.x, event.y) + self._browser_wf_render() + + def _browser_wf_scroll(self, event: Any) -> None: + if event.delta > 0: + self._browser_zoom *= 1.1 + else: + self._browser_zoom /= 1.1 + self._browser_wf_render() + + def _browser_wf_scroll_linux(self, event: Any, direction: int) -> None: + if direction > 0: + self._browser_zoom *= 1.1 + else: + self._browser_zoom /= 1.1 + self._browser_wf_render() + + def _browser_wf_render(self) -> None: + canvas = self._browser_canvas + canvas.delete("all") + w = canvas.winfo_width() + h = canvas.winfo_height() + if w < 10 or h < 10: + return + + verts = self._browser_wireframe_verts + tris = self._browser_wireframe_tris + if not verts: + return + + # Compute bounding box for auto-scale + xs = [v[0] for v in verts] + ys = [v[1] for v in verts] + zs = [v[2] for v in verts] + cx = (min(xs) + max(xs)) / 2 + cy = (min(ys) + max(ys)) / 2 + cz = (min(zs) + max(zs)) / 2 + extent = max(max(xs) - min(xs), max(ys) - min(ys), max(zs) - min(zs), 0.001) + scale = min(w, h) * 0.4 / extent * self._browser_zoom + + # Rotation matrix (azimuth around Z, elevation around X) + cos_a, sin_a = math.cos(self._browser_az), math.sin(self._browser_az) + cos_e, sin_e = math.cos(self._browser_el), math.sin(self._browser_el) + + def project(v: tuple[float, float, float]) -> tuple[float, float, float]: + x, y, z = v[0] - cx, v[1] - cy, v[2] - cz + # Rotate around Z (azimuth) + rx = x * cos_a - y * sin_a + ry = x * sin_a + y * cos_a + rz = z + # Rotate around X (elevation) + ry2 = ry * cos_e - rz * sin_e + rz2 = ry * sin_e + rz * cos_e + return (w / 2 + rx * scale, h / 2 - rz2 * scale, ry2) + + projected = [project(v) for v in verts] + + # 
Depth-sort triangles + if tris: + tri_depths: list[tuple[float, int]] = [] + for i, (a, b, c) in enumerate(tris): + if a < len(projected) and b < len(projected) and c < len(projected): + avg_depth = (projected[a][2] + projected[b][2] + projected[c][2]) / 3 + tri_depths.append((avg_depth, i)) + tri_depths.sort() + + # Draw max 20000 triangles for performance + max_draw = min(len(tri_depths), 20000) + min_d = tri_depths[0][0] if tri_depths else 0 + max_d = tri_depths[-1][0] if tri_depths else 1 + d_range = max_d - min_d if max_d != min_d else 1 + + for j in range(max_draw): + depth, idx = tri_depths[j] + a, b, c = tris[idx] + if a >= len(projected) or b >= len(projected) or c >= len(projected): + continue + + # Depth coloring: closer = brighter + t = 1.0 - (depth - min_d) / d_range + intensity = int(60 + t * 160) + color = f"#{intensity:02x}{intensity:02x}{int(intensity * 1.2) & 0xff:02x}" + + p1, p2, p3 = projected[a], projected[b], projected[c] + canvas.create_line(p1[0], p1[1], p2[0], p2[1], fill=color, width=1) + canvas.create_line(p2[0], p2[1], p3[0], p3[1], fill=color, width=1) + canvas.create_line(p3[0], p3[1], p1[0], p1[1], fill=color, width=1) + + # ── DBC/CSV Preview ── + + def _browser_preview_dbc(self, path: str, entry: dict) -> None: + file_path = self._browser_resolve_path(path) + if file_path is None: + ttk.Label(self._browser_preview_frame, text="File not found on disk").pack(expand=True) + return + + try: + text = file_path.read_text(encoding="utf-8", errors="replace") + except Exception as exc: + ttk.Label(self._browser_preview_frame, text=f"Read error: {exc}").pack(expand=True) + return + + lines = text.splitlines() + if not lines: + ttk.Label(self._browser_preview_frame, text="Empty file").pack(expand=True) + return + + # Parse header comment if present + header_line = "" + data_start = 0 + if lines[0].startswith("#"): + header_line = lines[0] + data_start = 1 + + # Split CSV + rows: list[list[str]] = [] + for line in lines[data_start:]: + if 
line.strip(): + rows.append(line.split(",")) + self._browser_dbc_rows = rows + self._browser_dbc_shown = 0 + + if not rows: + ttk.Label(self._browser_preview_frame, text="No data rows").pack(expand=True) + return + + n_cols = len(rows[0]) + + # Try to find column names from dbc_layouts.json + col_names: list[str] = [] + dbc_name = file_path.stem # e.g. "Spell" + for exp in ("wotlk", "tbc", "classic", "turtle"): + layout_path = ROOT_DIR / "Data" / "expansions" / exp / "dbc_layouts.json" + if layout_path.exists(): + try: + layouts = json.loads(layout_path.read_text(encoding="utf-8")) + if dbc_name in layouts: + mapping = layouts[dbc_name] + names = [""] * n_cols + for name, idx in mapping.items(): + if isinstance(idx, int) and 0 <= idx < n_cols: + names[idx] = name + col_names = [n if n else f"col_{i}" for i, n in enumerate(names)] + break + except (OSError, ValueError): + pass + + if not col_names: + col_names = [f"col_{i}" for i in range(n_cols)] + + # Info + info = f"{len(rows)} rows, {n_cols} columns" + if header_line: + info += f" ({header_line[:80]})" + ttk.Label(self._browser_preview_frame, text=info).pack(pady=(4, 2)) + + # Table frame with scrollbars + table_frame = ttk.Frame(self._browser_preview_frame) + table_frame.pack(fill="both", expand=True) + + xscroll = ttk.Scrollbar(table_frame, orient="horizontal") + yscroll = ttk.Scrollbar(table_frame, orient="vertical") + + col_ids = [f"c{i}" for i in range(n_cols)] + tree = ttk.Treeview( + table_frame, columns=col_ids, show="headings", + xscrollcommand=xscroll.set, yscrollcommand=yscroll.set + ) + xscroll.config(command=tree.xview) + yscroll.config(command=tree.yview) + + for i, cid in enumerate(col_ids): + name = col_names[i] if i < len(col_names) else f"col_{i}" + tree.heading(cid, text=name) + tree.column(cid, width=80, minwidth=40) + + tree.pack(side="left", fill="both", expand=True) + yscroll.pack(side="right", fill="y") + xscroll.pack(side="bottom", fill="x") + + self._browser_dbc_tree = tree + 
self._browser_dbc_col_ids = col_ids + self._browser_load_more_dbc(500) + + if len(rows) > 500: + btn = ttk.Button(self._browser_preview_frame, text="Load more rows...", command=lambda: self._browser_load_more_dbc(500)) + btn.pack(pady=4) + self._browser_dbc_more_btn = btn + + def _browser_load_more_dbc(self, count: int) -> None: + rows = self._browser_dbc_rows + start = self._browser_dbc_shown + end = min(start + count, len(rows)) + + tree = self._browser_dbc_tree + col_ids = self._browser_dbc_col_ids + n_cols = len(col_ids) + + for i in range(start, end): + row = rows[i] + values = row[:n_cols] + while len(values) < n_cols: + values.append("") + tree.insert("", "end", values=values) + + self._browser_dbc_shown = end + if end >= len(rows) and hasattr(self, "_browser_dbc_more_btn"): + self._browser_dbc_more_btn.configure(state="disabled", text="All rows loaded") + + # ── ADT Preview ── + + def _browser_preview_adt(self, path: str, entry: dict) -> None: + file_path = self._browser_resolve_path(path) + if file_path is None: + ttk.Label(self._browser_preview_frame, text="File not found on disk").pack(expand=True) + return + + try: + data = file_path.read_bytes() + except Exception as exc: + ttk.Label(self._browser_preview_frame, text=f"Read error: {exc}").pack(expand=True) + return + + # Parse MCNK chunks for height data + heights: list[list[float]] = [] # 16x16 chunks, each with avg height + pos = 0 + while pos < len(data) - 8: + chunk_id = data[pos:pos + 4] + chunk_size = struct.unpack_from("= 120: + # Base height at offset 112 in MCNK body + base_z = struct.unpack_from(" None: + canvas.delete("all") + cw = canvas.winfo_width() + ch = canvas.winfo_height() + if cw < 10 or ch < 10: + return + cell = min(cw, ch) // grid_size + + for i, h_list in enumerate(heights): + row = i // grid_size + col = i % grid_size + t = (h_list[0] - min_h) / h_range + # Green-brown colormap + r = int(50 + t * 150) + g = int(80 + (1 - t) * 120 + t * 50) + b = int(30 + t * 30) + color = 
f"#{r:02x}{g:02x}{b:02x}" + x1 = col * cell + y1 = row * cell + canvas.create_rectangle(x1, y1, x1 + cell, y1 + cell, fill=color, outline="") + + canvas.bind("", draw_heightmap) + canvas.after(50, draw_heightmap) + + # ── Text Preview ── + + def _browser_preview_text(self, path: str, entry: dict) -> None: + file_path = self._browser_resolve_path(path) + if file_path is None: + ttk.Label(self._browser_preview_frame, text="File not found on disk").pack(expand=True) + return + + try: + text = file_path.read_text(encoding="utf-8", errors="replace") + except Exception as exc: + ttk.Label(self._browser_preview_frame, text=f"Read error: {exc}").pack(expand=True) + return + + st = ScrolledText(self._browser_preview_frame, wrap="none", font=("Courier", 10)) + st.pack(fill="both", expand=True) + st.insert("1.0", text[:500000]) # Cap at 500k chars + st.configure(state="disabled") + + # ── Audio Preview ── + + def _browser_preview_audio(self, path: str, entry: dict) -> None: + file_path = self._browser_resolve_path(path) + if file_path is None: + ttk.Label(self._browser_preview_frame, text="File not found on disk").pack(expand=True) + return + + ext = self._browser_file_ext(path) + info_lines = [f"Audio file: {file_path.name}", f"Size: {self._format_size(entry.get('s', 0))}"] + + try: + data = file_path.read_bytes() + if ext == ".wav" and len(data) >= 44: + if data[:4] == b"RIFF" and data[8:12] == b"WAVE": + channels = struct.unpack_from("= 4: + info_lines.append("Format: MP3") + if data[:3] == b"ID3": + info_lines.append("Has ID3 tag") + elif ext == ".ogg" and len(data) >= 4: + if data[:4] == b"OggS": + info_lines.append("Format: Ogg Vorbis") + except Exception: + pass + + text = "\n".join(info_lines) + lbl = ttk.Label(self._browser_preview_frame, text=text, justify="left", anchor="nw") + lbl.pack(expand=True, padx=20, pady=20) + + # ── Hex Dump Preview ── + + def _browser_preview_hex(self, path: str, entry: dict) -> None: + file_path = self._browser_resolve_path(path) + if 
file_path is None: + ttk.Label(self._browser_preview_frame, text="File not found on disk").pack(expand=True) + return + + try: + data = file_path.read_bytes()[:512] + except Exception as exc: + ttk.Label(self._browser_preview_frame, text=f"Read error: {exc}").pack(expand=True) + return + + lines: list[str] = [] + for i in range(0, len(data), 16): + chunk = data[i:i + 16] + hex_part = " ".join(f"{b:02x}" for b in chunk) + ascii_part = "".join(chr(b) if 32 <= b < 127 else "." for b in chunk) + lines.append(f"{i:08x} {hex_part:<48s} {ascii_part}") + + ttk.Label(self._browser_preview_frame, text=f"Hex dump (first {len(data)} bytes):").pack(pady=(4, 2)) + st = ScrolledText(self._browser_preview_frame, wrap="none", font=("Courier", 10)) + st.pack(fill="both", expand=True) + st.insert("1.0", "\n".join(lines)) + st.configure(state="disabled") + + # ── End Asset Browser ────────────────────────────────────────────── + def _build_state_tab(self) -> None: actions = ttk.Frame(self.state_tab) actions.pack(fill="x") From 2a1bd12f5b648375e7e036f3a3781c545e435abf Mon Sep 17 00:00:00 2001 From: Kelsi Date: Mon, 23 Feb 2026 20:41:06 -0800 Subject: [PATCH 06/10] Fix game objects rendering with player textures MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When M2Renderer's descriptor pool was exhausted, batch.materialSet would be VK_NULL_HANDLE and the bind was skipped, but the draw call still executed using the previously bound descriptor set from CharacterRenderer — causing game objects to render with the player's skin/armor textures. Skip the entire batch instead. 
--- src/rendering/m2_renderer.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/rendering/m2_renderer.cpp b/src/rendering/m2_renderer.cpp index 2cbc1188..378a7b41 100644 --- a/src/rendering/m2_renderer.cpp +++ b/src/rendering/m2_renderer.cpp @@ -2381,11 +2381,11 @@ void M2Renderer::render(VkCommandBuffer cmd, VkDescriptorSet perFrameSet, const } } - // Bind material descriptor set (set 1) - if (batch.materialSet) { - vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, - pipelineLayout_, 1, 1, &batch.materialSet, 0, nullptr); - } + // Bind material descriptor set (set 1) — skip batch if missing + // to avoid inheriting a stale descriptor set from a prior renderer + if (!batch.materialSet) continue; + vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, + pipelineLayout_, 1, 1, &batch.materialSet, 0, nullptr); // Push constants M2PushConstants pc; From 6e2d51b3257c52964f20e4cb162bb8c9829f903d Mon Sep 17 00:00:00 2001 From: Kelsi Date: Mon, 23 Feb 2026 20:45:19 -0800 Subject: [PATCH 07/10] Fix asset browser hanging on launch with large manifests Manifest keys use backslashes but tree splitting used forward slashes, causing all 241k entries to land at root level. Combined with O(N) any(startswith) checks per entry, this produced an O(N^2) hang. Re-key manifest by the forward-slash 'p' field and build a directory index in a single O(N) pass so tree operations are O(1) lookups. 
--- tools/asset_pipeline_gui.py | 75 +++++++++++++++++++------------------ 1 file changed, 38 insertions(+), 37 deletions(-) diff --git a/tools/asset_pipeline_gui.py b/tools/asset_pipeline_gui.py index 0e87a7d8..d3065788 100755 --- a/tools/asset_pipeline_gui.py +++ b/tools/asset_pipeline_gui.py @@ -568,38 +568,51 @@ class AssetPipelineGUI: try: doc = json.loads(manifest_path.read_text(encoding="utf-8")) entries = doc.get("entries", {}) - if isinstance(entries, dict): - self._browser_manifest = entries - self._browser_manifest_list = sorted(entries.keys(), key=str.lower) - self._browser_count_var.set(f"{len(entries)} entries") + if not isinstance(entries, dict): + self._browser_count_var.set("Invalid manifest format") + return except (OSError, ValueError, TypeError) as exc: self._browser_count_var.set(f"Manifest error: {exc}") return + # Re-key manifest by the 'p' field (forward-slash paths) for tree display + self._browser_manifest = {} + for _key, val in entries.items(): + display_path = val.get("p", _key).replace("\\", "/") + self._browser_manifest[display_path] = val + self._browser_manifest_list = sorted(self._browser_manifest.keys(), key=str.lower) + self._browser_count_var.set(f"{len(self._browser_manifest)} entries") + + # Build directory tree index: dir_path -> ({subdirs}, [files]) + # Single O(N) pass so tree operations are O(1) lookups + self._browser_dir_index: dict[str, tuple[set[str], list[str]]] = {} + for path in self._browser_manifest_list: + parts = path.split("/") + for depth in range(len(parts)): + dir_key = "/".join(parts[:depth]) if depth > 0 else "" + if dir_key not in self._browser_dir_index: + self._browser_dir_index[dir_key] = (set(), []) + idx_entry = self._browser_dir_index[dir_key] + if depth < len(parts) - 1: + idx_entry[0].add(parts[depth]) + else: + idx_entry[1].append(parts[depth]) + self._browser_populate_tree_root() def _browser_populate_tree_root(self) -> None: self._browser_tree.delete(*self._browser_tree.get_children()) 
self._browser_tree_populated.clear() - # Build top-level directories - top_dirs: set[str] = set() - for path in self._browser_manifest_list: - parts = path.split("/") - if len(parts) > 1: - top_dirs.add(parts[0]) - else: - top_dirs.add(path) + root_entry = self._browser_dir_index.get("", (set(), [])) + subdirs, files = root_entry - for name in sorted(top_dirs, key=str.lower): - # Check if this is a directory (has children) or a file - is_dir = any(p.startswith(name + "/") for p in self._browser_manifest_list) - if is_dir: - node = self._browser_tree.insert("", "end", iid=name, text=name, open=False) - # Insert dummy child for lazy loading - self._browser_tree.insert(node, "end", iid=name + "/__dummy__", text="") - else: - self._browser_tree.insert("", "end", iid=name, text=name) + for name in sorted(subdirs, key=str.lower): + node = self._browser_tree.insert("", "end", iid=name, text=name, open=False) + self._browser_tree.insert(node, "end", iid=name + "/__dummy__", text="") + + for name in sorted(files, key=str.lower): + self._browser_tree.insert("", "end", iid=name, text=name) def _browser_on_expand(self, event: Any) -> None: node = self._browser_tree.focus() @@ -612,29 +625,17 @@ class AssetPipelineGUI: if self._browser_tree.exists(dummy): self._browser_tree.delete(dummy) - prefix = node + "/" - # Collect immediate children - child_dirs: set[str] = set() - child_files: list[str] = [] - - for path in self._browser_manifest_list: - if not path.startswith(prefix): - continue - remainder = path[len(prefix):] - parts = remainder.split("/") - if len(parts) > 1: - child_dirs.add(parts[0]) - else: - child_files.append(parts[0]) + dir_entry = self._browser_dir_index.get(node, (set(), [])) + child_dirs, child_files = dir_entry for d in sorted(child_dirs, key=str.lower): - child_id = prefix + d + child_id = node + "/" + d if not self._browser_tree.exists(child_id): n = self._browser_tree.insert(node, "end", iid=child_id, text=d, open=False) self._browser_tree.insert(n, 
"end", iid=child_id + "/__dummy__", text="") for f in sorted(child_files, key=str.lower): - child_id = prefix + f + child_id = node + "/" + f if not self._browser_tree.exists(child_id): self._browser_tree.insert(node, "end", iid=child_id, text=f) From edf0a40759a9d525b8a6a851f4a5bfb2d6a800a1 Mon Sep 17 00:00:00 2001 From: Kelsi Date: Mon, 23 Feb 2026 20:51:33 -0800 Subject: [PATCH 08/10] Fix asset browser BLP errors, M2 wireframes, and add anim filter - BLP: blp_convert takes one arg, not two; was passing an output path that caused conversion failures - M2: vertex header offsets were wrong (used 80/100 instead of 60/68), producing garbage vertex counts that failed the sanity check - Add "Hide .anim/.skin" checkbox (on by default) to filter ~30k companion files from the directory tree --- tools/asset_pipeline_gui.py | 76 ++++++++++++++++++++++--------------- 1 file changed, 46 insertions(+), 30 deletions(-) diff --git a/tools/asset_pipeline_gui.py b/tools/asset_pipeline_gui.py index d3065788..24a94d4b 100755 --- a/tools/asset_pipeline_gui.py +++ b/tools/asset_pipeline_gui.py @@ -521,7 +521,11 @@ class AssetPipelineGUI: type_combo.pack(side="left", padx=(4, 8)) ttk.Button(top_bar, text="Search", command=self._browser_do_search).pack(side="left", padx=(0, 4)) - ttk.Button(top_bar, text="Reset", command=self._browser_reset_search).pack(side="left") + ttk.Button(top_bar, text="Reset", command=self._browser_reset_search).pack(side="left", padx=(0, 8)) + + self._browser_hide_anim_var = tk.BooleanVar(value=True) + ttk.Checkbutton(top_bar, text="Hide .anim/.skin", variable=self._browser_hide_anim_var, + command=self._browser_reset_search).pack(side="left") self._browser_count_var = tk.StringVar(value="") ttk.Label(top_bar, textvariable=self._browser_count_var).pack(side="right") @@ -583,28 +587,42 @@ class AssetPipelineGUI: self._browser_manifest_list = sorted(self._browser_manifest.keys(), key=str.lower) self._browser_count_var.set(f"{len(self._browser_manifest)} entries") 
- # Build directory tree index: dir_path -> ({subdirs}, [files]) + # Build directory tree indices: one full, one filtered # Single O(N) pass so tree operations are O(1) lookups - self._browser_dir_index: dict[str, tuple[set[str], list[str]]] = {} - for path in self._browser_manifest_list: + _hidden_exts = {".anim", ".skin"} + self._browser_dir_index_full = self._build_dir_index(self._browser_manifest_list) + filtered = [p for p in self._browser_manifest_list + if os.path.splitext(p)[1].lower() not in _hidden_exts] + self._browser_dir_index_filtered = self._build_dir_index(filtered) + + self._browser_populate_tree_root() + + @staticmethod + def _build_dir_index(paths: list[str]) -> dict[str, tuple[set[str], list[str]]]: + index: dict[str, tuple[set[str], list[str]]] = {} + for path in paths: parts = path.split("/") for depth in range(len(parts)): dir_key = "/".join(parts[:depth]) if depth > 0 else "" - if dir_key not in self._browser_dir_index: - self._browser_dir_index[dir_key] = (set(), []) - idx_entry = self._browser_dir_index[dir_key] + if dir_key not in index: + index[dir_key] = (set(), []) + entry = index[dir_key] if depth < len(parts) - 1: - idx_entry[0].add(parts[depth]) + entry[0].add(parts[depth]) else: - idx_entry[1].append(parts[depth]) + entry[1].append(parts[depth]) + return index - self._browser_populate_tree_root() + def _browser_active_index(self) -> dict[str, tuple[set[str], list[str]]]: + if self._browser_hide_anim_var.get(): + return self._browser_dir_index_filtered + return self._browser_dir_index_full def _browser_populate_tree_root(self) -> None: self._browser_tree.delete(*self._browser_tree.get_children()) self._browser_tree_populated.clear() - root_entry = self._browser_dir_index.get("", (set(), [])) + root_entry = self._browser_active_index().get("", (set(), [])) subdirs, files = root_entry for name in sorted(subdirs, key=str.lower): @@ -625,7 +643,7 @@ class AssetPipelineGUI: if self._browser_tree.exists(dummy): 
self._browser_tree.delete(dummy) - dir_entry = self._browser_dir_index.get(node, (set(), [])) + dir_entry = self._browser_active_index().get(node, (set(), [])) child_dirs, child_files = dir_entry for d in sorted(child_dirs, key=str.lower): @@ -665,13 +683,15 @@ class AssetPipelineGUI: "Text": {".xml", ".lua", ".json", ".html", ".toc", ".txt", ".wtf"}, } + hidden_exts = {".anim", ".skin"} if self._browser_hide_anim_var.get() else set() results: list[str] = [] exts = type_exts.get(type_filter) for path in self._browser_manifest_list: - if exts: - ext = os.path.splitext(path)[1].lower() - if ext not in exts: - continue + ext = os.path.splitext(path)[1].lower() + if ext in hidden_exts: + continue + if exts and ext not in exts: + continue if query and query not in path.lower(): continue results.append(path) @@ -791,22 +811,17 @@ class AssetPipelineGUI: cached_png = cache_dir / f"{cache_key}.png" if not cached_png.exists(): - # Convert BLP to PNG + # blp_convert outputs PNG alongside source: foo.blp -> foo.png try: - with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp: - tmp_path = tmp.name result = subprocess.run( - [str(blp_convert), "--to-png", str(file_path), tmp_path], + [str(blp_convert), "--to-png", str(file_path)], capture_output=True, text=True, timeout=10 ) - if result.returncode != 0: + output_png = file_path.with_suffix(".png") + if result.returncode != 0 or not output_png.exists(): ttk.Label(self._browser_preview_frame, text=f"blp_convert failed:\n{result.stderr[:500]}").pack(expand=True) - try: - os.unlink(tmp_path) - except OSError: - pass return - shutil.move(tmp_path, cached_png) + shutil.move(str(output_png), cached_png) except Exception as exc: ttk.Label(self._browser_preview_frame, text=f"Conversion error: {exc}").pack(expand=True) return @@ -858,12 +873,13 @@ class AssetPipelineGUI: version = struct.unpack_from(" 500000 or ofs_verts + n_verts * 48 > len(data): ttk.Label(self._browser_preview_frame, text=f"M2: {n_verts} vertices (no 
preview)").pack(expand=True) From 55faacef96a893ea0a4e9c9d06bf1cf78d746312 Mon Sep 17 00:00:00 2001 From: Kelsi Date: Mon, 23 Feb 2026 22:22:39 -0800 Subject: [PATCH 09/10] Add M2/WMO 3D viewer with textured rendering, animation, and audio playback - New tools/m2_viewer.py: Pygame/OpenGL viewer for M2 models (textured rendering, skeletal animation, orbit camera) and WMO buildings - M2 viewer: per-batch texture mapping, CPU vertex skinning, animation playback with play/pause/speed controls, wireframe overlay toggle - WMO viewer: root+group file parsing (MOTX/MOMT/MOVT/MOVI/MONR/MOTV/MOBA), per-batch material rendering with BLP textures - Asset browser: "Open 3D Viewer" buttons for M2 and WMO previews, audio Play/Stop buttons using pygame.mixer in subprocess - Handles both WotLK (v264) and Vanilla (v256) M2 formats --- tools/asset_pipeline_gui.py | 422 ++++++- tools/m2_viewer.py | 2170 +++++++++++++++++++++++++++++++++++ 2 files changed, 2579 insertions(+), 13 deletions(-) create mode 100644 tools/m2_viewer.py diff --git a/tools/asset_pipeline_gui.py b/tools/asset_pipeline_gui.py index 24a94d4b..c784ef35 100755 --- a/tools/asset_pipeline_gui.py +++ b/tools/asset_pipeline_gui.py @@ -41,6 +41,20 @@ PIPELINE_DIR = ROOT_DIR / "asset_pipeline" STATE_FILE = PIPELINE_DIR / "state.json" +def _audio_subprocess(file_path: str) -> None: + """Play an audio file using pygame.mixer in a subprocess.""" + try: + import pygame + pygame.mixer.init(frequency=44100, size=-16, channels=2, buffer=2048) + pygame.mixer.music.load(file_path) + pygame.mixer.music.play() + while pygame.mixer.music.get_busy(): + pygame.time.wait(100) + pygame.mixer.quit() + except Exception: + pass + + @dataclass class PackInfo: pack_id: str @@ -487,6 +501,7 @@ class AssetPipelineGUI: def _build_browser_tab(self) -> None: self._browser_manifest: dict[str, dict] = {} + self._browser_manifest_lc: dict[str, str] = {} self._browser_manifest_list: list[str] = [] self._browser_tree_populated: set[str] = set() 
self._browser_photo: Any = None # prevent GC of PhotoImage @@ -587,6 +602,11 @@ class AssetPipelineGUI: self._browser_manifest_list = sorted(self._browser_manifest.keys(), key=str.lower) self._browser_count_var.set(f"{len(self._browser_manifest)} entries") + # Build case-insensitive lookup: lowercase forward-slash path -> actual manifest path + self._browser_manifest_lc: dict[str, str] = {} + for p in self._browser_manifest: + self._browser_manifest_lc[p.lower()] = p + # Build directory tree indices: one full, one filtered # Single O(N) pass so tree operations are O(1) lookups _hidden_exts = {".anim", ".skin"} @@ -851,7 +871,147 @@ class AssetPipelineGUI: except Exception as exc: ttk.Label(self._browser_preview_frame, text=f"Image load error: {exc}").pack(expand=True) - # ── M2 Wireframe Preview ── + # ── M2 Preview (wireframe + textures + animations) ── + + # Common animation ID names + _ANIM_NAMES: dict[int, str] = { + 0: "Stand", 1: "Death", 2: "Spell", 3: "Stop", 4: "Walk", 5: "Run", + 6: "Dead", 7: "Rise", 8: "StandWound", 9: "CombatWound", 10: "CombatCritical", + 11: "ShuffleLeft", 12: "ShuffleRight", 13: "Walkbackwards", 14: "Stun", + 15: "HandsClosed", 16: "AttackUnarmed", 17: "Attack1H", 18: "Attack2H", + 19: "Attack2HL", 20: "ParryUnarmed", 21: "Parry1H", 22: "Parry2H", + 23: "Parry2HL", 24: "ShieldBlock", 25: "ReadyUnarmed", 26: "Ready1H", + 27: "Ready2H", 28: "Ready2HL", 29: "ReadyBow", 30: "Dodge", + 31: "SpellPrecast", 32: "SpellCast", 33: "SpellCastArea", + 34: "NPCWelcome", 35: "NPCGoodbye", 36: "Block", 37: "JumpStart", + 38: "Jump", 39: "JumpEnd", 40: "Fall", 41: "SwimIdle", 42: "Swim", + 43: "SwimLeft", 44: "SwimRight", 45: "SwimBackwards", + 60: "SpellChannelDirected", 61: "SpellChannelOmni", + 69: "CombatAbility", 70: "CombatAbility2H", + 94: "Kneel", 113: "Loot", + 135: "ReadyRifle", 138: "Fly", 143: "CustomSpell01", + 157: "EmoteTalk", 185: "FlyIdle", + } + + # Texture type names for non-filename textures + _TEX_TYPE_NAMES: dict[int, str] = { 
+ 0: "Filename", 1: "Body/Skin", 2: "Object Skin", 3: "Weapon Blade", + 4: "Weapon Handle", 5: "Environment", 6: "Hair", 7: "Facial Hair", + 8: "Skin Extra", 9: "UI Skin", 10: "Tauren Mane", 11: "Monster Skin 1", + 12: "Monster Skin 2", 13: "Monster Skin 3", 14: "Item Icon", + } + + def _browser_parse_m2_textures(self, data: bytes, version: int) -> list[dict]: + """Parse M2 texture definitions. Returns list of {type, flags, filename}.""" + if version <= 256: + ofs = 92 + else: + ofs = 80 + + if len(data) < ofs + 8: + return [] + + n_tex, ofs_tex = struct.unpack_from(" 1000 or ofs_tex + n_tex * 16 > len(data): + return [] + + textures = [] + for i in range(n_tex): + base = ofs_tex + i * 16 + tex_type, tex_flags = struct.unpack_from(" 1 and name_ofs + name_len <= len(data): + raw = data[name_ofs:name_ofs + name_len] + filename = raw.split(b"\x00", 1)[0].decode("ascii", errors="replace") + textures.append({"type": tex_type, "flags": tex_flags, "filename": filename}) + return textures + + def _browser_parse_m2_animations(self, data: bytes, version: int) -> list[dict]: + """Parse M2 animation sequences. 
Returns list of {id, variation, duration, speed, flags}.""" + if len(data) < 36: + return [] + + n_anim, ofs_anim = struct.unpack_from(" 5000: + return [] + + seq_size = 68 if version <= 256 else 64 + if ofs_anim + n_anim * seq_size > len(data): + return [] + + anims = [] + for i in range(n_anim): + base = ofs_anim + i * seq_size + anim_id, variation = struct.unpack_from(" Path | None: + """Resolve a BLP filename from M2 texture to a filesystem path, case-insensitively.""" + # Normalize: backslash -> forward slash + normalized = blp_name.replace("\\", "/") + lc = normalized.lower() + + # Try direct manifest lookup + actual = self._browser_manifest_lc.get(lc) + if actual: + return self._browser_resolve_path(actual) + + # Try without leading slash + if lc.startswith("/"): + actual = self._browser_manifest_lc.get(lc[1:]) + if actual: + return self._browser_resolve_path(actual) + + return None + + def _browser_load_blp_thumbnail(self, blp_path: Path, size: int = 64) -> Any: + """Convert BLP to PNG and return a PhotoImage thumbnail, or None.""" + if not HAS_PILLOW: + return None + + blp_convert = ROOT_DIR / "build" / "bin" / "blp_convert" + if not blp_convert.exists(): + return None + + cache_dir = PIPELINE_DIR / "preview_cache" + cache_dir.mkdir(parents=True, exist_ok=True) + cache_key = hashlib.md5(str(blp_path).encode()).hexdigest() + cached_png = cache_dir / f"{cache_key}.png" + + if not cached_png.exists(): + try: + result = subprocess.run( + [str(blp_convert), "--to-png", str(blp_path)], + capture_output=True, text=True, timeout=10, + ) + output_png = blp_path.with_suffix(".png") + if result.returncode != 0 or not output_png.exists(): + return None + shutil.move(str(output_png), cached_png) + except Exception: + return None + + try: + img = Image.open(cached_png) + img.thumbnail((size, size), Image.LANCZOS) + return ImageTk.PhotoImage(img) + except Exception: + return None def _browser_preview_m2(self, path: str, entry: dict) -> None: file_path = 
self._browser_resolve_path(path) @@ -872,10 +1032,7 @@ class AssetPipelineGUI: version = struct.unpack_from("", self._browser_wf_mouse_down) + canvas.bind("", self._browser_wf_mouse_drag) + canvas.bind("", self._browser_wf_scroll) + canvas.bind("", lambda e: self._browser_wf_scroll_linux(e, 1)) + canvas.bind("", lambda e: self._browser_wf_scroll_linux(e, -1)) + canvas.bind("", lambda e: self._browser_wf_render()) + self.root.after(50, self._browser_wf_render) + + # Right: textures + animations sidebar + right_frame = ttk.Frame(main_pane) + main_pane.add(right_frame, weight=1) + + # --- Textures section --- + ttk.Label(right_frame, text="Textures", font=("", 10, "bold")).pack(anchor="w", pady=(4, 2)) + + # Keep references to thumbnail PhotoImages to prevent GC + self._browser_m2_thumbs: list[Any] = [] + + if textures: + tex_frame = ttk.Frame(right_frame) + tex_frame.pack(fill="x", padx=2) + + for i, tex in enumerate(textures): + row_frame = ttk.Frame(tex_frame) + row_frame.pack(fill="x", pady=1) + + tex_type = tex["type"] + filename = tex["filename"] + + if tex_type == 0 and filename: + # Try to load BLP thumbnail + display_name = filename.replace("\\", "/").split("/")[-1] + blp_fs_path = self._browser_resolve_blp_path(filename) + thumb = None + if blp_fs_path: + thumb = self._browser_load_blp_thumbnail(blp_fs_path) + + if thumb: + self._browser_m2_thumbs.append(thumb) + lbl_img = ttk.Label(row_frame, image=thumb) + lbl_img.pack(side="left", padx=(0, 4)) + + lbl_text = ttk.Label(row_frame, text=display_name, wraplength=180) + lbl_text.pack(side="left", fill="x") + else: + type_name = self._TEX_TYPE_NAMES.get(tex_type, f"Type {tex_type}") + lbl = ttk.Label(row_frame, text=f"[{type_name}]", foreground="#888") + lbl.pack(side="left") + else: + ttk.Label(right_frame, text="(none)", foreground="#888").pack(anchor="w") + + # --- Separator --- + ttk.Separator(right_frame, orient="horizontal").pack(fill="x", pady=6) + + # --- Animations section --- + ttk.Label(right_frame, 
text="Animations", font=("", 10, "bold")).pack(anchor="w", pady=(0, 2)) + + if animations: + anim_frame = ttk.Frame(right_frame) + anim_frame.pack(fill="both", expand=True) + + anim_scroll = ttk.Scrollbar(anim_frame, orient="vertical") + anim_tree = ttk.Treeview( + anim_frame, columns=("id", "name", "var", "dur", "spd"), + show="headings", height=8, + yscrollcommand=anim_scroll.set, + ) + anim_scroll.config(command=anim_tree.yview) + + anim_tree.heading("id", text="ID") + anim_tree.heading("name", text="Name") + anim_tree.heading("var", text="Var") + anim_tree.heading("dur", text="Dur(ms)") + anim_tree.heading("spd", text="Speed") + + anim_tree.column("id", width=35, minwidth=30) + anim_tree.column("name", width=90, minwidth=60) + anim_tree.column("var", width=30, minwidth=25) + anim_tree.column("dur", width=55, minwidth=40) + anim_tree.column("spd", width=45, minwidth=35) + + for anim in animations: + aid = anim["id"] + name = self._ANIM_NAMES.get(aid, "") + anim_tree.insert("", "end", values=( + aid, name, anim["variation"], + anim["duration"], f"{anim['speed']:.1f}", + )) + + anim_tree.pack(side="left", fill="both", expand=True) + anim_scroll.pack(side="right", fill="y") + else: + ttk.Label(right_frame, text="(none)", foreground="#888").pack(anchor="w") except Exception as exc: ttk.Label(self._browser_preview_frame, text=f"M2 parse error: {exc}").pack(expand=True) @@ -995,8 +1292,74 @@ class AssetPipelineGUI: self._browser_el = 0.3 self._browser_zoom = 1.0 + top_bar = ttk.Frame(self._browser_preview_frame) + top_bar.pack(fill="x", pady=(4, 2)) + info = f"WMO: {len(verts)} vertices, {len(tris)} triangles" - ttk.Label(self._browser_preview_frame, text=info).pack(pady=(4, 2)) + ttk.Label(top_bar, text=info).pack(side="left", fill="x", expand=True) + + def _open_wmo_viewer(fp=file_path, ig=is_group): + blp_convert = ROOT_DIR / "build" / "bin" / "blp_convert" + if not blp_convert.exists(): + messagebox.showerror("Error", "blp_convert not found in build/bin/") + 
return + # Determine root and group files + if ig: + stem = fp.stem + root_stem = stem.rsplit("_", 1)[0] + root_path = fp.parent / f"{root_stem}.wmo" + groups = sorted(fp.parent.glob(f"{root_stem}_*.wmo")) + else: + root_path = fp + groups = sorted(fp.parent.glob(f"{fp.stem}_*.wmo")) + # Parse root for texture names, resolve BLP paths + blp_map: dict[str, str] = {} + if root_path.exists(): + import struct as _st + rdata = root_path.read_bytes() + pos = 0 + while pos + 8 <= len(rdata): + cid = rdata[pos:pos + 4] + csz = _st.unpack_from(" len(rdata): + break + tag = cid if cid[:1].isupper() else cid[::-1] + if tag == b"MOTX": + off = 0 + while off < csz: + end = rdata.find(b"\x00", cs + off, ce) + if end < 0: + break + s = rdata[cs + off:end].decode("ascii", errors="replace") + if s: + resolved = self._browser_resolve_blp_path(s) + if resolved: + norm = s.replace("\\", "/") + blp_map[norm] = str(resolved) + blp_map[norm.lower()] = str(resolved) + off = end - cs + 1 + else: + off += 1 + break + pos = ce + try: + from tools.m2_viewer import launch_wmo_viewer + launch_wmo_viewer( + str(root_path) if root_path.exists() else "", + [str(g) for g in groups], + blp_map, str(blp_convert)) + except ImportError: + try: + from m2_viewer import launch_wmo_viewer as lwv + lwv(str(root_path) if root_path.exists() else "", + [str(g) for g in groups], blp_map, str(blp_convert)) + except ImportError: + messagebox.showerror("Error", "m2_viewer.py not found. 
Requires pygame, PyOpenGL, numpy, Pillow.") + + ttk.Button(top_bar, text="Open 3D Viewer", command=_open_wmo_viewer).pack(side="right", padx=4) + self._browser_create_wireframe_canvas() except Exception as exc: @@ -1423,7 +1786,40 @@ class AssetPipelineGUI: text = "\n".join(info_lines) lbl = ttk.Label(self._browser_preview_frame, text=text, justify="left", anchor="nw") - lbl.pack(expand=True, padx=20, pady=20) + lbl.pack(padx=20, pady=(20, 8)) + + # Audio playback controls + btn_frame = ttk.Frame(self._browser_preview_frame) + btn_frame.pack(padx=20, pady=4) + + self._audio_status_var = tk.StringVar(value="Stopped") + status_lbl = ttk.Label(self._browser_preview_frame, textvariable=self._audio_status_var) + status_lbl.pack(padx=20, pady=(4, 0)) + + def _play_audio(): + self._browser_stop_audio() + try: + import multiprocessing + self._audio_proc = multiprocessing.Process( + target=_audio_subprocess, args=(str(file_path),), daemon=True) + self._audio_proc.start() + self._audio_status_var.set("Playing...") + except Exception as exc: + self._audio_status_var.set(f"Error: {exc}") + + def _stop_audio(): + self._browser_stop_audio() + self._audio_status_var.set("Stopped") + + ttk.Button(btn_frame, text="Play", command=_play_audio).pack(side="left", padx=4) + ttk.Button(btn_frame, text="Stop", command=_stop_audio).pack(side="left", padx=4) + + def _browser_stop_audio(self): + proc = getattr(self, "_audio_proc", None) + if proc and proc.is_alive(): + proc.terminate() + proc.join(timeout=1) + self._audio_proc = None # ── Hex Dump Preview ── diff --git a/tools/m2_viewer.py b/tools/m2_viewer.py new file mode 100644 index 00000000..8648948f --- /dev/null +++ b/tools/m2_viewer.py @@ -0,0 +1,2170 @@ +#!/usr/bin/env python3 +"""Self-contained Pygame/OpenGL M2 model viewer. + +Launched as a subprocess from the asset pipeline GUI to avoid Tkinter/Pygame conflicts. +Supports textured rendering, skeletal animation playback, and orbit camera controls. 
+""" + +from __future__ import annotations + +import hashlib +import math +import multiprocessing +import os +import shutil +import struct +import subprocess +import sys +import time +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any + +import numpy as np + +# --------------------------------------------------------------------------- +# Matrix math utilities (pure NumPy, no external 3D lib needed) +# --------------------------------------------------------------------------- + +def perspective(fov_deg: float, aspect: float, near: float, far: float) -> np.ndarray: + f = 1.0 / math.tan(math.radians(fov_deg) / 2.0) + m = np.zeros((4, 4), dtype=np.float32) + m[0, 0] = f / aspect + m[1, 1] = f + m[2, 2] = (far + near) / (near - far) + m[2, 3] = (2.0 * far * near) / (near - far) + m[3, 2] = -1.0 + return m + + +def look_at(eye: np.ndarray, target: np.ndarray, up: np.ndarray) -> np.ndarray: + f = target - eye + f = f / np.linalg.norm(f) + s = np.cross(f, up) + s = s / (np.linalg.norm(s) + 1e-12) + u = np.cross(s, f) + m = np.eye(4, dtype=np.float32) + m[0, :3] = s + m[1, :3] = u + m[2, :3] = -f + m[0, 3] = -np.dot(s, eye) + m[1, 3] = -np.dot(u, eye) + m[2, 3] = np.dot(f, eye) + return m + + +def translate(tx: float, ty: float, tz: float) -> np.ndarray: + m = np.eye(4, dtype=np.float32) + m[0, 3] = tx + m[1, 3] = ty + m[2, 3] = tz + return m + + +def scale_mat4(sx: float, sy: float, sz: float) -> np.ndarray: + m = np.eye(4, dtype=np.float32) + m[0, 0] = sx + m[1, 1] = sy + m[2, 2] = sz + return m + + +def quat_to_mat4(q: np.ndarray) -> np.ndarray: + """Quaternion (x,y,z,w) to 4x4 rotation matrix.""" + x, y, z, w = q + m = np.eye(4, dtype=np.float32) + m[0, 0] = 1 - 2 * (y * y + z * z) + m[0, 1] = 2 * (x * y - z * w) + m[0, 2] = 2 * (x * z + y * w) + m[1, 0] = 2 * (x * y + z * w) + m[1, 1] = 1 - 2 * (x * x + z * z) + m[1, 2] = 2 * (y * z - x * w) + m[2, 0] = 2 * (x * z - y * w) + m[2, 1] = 2 * (y * z + x * w) + m[2, 2] = 1 - 2 * (x 
* x + y * y) + return m + + +def slerp(q0: np.ndarray, q1: np.ndarray, t: float) -> np.ndarray: + dot = np.dot(q0, q1) + if dot < 0: + q1 = -q1 + dot = -dot + dot = min(dot, 1.0) + if dot > 0.9995: + result = q0 + t * (q1 - q0) + return result / np.linalg.norm(result) + theta = math.acos(dot) + sin_theta = math.sin(theta) + a = math.sin((1 - t) * theta) / sin_theta + b = math.sin(t * theta) / sin_theta + result = a * q0 + b * q1 + return result / np.linalg.norm(result) + + +# --------------------------------------------------------------------------- +# M2 Parser +# --------------------------------------------------------------------------- + +@dataclass +class M2Track: + """Parsed animation track with per-sequence timestamps and keyframes.""" + interp: int = 0 + global_sequence: int = -1 + timestamps: list[np.ndarray] = field(default_factory=list) # list of uint32 arrays per seq + keys: list[np.ndarray] = field(default_factory=list) # list of value arrays per seq + + +@dataclass +class M2Bone: + key_bone_id: int = -1 + flags: int = 0 + parent: int = -1 + pivot: np.ndarray = field(default_factory=lambda: np.zeros(3, dtype=np.float32)) + translation: M2Track = field(default_factory=M2Track) + rotation: M2Track = field(default_factory=M2Track) + scale: M2Track = field(default_factory=M2Track) + + +@dataclass +class M2Submesh: + vertex_start: int = 0 + vertex_count: int = 0 + index_start: int = 0 + index_count: int = 0 + + +@dataclass +class M2Batch: + submesh_index: int = 0 + texture_combo_index: int = 0 + + +@dataclass +class M2Animation: + anim_id: int = 0 + variation: int = 0 + duration: int = 0 + speed: float = 0.0 + flags: int = 0 + + +class M2Parser: + """Parse M2 binary data for rendering: vertices, UVs, normals, bones, skins, textures.""" + + def __init__(self, data: bytes): + self.data = data + self.version = struct.unpack_from(" int: + """Return header offset for a given field, version-gated.""" + offsets_wotlk = { + "nGlobalSeq": 20, "ofsGlobalSeq": 24, + 
"nAnims": 28, "ofsAnims": 32, + "nBones": 44, "ofsBones": 48, + "nVerts": 60, "ofsVerts": 64, + "nTextures": 80, "ofsTextures": 84, + "nTextureLookup": 128, "ofsTextureLookup": 132, + "nBoneLookup": 120, "ofsBoneLookup": 124, + } + offsets_vanilla = { + "nGlobalSeq": 20, "ofsGlobalSeq": 24, + "nAnims": 28, "ofsAnims": 32, + "nBones": 52, "ofsBones": 56, + "nVerts": 68, "ofsVerts": 72, + "nTextures": 92, "ofsTextures": 96, + "nTextureLookup": 148, "ofsTextureLookup": 152, + "nBoneLookup": 140, "ofsBoneLookup": 144, + } + table = offsets_vanilla if self.is_vanilla else offsets_wotlk + return table[field_name] + + def _read_u32(self, offset: int) -> int: + return struct.unpack_from(" tuple[int, int]: + """Read count, offset for an M2Array header field.""" + n_off = self._hdr(f"n{field_name}") + o_off = self._hdr(f"ofs{field_name}") + n = self._read_u32(n_off) + o = self._read_u32(o_off) + return n, o + + def _parse(self): + self._parse_global_sequences() + self._parse_vertices() + self._parse_textures() + self._parse_texture_lookup() + self._parse_bone_lookup() + self._parse_animations() + self._parse_bones() + self._parse_skin() + + def _parse_global_sequences(self): + n, ofs = self._read_m2array("GlobalSeq") + if n == 0 or n > 10000 or ofs + n * 4 > len(self.data): + return + self.global_sequences = list(struct.unpack_from(f"<{n}I", self.data, ofs)) + + def _parse_vertices(self): + n, ofs = self._read_m2array("Verts") + if n == 0 or n > 500000 or ofs + n * 48 > len(self.data): + return + + # Parse all vertex fields using numpy for speed + positions = np.empty((n, 3), dtype=np.float32) + normals = np.empty((n, 3), dtype=np.float32) + uvs = np.empty((n, 2), dtype=np.float32) + bone_weights = np.empty((n, 4), dtype=np.uint8) + bone_indices = np.empty((n, 4), dtype=np.uint8) + + for i in range(n): + base = ofs + i * 48 + positions[i] = struct.unpack_from("<3f", self.data, base) + bone_weights[i] = struct.unpack_from("<4B", self.data, base + 12) + bone_indices[i] = 
struct.unpack_from("<4B", self.data, base + 16) + normals[i] = struct.unpack_from("<3f", self.data, base + 20) + uvs[i] = struct.unpack_from("<2f", self.data, base + 32) + + self.positions = positions + self.normals = normals + self.uvs = uvs + self.bone_weights = bone_weights + self.bone_indices = bone_indices + + def _parse_textures(self): + n, ofs = self._read_m2array("Textures") + if n == 0 or n > 1000 or ofs + n * 16 > len(self.data): + return + for i in range(n): + base = ofs + i * 16 + tex_type, tex_flags = struct.unpack_from(" 1 and name_ofs + name_len <= len(self.data): + raw = self.data[name_ofs:name_ofs + name_len] + filename = raw.split(b"\x00", 1)[0].decode("ascii", errors="replace") + self.textures.append({"type": tex_type, "flags": tex_flags, "filename": filename}) + + def _parse_texture_lookup(self): + n, ofs = self._read_m2array("TextureLookup") + if n == 0 or n > 10000 or ofs + n * 2 > len(self.data): + return + self.texture_lookup = list(struct.unpack_from(f"<{n}H", self.data, ofs)) + + def _parse_bone_lookup(self): + n, ofs = self._read_m2array("BoneLookup") + if n == 0 or n > 10000 or ofs + n * 2 > len(self.data): + return + self.bone_lookup = list(struct.unpack_from(f"<{n}H", self.data, ofs)) + + def _parse_animations(self): + n, ofs = self._read_m2array("Anims") + if n == 0 or n > 5000: + return + seq_size = 68 if self.is_vanilla else 64 + if ofs + n * seq_size > len(self.data): + return + for i in range(n): + base = ofs + i * seq_size + anim_id, variation = struct.unpack_from(" M2Track: + """Parse a WotLK M2TrackDisk (20 bytes) at given offset.""" + track = M2Track() + if base + 20 > len(self.data): + return track + interp, global_seq = struct.unpack_from(" 5000 or n_keys > 5000: + return track + + # Each entry in n_ts is a sub-array header: {count(4), offset(4)} + for s in range(n_ts): + ts_hdr = ofs_ts + s * 8 + if ts_hdr + 8 > len(self.data): + track.timestamps.append(np.empty(0, dtype=np.uint32)) + continue + sub_count, sub_ofs = 
struct.unpack_from(" 50000 or sub_ofs + sub_count * 4 > len(self.data): + track.timestamps.append(np.empty(0, dtype=np.uint32)) + continue + ts_data = np.frombuffer(self.data, dtype=np.uint32, count=sub_count, offset=sub_ofs) + track.timestamps.append(ts_data.copy()) + + for s in range(n_keys): + key_hdr = ofs_keys + s * 8 + if key_hdr + 8 > len(self.data): + track.keys.append(np.empty(0, dtype=np.float32)) + continue + sub_count, sub_ofs = struct.unpack_from(" 50000 or sub_ofs + sub_count * key_size > len(self.data): + track.keys.append(np.empty(0, dtype=np.float32)) + continue + if key_dtype == "compressed_quat": + raw = np.frombuffer(self.data, dtype=np.int16, count=sub_count * 4, offset=sub_ofs) + raw = raw.reshape(sub_count, 4).astype(np.float32) + # Decompress: (v < 0 ? v+32768 : v-32767) / 32767.0 + result = np.where(raw < 0, raw + 32768.0, raw - 32767.0) / 32767.0 + # Normalize each quaternion + norms = np.linalg.norm(result, axis=1, keepdims=True) + norms = np.maximum(norms, 1e-10) + result = result / norms + track.keys.append(result) + elif key_dtype == "vec3": + vals = np.frombuffer(self.data, dtype=np.float32, count=sub_count * 3, offset=sub_ofs) + track.keys.append(vals.reshape(sub_count, 3).copy()) + elif key_dtype == "float": + vals = np.frombuffer(self.data, dtype=np.float32, count=sub_count, offset=sub_ofs) + track.keys.append(vals.copy()) + + return track + + def _parse_track_vanilla(self, base: int, key_size: int, key_dtype: str) -> M2Track: + """Parse a Vanilla M2TrackDiskVanilla (28 bytes) — flat arrays with M2Range indexing.""" + track = M2Track() + if base + 28 > len(self.data): + return track + interp, global_seq = struct.unpack_from(" 500000 or n_keys > 500000: + return track + + # Read flat timestamp array + all_ts = np.empty(0, dtype=np.uint32) + if n_ts > 0 and ofs_ts + n_ts * 4 <= len(self.data): + all_ts = np.frombuffer(self.data, dtype=np.uint32, count=n_ts, offset=ofs_ts).copy() + + # Read flat key array + if key_dtype == "c4quat": + 
all_keys_flat = np.empty(0, dtype=np.float32) + if n_keys > 0 and ofs_keys + n_keys * 16 <= len(self.data): + all_keys_flat = np.frombuffer(self.data, dtype=np.float32, count=n_keys * 4, offset=ofs_keys) + all_keys_flat = all_keys_flat.reshape(n_keys, 4).copy() + elif key_dtype == "vec3": + all_keys_flat = np.empty((0, 3), dtype=np.float32) + if n_keys > 0 and ofs_keys + n_keys * 12 <= len(self.data): + all_keys_flat = np.frombuffer(self.data, dtype=np.float32, count=n_keys * 3, offset=ofs_keys) + all_keys_flat = all_keys_flat.reshape(n_keys, 3).copy() + else: + all_keys_flat = np.empty(0, dtype=np.float32) + if n_keys > 0 and ofs_keys + n_keys * key_size <= len(self.data): + all_keys_flat = np.frombuffer(self.data, dtype=np.float32, count=n_keys, offset=ofs_keys).copy() + + # Read ranges and split into per-sequence arrays + if n_ranges > 0 and n_ranges < 5000 and ofs_ranges + n_ranges * 8 <= len(self.data): + for r in range(n_ranges): + rng_start, rng_end = struct.unpack_from(" rng_start and rng_end <= len(all_ts): + track.timestamps.append(all_ts[rng_start:rng_end]) + if key_dtype in ("c4quat", "vec3") and rng_end <= len(all_keys_flat): + track.keys.append(all_keys_flat[rng_start:rng_end]) + elif rng_end <= len(all_keys_flat): + track.keys.append(all_keys_flat[rng_start:rng_end]) + else: + track.keys.append(np.empty(0, dtype=np.float32)) + else: + track.timestamps.append(np.empty(0, dtype=np.uint32)) + track.keys.append(np.empty(0, dtype=np.float32)) + else: + # No ranges — treat entire array as single sequence + if len(all_ts) > 0: + track.timestamps.append(all_ts) + track.keys.append(all_keys_flat if len(all_keys_flat) > 0 else np.empty(0, dtype=np.float32)) + + return track + + def _parse_bones(self): + n, ofs = self._read_m2array("Bones") + if n == 0 or n > 5000: + return + + if self.is_vanilla: + bone_size = 108 # No boneNameCRC, 28-byte tracks: 4+4+2+2+3×28+12=108 + for i in range(n): + base = ofs + i * bone_size + if base + bone_size > len(self.data): + 
break + bone = M2Bone() + bone.key_bone_id = struct.unpack_from(" len(self.data): + break + bone = M2Bone() + bone.key_bone_id = struct.unpack_from(" 500000: + return + if n_tris == 0 or n_tris > 500000: + return + + # Vertex lookup + if ofs_indices + n_indices * 2 <= len(skin_data): + self.vertex_lookup = np.frombuffer(skin_data, dtype=np.uint16, + count=n_indices, offset=ofs_indices).copy() + + # Raw triangle indices (indices into vertex_lookup) + if ofs_tris + n_tris * 2 <= len(skin_data): + self.triangles = np.frombuffer(skin_data, dtype=np.uint16, + count=n_tris, offset=ofs_tris).copy() + + # Resolve two-level indirection: triangle idx -> vertex_lookup -> global vertex idx + # This matches the C++ approach: model.indices stores global vertex indices + if len(self.triangles) > 0 and len(self.vertex_lookup) > 0: + n_verts = len(self.positions) if len(self.positions) > 0 else 65536 + resolved = np.zeros(len(self.triangles), dtype=np.uint16) + for i, tri_idx in enumerate(self.triangles): + if tri_idx < len(self.vertex_lookup): + global_idx = self.vertex_lookup[tri_idx] + resolved[i] = global_idx if global_idx < n_verts else 0 + else: + resolved[i] = 0 + self.resolved_indices = resolved + + # Submeshes (WotLK: 48 bytes, Vanilla: 32 bytes) + submesh_size = 32 if self.is_vanilla else 48 + if n_submeshes > 0 and n_submeshes < 10000 and ofs_submeshes + n_submeshes * submesh_size <= len(skin_data): + for i in range(n_submeshes): + base = ofs_submeshes + i * submesh_size + sm = M2Submesh() + # WotLK M2SkinSection: +0=skinSectionId(2), +2=Level(2), + # +4=vertexStart(2), +6=vertexCount(2), +8=indexStart(2), +10=indexCount(2) + sm.vertex_start = struct.unpack_from(" 0 and n_batches < 10000 and ofs_batches + n_batches * 24 <= len(skin_data): + for i in range(n_batches): + base = ofs_batches + i * 24 + batch = M2Batch() + # M2Batch: flags(1) + priority(1) + shaderId(2) + skinSectionIndex(2) + # + geosetIndex(2) + colorIndex(2) + materialIndex(2) + materialLayer(2) + # + 
textureCount(2) + textureComboIndex(2) + ... + batch.submesh_index = struct.unpack_from(" 0: + self.time_ms += dt * 1000.0 * self.speed + self.time_ms = self.time_ms % anim.duration + + seq_idx = self.current_seq + t = self.time_ms + + for i, bone in enumerate(self.parser.bones): + local = self._eval_bone(bone, seq_idx, t) + if bone.parent >= 0 and bone.parent < n_bones: + self.bone_matrices[i] = self.bone_matrices[bone.parent] @ local + else: + self.bone_matrices[i] = local + + def _eval_bone(self, bone: M2Bone, seq_idx: int, time_ms: float) -> np.ndarray: + """Compute local bone transform for one bone at given time.""" + trans = self._interp_vec3(bone.translation, seq_idx, time_ms, np.zeros(3, dtype=np.float32)) + rot = self._interp_quat(bone.rotation, seq_idx, time_ms) + scl = self._interp_vec3(bone.scale, seq_idx, time_ms, np.ones(3, dtype=np.float32)) + + # local = T(pivot) * T(trans) * R(rot) * S(scl) * T(-pivot) + p = bone.pivot + m = translate(p[0], p[1], p[2]) + m = m @ translate(trans[0], trans[1], trans[2]) + m = m @ quat_to_mat4(rot) + m = m @ scale_mat4(scl[0], scl[1], scl[2]) + m = m @ translate(-p[0], -p[1], -p[2]) + return m + + def _get_time_and_seq(self, track: M2Track, seq_idx: int, time_ms: float) -> tuple[int, float]: + """Resolve sequence index and time, handling global sequences.""" + if track.global_sequence >= 0 and track.global_sequence < len(self.parser.global_sequences): + gs_dur = self.parser.global_sequences[track.global_sequence] + actual_seq = 0 + actual_time = time_ms % gs_dur if gs_dur > 0 else 0 + else: + actual_seq = seq_idx + actual_time = time_ms + return actual_seq, actual_time + + def _interp_vec3(self, track: M2Track, seq_idx: int, time_ms: float, + default: np.ndarray) -> np.ndarray: + si, t = self._get_time_and_seq(track, seq_idx, time_ms) + if si >= len(track.timestamps) or si >= len(track.keys): + return default + ts = track.timestamps[si] + keys = track.keys[si] + if len(ts) == 0 or len(keys) == 0: + return default + if 
len(keys.shape) == 1: + return default + + if t <= ts[0]: + return keys[0] + if t >= ts[-1]: + return keys[-1] + + # Binary search + idx = np.searchsorted(ts, t, side='right') - 1 + idx = max(0, min(idx, len(ts) - 2)) + t0, t1 = float(ts[idx]), float(ts[idx + 1]) + frac = (t - t0) / (t1 - t0) if t1 != t0 else 0.0 + + if track.interp == 0: + return keys[idx] + return keys[idx] * (1.0 - frac) + keys[idx + 1] * frac + + def _interp_quat(self, track: M2Track, seq_idx: int, time_ms: float) -> np.ndarray: + default = np.array([0, 0, 0, 1], dtype=np.float32) + si, t = self._get_time_and_seq(track, seq_idx, time_ms) + if si >= len(track.timestamps) or si >= len(track.keys): + return default + ts = track.timestamps[si] + keys = track.keys[si] + if len(ts) == 0 or len(keys) == 0: + return default + if len(keys.shape) == 1: + return default + + if t <= ts[0]: + return keys[0] + if t >= ts[-1]: + return keys[-1] + + idx = np.searchsorted(ts, t, side='right') - 1 + idx = max(0, min(idx, len(ts) - 2)) + t0, t1 = float(ts[idx]), float(ts[idx + 1]) + frac = (t - t0) / (t1 - t0) if t1 != t0 else 0.0 + + if track.interp == 0: + return keys[idx] + return slerp(keys[idx], keys[idx + 1], frac) + + def skin_vertices(self, positions: np.ndarray, bone_weights: np.ndarray, + bone_indices: np.ndarray, bone_lookup: list[int]) -> np.ndarray: + """CPU vertex skinning (NumPy vectorized). 
Returns transformed positions.""" + if len(self.bone_matrices) == 0 or len(bone_lookup) == 0: + return positions.copy() + + n = len(positions) + n_bones = len(self.bone_matrices) + n_lookup = len(bone_lookup) + lookup_arr = np.array(bone_lookup, dtype=np.int32) + + # Build homogeneous positions (n, 4) + pos4 = np.ones((n, 4), dtype=np.float32) + pos4[:, :3] = positions + + # Weights normalized to float (n, 4) + weights = bone_weights.astype(np.float32) / 255.0 + + result = np.zeros((n, 4), dtype=np.float32) + + for j in range(4): + w = weights[:, j] # (n,) + mask = w > 0.001 + if not np.any(mask): + continue + + bi = bone_indices[mask, j].astype(np.int32) + # Clamp bone lookup indices + valid = bi < n_lookup + bi = np.where(valid, bi, 0) + global_bones = lookup_arr[bi] + global_bones = np.where(valid, global_bones, 0) + valid2 = valid & (global_bones < n_bones) + global_bones = np.where(valid2, global_bones, 0) + + # Gather bone matrices for these vertices: (count, 4, 4) + mats = self.bone_matrices[global_bones] + # Transform: (count, 4, 4) @ (count, 4, 1) -> (count, 4, 1) + transformed = np.einsum('nij,nj->ni', mats, pos4[mask]) + # Apply weight and validity + weighted = transformed * w[mask, np.newaxis] + weighted[~valid2] = 0 + result[mask] += weighted + + # De-homogenize + w_col = result[:, 3:4] + w_col = np.where(np.abs(w_col) > 0.001, w_col, 1.0) + return (result[:, :3] / w_col).astype(np.float32) + + +# --------------------------------------------------------------------------- +# Orbit Camera +# --------------------------------------------------------------------------- + +class OrbitCamera: + def __init__(self): + self.azimuth: float = 0.0 + self.elevation: float = 0.3 + self.distance: float = 5.0 + self.target: np.ndarray = np.zeros(3, dtype=np.float32) + self.pan_x: float = 0.0 + self.pan_y: float = 0.0 + + def get_view_matrix(self) -> np.ndarray: + eye = self._eye_pos() + up = np.array([0, 0, 1], dtype=np.float32) + target = self.target + 
np.array([self.pan_x, self.pan_y, 0], dtype=np.float32) + return look_at(eye, target, up) + + def _eye_pos(self) -> np.ndarray: + x = self.distance * math.cos(self.elevation) * math.cos(self.azimuth) + y = self.distance * math.cos(self.elevation) * math.sin(self.azimuth) + z = self.distance * math.sin(self.elevation) + target = self.target + np.array([self.pan_x, self.pan_y, 0], dtype=np.float32) + return target + np.array([x, y, z], dtype=np.float32) + + def orbit(self, dx: float, dy: float): + self.azimuth += dx * 0.01 + self.elevation = max(-math.pi / 2 + 0.01, min(math.pi / 2 - 0.01, + self.elevation + dy * 0.01)) + + def zoom(self, delta: float): + self.distance = max(0.5, self.distance * (1.0 - delta * 0.1)) + + def pan(self, dx: float, dy: float): + self.pan_x += dx * self.distance * 0.002 + self.pan_y += dy * self.distance * 0.002 + + +# --------------------------------------------------------------------------- +# M2 Renderer (OpenGL 3.3) +# --------------------------------------------------------------------------- + +VERT_SHADER = """ +#version 330 core +layout(location=0) in vec3 aPos; +layout(location=1) in vec3 aNormal; +layout(location=2) in vec2 aUV; + +uniform mat4 uMVP; +uniform mat4 uModel; + +out vec3 vNormal; +out vec2 vUV; +out vec3 vWorldPos; + +void main() { + gl_Position = uMVP * vec4(aPos, 1.0); + vNormal = mat3(uModel) * aNormal; + vUV = aUV; + vWorldPos = (uModel * vec4(aPos, 1.0)).xyz; +} +""" + +FRAG_SHADER = """ +#version 330 core +in vec3 vNormal; +in vec2 vUV; +in vec3 vWorldPos; + +uniform sampler2D uTexture; +uniform int uHasTexture; +uniform vec3 uLightDir; + +out vec4 FragColor; + +void main() { + vec3 N = normalize(vNormal); + float NdotL = abs(dot(N, uLightDir)); + float ambient = 0.35; + float diffuse = 0.65 * NdotL; + float light = ambient + diffuse; + + vec4 texColor; + if (uHasTexture == 1) { + texColor = texture(uTexture, vUV); + if (texColor.a < 0.1) discard; + } else { + texColor = vec4(0.6, 0.6, 0.65, 1.0); + } + + 
FragColor = vec4(texColor.rgb * light, texColor.a); +} +""" + +WIRE_VERT = """ +#version 330 core +layout(location=0) in vec3 aPos; +uniform mat4 uMVP; +void main() { + gl_Position = uMVP * vec4(aPos, 1.0); +} +""" + +WIRE_FRAG = """ +#version 330 core +out vec4 FragColor; +void main() { + FragColor = vec4(0.0, 0.8, 1.0, 0.4); +} +""" + + +class M2Renderer: + """OpenGL 3.3 renderer for M2 models.""" + + def __init__(self, parser: M2Parser, blp_paths: dict[str, str], blp_convert: str): + self.parser = parser + self.blp_paths = blp_paths # texture filename -> filesystem path + self.blp_convert_path = blp_convert + + self.vao = 0 + self.vbo = 0 + self.ebo = 0 + self.wire_vao = 0 + self.wire_vbo = 0 + self.wire_ebo = 0 + self.shader = 0 + self.wire_shader = 0 + self.gl_textures: dict[int, int] = {} # batch index -> GL texture ID + self.batch_texture_map: dict[int, int] = {} # batch idx -> texture array index + + self.show_wireframe = False + self.n_indices = 0 + self.n_wire_indices = 0 + self.n_verts = 0 + + def init_gl(self): + import OpenGL.GL as gl + + self._gl = gl + + # Build shaders + self.shader = self._compile_program(VERT_SHADER, FRAG_SHADER) + self.wire_shader = self._compile_program(WIRE_VERT, WIRE_FRAG) + + p = self.parser + n_verts = len(p.positions) + if n_verts == 0: + return + self.n_verts = n_verts + + # VBO: ALL model vertices, interleaved pos(12) + normal(12) + uv(8) = 32 bytes + vbo_data = np.zeros((n_verts, 8), dtype=np.float32) + vbo_data[:, 0:3] = p.positions + vbo_data[:, 3:6] = p.normals if len(p.normals) == n_verts else np.zeros((n_verts, 3), dtype=np.float32) + vbo_data[:, 6:8] = p.uvs if len(p.uvs) == n_verts else np.zeros((n_verts, 2), dtype=np.float32) + + # EBO: resolved global vertex indices (after two-level skin indirection) + if len(p.resolved_indices) > 0: + idx_data = p.resolved_indices.astype(np.uint16) + elif len(p.triangles) > 0: + idx_data = p.triangles.astype(np.uint16) + else: + idx_data = np.empty(0, dtype=np.uint16) + + # 
Create main VAO/VBO/EBO + self.vao = gl.glGenVertexArrays(1) + self.vbo = gl.glGenBuffers(1) + self.ebo = gl.glGenBuffers(1) + + gl.glBindVertexArray(self.vao) + + gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self.vbo) + gl.glBufferData(gl.GL_ARRAY_BUFFER, vbo_data.nbytes, vbo_data, gl.GL_DYNAMIC_DRAW) + + if len(idx_data) > 0: + self.n_indices = len(idx_data) + gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, self.ebo) + gl.glBufferData(gl.GL_ELEMENT_ARRAY_BUFFER, idx_data.nbytes, idx_data, gl.GL_STATIC_DRAW) + + stride = 32 + gl.glVertexAttribPointer(0, 3, gl.GL_FLOAT, gl.GL_FALSE, stride, gl.ctypes.c_void_p(0)) + gl.glEnableVertexAttribArray(0) + gl.glVertexAttribPointer(1, 3, gl.GL_FLOAT, gl.GL_FALSE, stride, gl.ctypes.c_void_p(12)) + gl.glEnableVertexAttribArray(1) + gl.glVertexAttribPointer(2, 2, gl.GL_FLOAT, gl.GL_FALSE, stride, gl.ctypes.c_void_p(24)) + gl.glEnableVertexAttribArray(2) + + gl.glBindVertexArray(0) + + # Wireframe VAO (positions only, same indices) + self.wire_vao = gl.glGenVertexArrays(1) + self.wire_vbo = gl.glGenBuffers(1) + self.wire_ebo = gl.glGenBuffers(1) + + gl.glBindVertexArray(self.wire_vao) + gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self.wire_vbo) + gl.glBufferData(gl.GL_ARRAY_BUFFER, p.positions.nbytes, p.positions, gl.GL_DYNAMIC_DRAW) + + if len(idx_data) > 0: + self.n_wire_indices = len(idx_data) + gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, self.wire_ebo) + gl.glBufferData(gl.GL_ELEMENT_ARRAY_BUFFER, idx_data.nbytes, idx_data, gl.GL_STATIC_DRAW) + + gl.glVertexAttribPointer(0, 3, gl.GL_FLOAT, gl.GL_FALSE, 12, gl.ctypes.c_void_p(0)) + gl.glEnableVertexAttribArray(0) + gl.glBindVertexArray(0) + + # Load textures + self._load_textures() + + # Map batches to textures + self._map_batch_textures() + + def _compile_program(self, vert_src: str, frag_src: str) -> int: + gl = self._gl + vs = gl.glCreateShader(gl.GL_VERTEX_SHADER) + gl.glShaderSource(vs, vert_src) + gl.glCompileShader(vs) + if gl.glGetShaderiv(vs, gl.GL_COMPILE_STATUS) != gl.GL_TRUE: + log = 
gl.glGetShaderInfoLog(vs).decode() + print(f"Vertex shader error: {log}") + + fs = gl.glCreateShader(gl.GL_FRAGMENT_SHADER) + gl.glShaderSource(fs, frag_src) + gl.glCompileShader(fs) + if gl.glGetShaderiv(fs, gl.GL_COMPILE_STATUS) != gl.GL_TRUE: + log = gl.glGetShaderInfoLog(fs).decode() + print(f"Fragment shader error: {log}") + + prog = gl.glCreateProgram() + gl.glAttachShader(prog, vs) + gl.glAttachShader(prog, fs) + gl.glLinkProgram(prog) + if gl.glGetProgramiv(prog, gl.GL_LINK_STATUS) != gl.GL_TRUE: + log = gl.glGetProgramInfoLog(prog).decode() + print(f"Program link error: {log}") + + gl.glDeleteShader(vs) + gl.glDeleteShader(fs) + return prog + + def _load_textures(self): + """Load BLP textures via blp_convert → PIL → GL texture.""" + gl = self._gl + try: + from PIL import Image + except ImportError: + print("PIL not available, textures disabled") + return + + cache_dir = Path(os.path.expanduser("~/.cache/m2_viewer")) + cache_dir.mkdir(parents=True, exist_ok=True) + + for i, tex in enumerate(self.parser.textures): + if tex["type"] != 0 or not tex["filename"]: + continue + + fname = tex["filename"].replace("\\", "/") + blp_path = self.blp_paths.get(fname) or self.blp_paths.get(fname.lower()) + if not blp_path: + continue + + # Convert BLP to PNG + cache_key = hashlib.md5(blp_path.encode()).hexdigest() + cached_png = cache_dir / f"{cache_key}.png" + + if not cached_png.exists(): + try: + # Copy BLP to temp dir for conversion (avoids read-only source dirs) + import tempfile + with tempfile.TemporaryDirectory() as tmpdir: + tmp_blp = Path(tmpdir) / Path(blp_path).name + shutil.copy2(blp_path, str(tmp_blp)) + result = subprocess.run( + [self.blp_convert_path, "--to-png", str(tmp_blp)], + capture_output=True, text=True, timeout=10, + ) + output_png = tmp_blp.with_suffix(".png") + if result.returncode != 0 or not output_png.exists(): + print(f"blp_convert failed for {fname}: {result.stderr}") + continue + shutil.move(str(output_png), str(cached_png)) + except 
Exception as e: + print(f"BLP convert failed for {fname}: {e}") + continue + + try: + img = Image.open(cached_png) + img = img.transpose(Image.FLIP_TOP_BOTTOM) + if img.mode != "RGBA": + img = img.convert("RGBA") + img_data = np.array(img, dtype=np.uint8) + + tex_id = gl.glGenTextures(1) + gl.glBindTexture(gl.GL_TEXTURE_2D, tex_id) + gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, gl.GL_RGBA, img.width, img.height, + 0, gl.GL_RGBA, gl.GL_UNSIGNED_BYTE, img_data) + gl.glGenerateMipmap(gl.GL_TEXTURE_2D) + gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR_MIPMAP_LINEAR) + gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR) + gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_REPEAT) + gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_REPEAT) + + self.gl_textures[i] = tex_id + except Exception as e: + print(f"Texture load failed for {fname}: {e}") + + def _map_batch_textures(self): + """Resolve batch → texture combo → texture lookup → GL texture mapping.""" + for bi, batch in enumerate(self.parser.batches): + tci = batch.texture_combo_index + if tci < len(self.parser.texture_lookup): + tex_idx = self.parser.texture_lookup[tci] + if tex_idx in self.gl_textures: + self.batch_texture_map[bi] = self.gl_textures[tex_idx] + + def update_vertices(self, skinned_positions: np.ndarray): + """Upload new skinned vertex positions to VBO.""" + gl = self._gl + if self.vao == 0 or len(skinned_positions) == 0: + return + + p = self.parser + n_verts = len(skinned_positions) + + # Rebuild interleaved VBO data with new positions + vbo_data = np.zeros((n_verts, 8), dtype=np.float32) + vbo_data[:, 0:3] = skinned_positions + vbo_data[:, 3:6] = p.normals if len(p.normals) == n_verts else np.zeros((n_verts, 3), dtype=np.float32) + vbo_data[:, 6:8] = p.uvs if len(p.uvs) == n_verts else np.zeros((n_verts, 2), dtype=np.float32) + + gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self.vbo) + gl.glBufferSubData(gl.GL_ARRAY_BUFFER, 0, 
vbo_data.nbytes, vbo_data) + + # Update wireframe VBO too + gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self.wire_vbo) + gl.glBufferSubData(gl.GL_ARRAY_BUFFER, 0, skinned_positions.nbytes, skinned_positions) + + def render(self, mvp: np.ndarray, model: np.ndarray): + gl = self._gl + if self.vao == 0 or self.n_indices == 0: + return + + gl.glEnable(gl.GL_DEPTH_TEST) + gl.glDisable(gl.GL_CULL_FACE) + + gl.glUseProgram(self.shader) + + mvp_loc = gl.glGetUniformLocation(self.shader, "uMVP") + model_loc = gl.glGetUniformLocation(self.shader, "uModel") + tex_loc = gl.glGetUniformLocation(self.shader, "uTexture") + has_tex_loc = gl.glGetUniformLocation(self.shader, "uHasTexture") + light_loc = gl.glGetUniformLocation(self.shader, "uLightDir") + + gl.glUniformMatrix4fv(mvp_loc, 1, gl.GL_TRUE, mvp) + gl.glUniformMatrix4fv(model_loc, 1, gl.GL_TRUE, model) + gl.glUniform1i(tex_loc, 0) + + # Light direction (normalized) + light_dir = np.array([0.5, 0.3, 0.8], dtype=np.float32) + light_dir /= np.linalg.norm(light_dir) + gl.glUniform3fv(light_loc, 1, light_dir) + + gl.glBindVertexArray(self.vao) + + if self.parser.batches and self.parser.submeshes: + # Per-batch rendering + for bi, batch in enumerate(self.parser.batches): + si = batch.submesh_index + if si >= len(self.parser.submeshes): + continue + sm = self.parser.submeshes[si] + + # Bind texture if available + gl_tex = self.batch_texture_map.get(bi) + if gl_tex: + gl.glActiveTexture(gl.GL_TEXTURE0) + gl.glBindTexture(gl.GL_TEXTURE_2D, gl_tex) + gl.glUniform1i(has_tex_loc, 1) + else: + gl.glUniform1i(has_tex_loc, 0) + + # Draw this submesh's triangles + idx_start = sm.index_start + idx_count = sm.index_count + if idx_start + idx_count <= self.n_indices: + gl.glDrawElements(gl.GL_TRIANGLES, idx_count, gl.GL_UNSIGNED_SHORT, + gl.ctypes.c_void_p(idx_start * 2)) + else: + # Fallback: draw all triangles with no texture + gl.glUniform1i(has_tex_loc, 0) + gl.glDrawElements(gl.GL_TRIANGLES, self.n_indices, gl.GL_UNSIGNED_SHORT, + 
gl.ctypes.c_void_p(0)) + + gl.glBindVertexArray(0) + + # Wireframe overlay + if self.show_wireframe and self.wire_vao and self.n_wire_indices > 0: + gl.glUseProgram(self.wire_shader) + wire_mvp_loc = gl.glGetUniformLocation(self.wire_shader, "uMVP") + gl.glUniformMatrix4fv(wire_mvp_loc, 1, gl.GL_TRUE, mvp) + + gl.glEnable(gl.GL_BLEND) + gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA) + gl.glPolygonMode(gl.GL_FRONT_AND_BACK, gl.GL_LINE) + gl.glDisable(gl.GL_CULL_FACE) + + gl.glBindVertexArray(self.wire_vao) + gl.glDrawElements(gl.GL_TRIANGLES, self.n_wire_indices, gl.GL_UNSIGNED_SHORT, + gl.ctypes.c_void_p(0)) + gl.glBindVertexArray(0) + + gl.glPolygonMode(gl.GL_FRONT_AND_BACK, gl.GL_FILL) + gl.glDisable(gl.GL_BLEND) + + +# --------------------------------------------------------------------------- +# M2 Viewer Window (Pygame main loop) +# --------------------------------------------------------------------------- + +class M2ViewerWindow: + """Pygame + OpenGL M2 model viewer window.""" + + def __init__(self, m2_path: str, blp_paths: dict[str, str], blp_convert: str): + self.m2_path = m2_path + self.blp_paths = blp_paths + self.blp_convert = blp_convert + self.parser: M2Parser | None = None + self.anim_system: AnimationSystem | None = None + self.renderer: M2Renderer | None = None + self.camera = OrbitCamera() + self.width = 1024 + self.height = 768 + self.running = True + self.fps_clock = None + self.font = None + + self._dragging = False + self._panning = False + self._last_mouse = (0, 0) + + def run(self): + """Main entry point — parse, init GL, run loop.""" + import pygame + from pygame.locals import ( + DOUBLEBUF, OPENGL, RESIZABLE, QUIT, KEYDOWN, MOUSEBUTTONDOWN, + MOUSEBUTTONUP, MOUSEMOTION, VIDEORESIZE, + K_SPACE, K_LEFT, K_RIGHT, K_PLUS, K_MINUS, K_EQUALS, K_r, K_w, + K_ESCAPE, + ) + + # Parse M2 + data = Path(self.m2_path).read_bytes() + if len(data) < 8 or data[:4] != b"MD20": + print(f"Not a valid M2 file: {self.m2_path}") + return + + 
self.parser = M2Parser(data) + + # Load skin file + m2_p = Path(self.m2_path) + skin_path = m2_p.with_name(m2_p.stem + "00.skin") + if skin_path.exists(): + self.parser.parse_skin_data(skin_path.read_bytes()) + elif self.parser.is_vanilla: + # Embedded skin at ofsViews + if self.parser.version <= 256: + # Read ofsViews from vanilla header + if len(data) > 108: + ofs_views = struct.unpack_from(" 0 and ofs_views < len(data): + self.parser.parse_skin_data(data[ofs_views:]) + + # Init animation + self.anim_system = AnimationSystem(self.parser) + if self.parser.animations: + self.anim_system.set_sequence(0) + + # Auto-fit camera + if len(self.parser.positions) > 0: + mins = self.parser.positions.min(axis=0) + maxs = self.parser.positions.max(axis=0) + center = (mins + maxs) / 2.0 + extent = np.linalg.norm(maxs - mins) + self.camera.target = center + self.camera.distance = max(extent * 1.2, 1.0) + + # Init Pygame + OpenGL + pygame.init() + pygame.display.set_caption(f"M2 Viewer — {Path(self.m2_path).name}") + pygame.display.gl_set_attribute(pygame.GL_CONTEXT_MAJOR_VERSION, 3) + pygame.display.gl_set_attribute(pygame.GL_CONTEXT_MINOR_VERSION, 3) + pygame.display.gl_set_attribute(pygame.GL_CONTEXT_PROFILE_MASK, + pygame.GL_CONTEXT_PROFILE_CORE) + pygame.display.set_mode((self.width, self.height), DOUBLEBUF | OPENGL | RESIZABLE) + + self.fps_clock = pygame.time.Clock() + self.font = pygame.font.SysFont("monospace", 14) + + import OpenGL.GL as gl + + # Init renderer + self.renderer = M2Renderer(self.parser, self.blp_paths, self.blp_convert) + self.renderer.init_gl() + + gl.glClearColor(0.12, 0.12, 0.18, 1.0) + gl.glEnable(gl.GL_DEPTH_TEST) + + # Main loop + while self.running: + dt = self.fps_clock.tick(60) / 1000.0 + + for event in pygame.event.get(): + if event.type == QUIT: + self.running = False + elif event.type == VIDEORESIZE: + self.width, self.height = event.w, event.h + pygame.display.set_mode((self.width, self.height), + DOUBLEBUF | OPENGL | RESIZABLE) + elif 
event.type == KEYDOWN: + self._handle_key(event.key) + elif event.type == MOUSEBUTTONDOWN: + if event.button == 1: + self._dragging = True + self._last_mouse = event.pos + elif event.button == 3: + self._panning = True + self._last_mouse = event.pos + elif event.button == 4: + self.camera.zoom(1) + elif event.button == 5: + self.camera.zoom(-1) + elif event.type == MOUSEBUTTONUP: + if event.button == 1: + self._dragging = False + elif event.button == 3: + self._panning = False + elif event.type == MOUSEMOTION: + if self._dragging: + dx = event.pos[0] - self._last_mouse[0] + dy = event.pos[1] - self._last_mouse[1] + self.camera.orbit(dx, dy) + self._last_mouse = event.pos + elif self._panning: + dx = event.pos[0] - self._last_mouse[0] + dy = event.pos[1] - self._last_mouse[1] + self.camera.pan(-dx, dy) + self._last_mouse = event.pos + + # Update animation + skinning + if self.anim_system: + self.anim_system.update(dt) + if (len(self.anim_system.bone_matrices) > 0 + and len(self.parser.bone_lookup) > 0): + skinned = self.anim_system.skin_vertices( + self.parser.positions, + self.parser.bone_weights, + self.parser.bone_indices, + self.parser.bone_lookup, + ) + self.renderer.update_vertices(skinned) + + # Render + gl.glViewport(0, 0, self.width, self.height) + gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT) + + aspect = self.width / max(self.height, 1) + proj = perspective(45.0, aspect, 0.01, 5000.0) + view = self.camera.get_view_matrix() + model = np.eye(4, dtype=np.float32) + mvp = proj @ view @ model + + self.renderer.render(mvp, model) + + # HUD overlay + self._draw_hud(pygame, gl) + + pygame.display.flip() + + pygame.quit() + + def _handle_key(self, key): + import pygame + if key == pygame.K_ESCAPE: + self.running = False + elif key == pygame.K_SPACE: + if self.anim_system: + self.anim_system.playing = not self.anim_system.playing + elif key == pygame.K_RIGHT: + if self.anim_system and self.parser.animations: + idx = (self.anim_system.current_seq + 1) 
% len(self.parser.animations) + self.anim_system.set_sequence(idx) + elif key == pygame.K_LEFT: + if self.anim_system and self.parser.animations: + idx = (self.anim_system.current_seq - 1) % len(self.parser.animations) + self.anim_system.set_sequence(idx) + elif key in (pygame.K_PLUS, pygame.K_EQUALS, pygame.K_KP_PLUS): + if self.anim_system: + self.anim_system.speed = min(self.anim_system.speed + 0.25, 5.0) + elif key in (pygame.K_MINUS, pygame.K_KP_MINUS): + if self.anim_system: + self.anim_system.speed = max(self.anim_system.speed - 0.25, 0.0) + elif key == pygame.K_r: + if self.anim_system: + self.anim_system.time_ms = 0.0 + self.anim_system.playing = False + self.anim_system.bone_matrices = np.empty(0) + elif key == pygame.K_w: + if self.renderer: + self.renderer.show_wireframe = not self.renderer.show_wireframe + + def _draw_hud(self, pygame, gl): + """Draw text overlay using Pygame font → texture approach.""" + if not self.font: + return + + lines = [Path(self.m2_path).name] + + n_verts = len(self.parser.positions) + n_tris = len(self.parser.triangles) // 3 + lines.append(f"{n_verts} verts, {n_tris} tris, {len(self.parser.textures)} tex") + + if self.parser.animations and self.anim_system: + anim = self.parser.animations[self.anim_system.current_seq] + name = _ANIM_NAMES.get(anim.anim_id, f"Anim {anim.anim_id}") + state = "Playing" if self.anim_system.playing else "Paused" + lines.append(f"[{self.anim_system.current_seq + 1}/{len(self.parser.animations)}] " + f"{name} ({anim.duration}ms) - {state} x{self.anim_system.speed:.1f}") + else: + lines.append("No animations") + + fps = self.fps_clock.get_fps() if self.fps_clock else 0 + lines.append(f"FPS: {fps:.0f}") + + lines.append("") + lines.append("LMB: orbit | RMB: pan | Scroll: zoom") + lines.append("Space: play/pause | Left/Right: anim | +/-: speed") + lines.append("W: wireframe | R: reset | Esc: quit") + + # Render text to surface, then blit via orthographic projection + # Use a simple texture-based 
approach + line_height = 18 + total_height = len(lines) * line_height + 8 + surf_width = 450 + surf = pygame.Surface((surf_width, total_height), pygame.SRCALPHA) + surf.fill((0, 0, 0, 160)) + + for i, line in enumerate(lines): + text_surf = self.font.render(line, True, (220, 220, 240)) + surf.blit(text_surf, (6, 4 + i * line_height)) + + # Convert to OpenGL texture and draw + text_data = pygame.image.tostring(surf, "RGBA", True) + tex_id = gl.glGenTextures(1) + gl.glBindTexture(gl.GL_TEXTURE_2D, tex_id) + gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, gl.GL_RGBA, surf_width, total_height, + 0, gl.GL_RGBA, gl.GL_UNSIGNED_BYTE, text_data) + gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_NEAREST) + gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_NEAREST) + + # Draw fullscreen quad in ortho — use compatibility approach with glWindowPos + glDrawPixels + # Simpler: use a small shader-less blit via fixed function emulation + # Actually, let's just use the modern approach with a screen quad + self._blit_texture(gl, tex_id, 8, self.height - total_height - 8, surf_width, total_height) + + gl.glDeleteTextures(1, [tex_id]) + + def _blit_texture(self, gl, tex_id, x, y, w, h): + """Blit a texture to screen at (x,y) using a temporary screen-space quad.""" + # Simple blit using glBlitFramebuffer alternative: + # Create a minimal screen-space shader + quad + if not hasattr(self, '_blit_shader'): + blit_vert = """ +#version 330 core +layout(location=0) in vec2 aPos; +layout(location=1) in vec2 aUV; +out vec2 vUV; +void main() { + gl_Position = vec4(aPos, 0.0, 1.0); + vUV = aUV; +} +""" + blit_frag = """ +#version 330 core +in vec2 vUV; +uniform sampler2D uTex; +out vec4 FragColor; +void main() { + FragColor = texture(uTex, vUV); +} +""" + self._blit_shader = self.renderer._compile_program(blit_vert, blit_frag) + self._blit_vao = gl.glGenVertexArrays(1) + self._blit_vbo = gl.glGenBuffers(1) + + # Convert pixel coords to NDC + x0 = 2.0 * x / self.width 
- 1.0 + y0 = 2.0 * y / self.height - 1.0 + x1 = 2.0 * (x + w) / self.width - 1.0 + y1 = 2.0 * (y + h) / self.height - 1.0 + + quad = np.array([ + x0, y0, 0.0, 0.0, + x1, y0, 1.0, 0.0, + x1, y1, 1.0, 1.0, + x0, y0, 0.0, 0.0, + x1, y1, 1.0, 1.0, + x0, y1, 0.0, 1.0, + ], dtype=np.float32) + + gl.glBindVertexArray(self._blit_vao) + gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self._blit_vbo) + gl.glBufferData(gl.GL_ARRAY_BUFFER, quad.nbytes, quad, gl.GL_DYNAMIC_DRAW) + gl.glVertexAttribPointer(0, 2, gl.GL_FLOAT, gl.GL_FALSE, 16, gl.ctypes.c_void_p(0)) + gl.glEnableVertexAttribArray(0) + gl.glVertexAttribPointer(1, 2, gl.GL_FLOAT, gl.GL_FALSE, 16, gl.ctypes.c_void_p(8)) + gl.glEnableVertexAttribArray(1) + + gl.glDisable(gl.GL_DEPTH_TEST) + gl.glEnable(gl.GL_BLEND) + gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA) + + gl.glUseProgram(self._blit_shader) + gl.glActiveTexture(gl.GL_TEXTURE0) + gl.glBindTexture(gl.GL_TEXTURE_2D, tex_id) + gl.glUniform1i(gl.glGetUniformLocation(self._blit_shader, "uTex"), 0) + + gl.glDrawArrays(gl.GL_TRIANGLES, 0, 6) + + gl.glBindVertexArray(0) + gl.glEnable(gl.GL_DEPTH_TEST) + gl.glDisable(gl.GL_BLEND) + + +# --------------------------------------------------------------------------- +# WMO Parser +# --------------------------------------------------------------------------- + +@dataclass +class WMOBatch: + start_index: int = 0 + index_count: int = 0 + material_id: int = 0 + + +@dataclass +class WMOMaterial: + flags: int = 0 + shader: int = 0 + blend_mode: int = 0 + texture1_ofs: int = 0 + texture2_ofs: int = 0 + texture3_ofs: int = 0 + color1: int = 0 + color2: int = 0 + + +@dataclass +class WMOGroup: + positions: np.ndarray = field(default_factory=lambda: np.empty((0, 3), dtype=np.float32)) + normals: np.ndarray = field(default_factory=lambda: np.empty((0, 3), dtype=np.float32)) + uvs: np.ndarray = field(default_factory=lambda: np.empty((0, 2), dtype=np.float32)) + indices: np.ndarray = field(default_factory=lambda: np.empty(0, 
dtype=np.uint16)) + batches: list = field(default_factory=list) + + +class WMOParser: + """Parse WMO root + group files for rendering.""" + + def __init__(self): + self.textures: list[str] = [] + self.texture_offset_map: dict[int, int] = {} # MOTX byte offset -> texture index + self.materials: list[WMOMaterial] = [] + self.groups: list[WMOGroup] = [] + self.n_groups_expected: int = 0 + + def parse_root(self, data: bytes): + """Parse root WMO file for textures and materials.""" + pos = 0 + while pos + 8 <= len(data): + chunk_id = data[pos:pos + 4] + chunk_size = struct.unpack_from(" len(data): + break + + cid = chunk_id if chunk_id[:1] == b"M" else chunk_id[::-1] + + if cid == b"MOHD" and chunk_size >= 16: + # nTextures at +0, nGroups at +4 + self.n_groups_expected = struct.unpack_from(" WMOGroup: + """Parse a WMO group file for geometry.""" + group = WMOGroup() + pos = 0 + + # Scan for MOGP chunk which wraps all sub-chunks + mogp_start = -1 + mogp_end = len(data) + while pos + 8 <= len(data): + chunk_id = data[pos:pos + 4] + chunk_size = struct.unpack_from("= 0 else 0 + pos = scan_start + while pos + 8 <= mogp_end: + chunk_id = data[pos:pos + 4] + chunk_size = struct.unpack_from(" mogp_end: + break + + cid = chunk_id if chunk_id[:1] == b"M" else chunk_id[::-1] + + if cid == b"MOVT": + n = chunk_size // 12 + group.positions = np.zeros((n, 3), dtype=np.float32) + for i in range(n): + group.positions[i] = struct.unpack_from("<3f", data, chunk_start + i * 12) + + elif cid == b"MOVI": + n = chunk_size // 2 + group.indices = np.frombuffer(data, dtype=np.uint16, + count=n, offset=chunk_start).copy() + + elif cid == b"MONR": + n = chunk_size // 12 + group.normals = np.zeros((n, 3), dtype=np.float32) + for i in range(n): + group.normals[i] = struct.unpack_from("<3f", data, chunk_start + i * 12) + + elif cid == b"MOTV": + n = chunk_size // 8 + group.uvs = np.zeros((n, 2), dtype=np.float32) + for i in range(n): + group.uvs[i] = struct.unpack_from("<2f", data, chunk_start + i 
* 8) + + elif cid == b"MOBA": + n = chunk_size // 24 + for i in range(n): + base = chunk_start + i * 24 + batch = WMOBatch() + batch.start_index = struct.unpack_from(" str: + """Resolve a MOTX byte offset to a texture filename.""" + idx = self.texture_offset_map.get(motx_offset) + if idx is not None and idx < len(self.textures): + return self.textures[idx] + return "" + + +# --------------------------------------------------------------------------- +# WMO Renderer +# --------------------------------------------------------------------------- + +class WMORenderer: + """OpenGL 3.3 renderer for WMO models.""" + + def __init__(self, parser: WMOParser, blp_paths: dict[str, str], blp_convert: str): + self.parser = parser + self.blp_paths = blp_paths + self.blp_convert_path = blp_convert + self.show_wireframe = False + + # Per-group GL state + self._group_vaos: list[int] = [] + self._group_vbos: list[int] = [] + self._group_ebos: list[int] = [] + self._group_n_indices: list[int] = [] + self._group_batches: list[list[WMOBatch]] = [] + + self.shader = 0 + self.wire_shader = 0 + self._gl = None + + # material_id -> GL texture id + self._mat_textures: dict[int, int] = {} + + def init_gl(self): + import OpenGL.GL as gl + self._gl = gl + + self.shader = self._compile_program(VERT_SHADER, FRAG_SHADER) + self.wire_shader = self._compile_program(WIRE_VERT, WIRE_FRAG) + + self._load_textures() + + for group in self.parser.groups: + self._upload_group(group) + + def _upload_group(self, group: WMOGroup): + gl = self._gl + n_verts = len(group.positions) + if n_verts == 0: + self._group_vaos.append(0) + self._group_vbos.append(0) + self._group_ebos.append(0) + self._group_n_indices.append(0) + self._group_batches.append([]) + return + + # Interleaved: pos(12) + normal(12) + uv(8) = 32 bytes + vbo_data = np.zeros((n_verts, 8), dtype=np.float32) + vbo_data[:, 0:3] = group.positions + if len(group.normals) == n_verts: + vbo_data[:, 3:6] = group.normals + if len(group.uvs) == n_verts: + 
vbo_data[:, 6:8] = group.uvs + + vao = gl.glGenVertexArrays(1) + vbo = gl.glGenBuffers(1) + ebo = gl.glGenBuffers(1) + + gl.glBindVertexArray(vao) + gl.glBindBuffer(gl.GL_ARRAY_BUFFER, vbo) + gl.glBufferData(gl.GL_ARRAY_BUFFER, vbo_data.nbytes, vbo_data, gl.GL_STATIC_DRAW) + + n_idx = 0 + if len(group.indices) > 0: + idx_data = group.indices.astype(np.uint16) + n_idx = len(idx_data) + gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, ebo) + gl.glBufferData(gl.GL_ELEMENT_ARRAY_BUFFER, idx_data.nbytes, idx_data, gl.GL_STATIC_DRAW) + + stride = 32 + gl.glVertexAttribPointer(0, 3, gl.GL_FLOAT, gl.GL_FALSE, stride, gl.ctypes.c_void_p(0)) + gl.glEnableVertexAttribArray(0) + gl.glVertexAttribPointer(1, 3, gl.GL_FLOAT, gl.GL_FALSE, stride, gl.ctypes.c_void_p(12)) + gl.glEnableVertexAttribArray(1) + gl.glVertexAttribPointer(2, 2, gl.GL_FLOAT, gl.GL_FALSE, stride, gl.ctypes.c_void_p(24)) + gl.glEnableVertexAttribArray(2) + gl.glBindVertexArray(0) + + self._group_vaos.append(vao) + self._group_vbos.append(vbo) + self._group_ebos.append(ebo) + self._group_n_indices.append(n_idx) + self._group_batches.append(group.batches) + + def _load_textures(self): + gl = self._gl + try: + from PIL import Image + except ImportError: + return + + cache_dir = Path(os.path.expanduser("~/.cache/m2_viewer")) + cache_dir.mkdir(parents=True, exist_ok=True) + + loaded: dict[str, int] = {} # filename -> GL tex id + + for mat_idx, mat in enumerate(self.parser.materials): + tex_name = self.parser.get_texture_name(mat.texture1_ofs) + if not tex_name: + continue + + if tex_name in loaded: + self._mat_textures[mat_idx] = loaded[tex_name] + continue + + norm = tex_name.replace("\\", "/") + blp_path = self.blp_paths.get(norm) or self.blp_paths.get(norm.lower()) + if not blp_path: + continue + + cache_key = hashlib.md5(blp_path.encode()).hexdigest() + cached_png = cache_dir / f"{cache_key}.png" + + if not cached_png.exists(): + try: + import tempfile + with tempfile.TemporaryDirectory() as tmpdir: + tmp_blp = 
Path(tmpdir) / Path(blp_path).name + shutil.copy2(blp_path, str(tmp_blp)) + result = subprocess.run( + [self.blp_convert_path, "--to-png", str(tmp_blp)], + capture_output=True, text=True, timeout=10, + ) + output_png = tmp_blp.with_suffix(".png") + if result.returncode != 0 or not output_png.exists(): + continue + shutil.move(str(output_png), str(cached_png)) + except Exception: + continue + + try: + img = Image.open(cached_png) + img = img.transpose(Image.FLIP_TOP_BOTTOM) + if img.mode != "RGBA": + img = img.convert("RGBA") + img_data = np.array(img, dtype=np.uint8) + + tex_id = gl.glGenTextures(1) + gl.glBindTexture(gl.GL_TEXTURE_2D, tex_id) + gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, gl.GL_RGBA, img.width, img.height, + 0, gl.GL_RGBA, gl.GL_UNSIGNED_BYTE, img_data) + gl.glGenerateMipmap(gl.GL_TEXTURE_2D) + gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR_MIPMAP_LINEAR) + gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR) + gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_REPEAT) + gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_REPEAT) + + loaded[tex_name] = tex_id + self._mat_textures[mat_idx] = tex_id + except Exception: + continue + + def _compile_program(self, vert_src: str, frag_src: str) -> int: + gl = self._gl + vs = gl.glCreateShader(gl.GL_VERTEX_SHADER) + gl.glShaderSource(vs, vert_src) + gl.glCompileShader(vs) + fs = gl.glCreateShader(gl.GL_FRAGMENT_SHADER) + gl.glShaderSource(fs, frag_src) + gl.glCompileShader(fs) + prog = gl.glCreateProgram() + gl.glAttachShader(prog, vs) + gl.glAttachShader(prog, fs) + gl.glLinkProgram(prog) + gl.glDeleteShader(vs) + gl.glDeleteShader(fs) + return prog + + def render(self, mvp: np.ndarray, model: np.ndarray): + gl = self._gl + gl.glEnable(gl.GL_DEPTH_TEST) + gl.glDisable(gl.GL_CULL_FACE) + + gl.glUseProgram(self.shader) + mvp_loc = gl.glGetUniformLocation(self.shader, "uMVP") + model_loc = gl.glGetUniformLocation(self.shader, "uModel") 
+ tex_loc = gl.glGetUniformLocation(self.shader, "uTexture") + has_tex_loc = gl.glGetUniformLocation(self.shader, "uHasTexture") + light_loc = gl.glGetUniformLocation(self.shader, "uLightDir") + + gl.glUniformMatrix4fv(mvp_loc, 1, gl.GL_TRUE, mvp) + gl.glUniformMatrix4fv(model_loc, 1, gl.GL_TRUE, model) + gl.glUniform1i(tex_loc, 0) + + light_dir = np.array([0.5, 0.3, 0.8], dtype=np.float32) + light_dir /= np.linalg.norm(light_dir) + gl.glUniform3fv(light_loc, 1, light_dir) + + for gi in range(len(self._group_vaos)): + vao = self._group_vaos[gi] + n_idx = self._group_n_indices[gi] + batches = self._group_batches[gi] + if vao == 0 or n_idx == 0: + continue + + gl.glBindVertexArray(vao) + + if batches: + for batch in batches: + gl_tex = self._mat_textures.get(batch.material_id) + if gl_tex: + gl.glActiveTexture(gl.GL_TEXTURE0) + gl.glBindTexture(gl.GL_TEXTURE_2D, gl_tex) + gl.glUniform1i(has_tex_loc, 1) + else: + gl.glUniform1i(has_tex_loc, 0) + + si = batch.start_index + ic = batch.index_count + if si + ic <= n_idx: + gl.glDrawElements(gl.GL_TRIANGLES, ic, gl.GL_UNSIGNED_SHORT, + gl.ctypes.c_void_p(si * 2)) + else: + gl.glUniform1i(has_tex_loc, 0) + gl.glDrawElements(gl.GL_TRIANGLES, n_idx, gl.GL_UNSIGNED_SHORT, + gl.ctypes.c_void_p(0)) + + gl.glBindVertexArray(0) + + # Wireframe overlay + if self.show_wireframe: + gl.glUseProgram(self.wire_shader) + wire_mvp_loc = gl.glGetUniformLocation(self.wire_shader, "uMVP") + gl.glUniformMatrix4fv(wire_mvp_loc, 1, gl.GL_TRUE, mvp) + gl.glEnable(gl.GL_BLEND) + gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA) + gl.glPolygonMode(gl.GL_FRONT_AND_BACK, gl.GL_LINE) + + for gi in range(len(self._group_vaos)): + vao = self._group_vaos[gi] + n_idx = self._group_n_indices[gi] + if vao == 0 or n_idx == 0: + continue + gl.glBindVertexArray(vao) + gl.glDrawElements(gl.GL_TRIANGLES, n_idx, gl.GL_UNSIGNED_SHORT, + gl.ctypes.c_void_p(0)) + gl.glBindVertexArray(0) + + gl.glPolygonMode(gl.GL_FRONT_AND_BACK, gl.GL_FILL) + 
gl.glDisable(gl.GL_BLEND) + + +# --------------------------------------------------------------------------- +# WMO Viewer Window +# --------------------------------------------------------------------------- + +class WMOViewerWindow: + """Pygame + OpenGL WMO model viewer window.""" + + def __init__(self, wmo_root_path: str, group_paths: list[str], + blp_paths: dict[str, str], blp_convert: str): + self.wmo_root_path = wmo_root_path + self.group_paths = group_paths + self.blp_paths = blp_paths + self.blp_convert = blp_convert + self.parser: WMOParser | None = None + self.renderer: WMORenderer | None = None + self.camera = OrbitCamera() + self.width = 1024 + self.height = 768 + self.running = True + self.fps_clock = None + self.font = None + self._dragging = False + self._panning = False + self._last_mouse = (0, 0) + + def run(self): + import pygame + from pygame.locals import ( + DOUBLEBUF, OPENGL, RESIZABLE, QUIT, KEYDOWN, MOUSEBUTTONDOWN, + MOUSEBUTTONUP, MOUSEMOTION, VIDEORESIZE, + ) + + # Parse WMO + self.parser = WMOParser() + + if self.wmo_root_path and Path(self.wmo_root_path).exists(): + self.parser.parse_root(Path(self.wmo_root_path).read_bytes()) + + total_verts = 0 + total_tris = 0 + for gp in self.group_paths: + if Path(gp).exists(): + group = self.parser.parse_group(Path(gp).read_bytes()) + self.parser.groups.append(group) + total_verts += len(group.positions) + total_tris += len(group.indices) // 3 + + if total_verts == 0: + print("No geometry found in WMO groups") + return + + # Auto-fit camera + all_pos = np.vstack([g.positions for g in self.parser.groups if len(g.positions) > 0]) + mins = all_pos.min(axis=0) + maxs = all_pos.max(axis=0) + center = (mins + maxs) / 2.0 + extent = np.linalg.norm(maxs - mins) + self.camera.target = center + self.camera.distance = max(extent * 1.2, 1.0) + + # Init Pygame + pygame.init() + name = Path(self.wmo_root_path or self.group_paths[0]).stem + pygame.display.set_caption(f"WMO Viewer — {name}") + 
pygame.display.gl_set_attribute(pygame.GL_CONTEXT_MAJOR_VERSION, 3) + pygame.display.gl_set_attribute(pygame.GL_CONTEXT_MINOR_VERSION, 3) + pygame.display.gl_set_attribute(pygame.GL_CONTEXT_PROFILE_MASK, + pygame.GL_CONTEXT_PROFILE_CORE) + pygame.display.set_mode((self.width, self.height), DOUBLEBUF | OPENGL | RESIZABLE) + + self.fps_clock = pygame.time.Clock() + self.font = pygame.font.SysFont("monospace", 14) + + import OpenGL.GL as gl + + self.renderer = WMORenderer(self.parser, self.blp_paths, self.blp_convert) + self.renderer.init_gl() + + gl.glClearColor(0.12, 0.12, 0.18, 1.0) + + while self.running: + self.fps_clock.tick(60) + + for event in pygame.event.get(): + if event.type == QUIT: + self.running = False + elif event.type == VIDEORESIZE: + self.width, self.height = event.w, event.h + pygame.display.set_mode((self.width, self.height), + DOUBLEBUF | OPENGL | RESIZABLE) + elif event.type == KEYDOWN: + if event.key == pygame.K_ESCAPE: + self.running = False + elif event.key == pygame.K_w: + self.renderer.show_wireframe = not self.renderer.show_wireframe + elif event.type == MOUSEBUTTONDOWN: + if event.button == 1: + self._dragging = True + self._last_mouse = event.pos + elif event.button == 3: + self._panning = True + self._last_mouse = event.pos + elif event.button == 4: + self.camera.zoom(1) + elif event.button == 5: + self.camera.zoom(-1) + elif event.type == MOUSEBUTTONUP: + if event.button == 1: + self._dragging = False + elif event.button == 3: + self._panning = False + elif event.type == MOUSEMOTION: + if self._dragging: + dx = event.pos[0] - self._last_mouse[0] + dy = event.pos[1] - self._last_mouse[1] + self.camera.orbit(dx, dy) + self._last_mouse = event.pos + elif self._panning: + dx = event.pos[0] - self._last_mouse[0] + dy = event.pos[1] - self._last_mouse[1] + self.camera.pan(-dx, dy) + self._last_mouse = event.pos + + gl.glViewport(0, 0, self.width, self.height) + gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT) + + aspect = 
self.width / max(self.height, 1) + proj = perspective(45.0, aspect, 0.1, 10000.0) + view = self.camera.get_view_matrix() + model_mat = np.eye(4, dtype=np.float32) + mvp = proj @ view @ model_mat + + self.renderer.render(mvp, model_mat) + + # HUD + self._draw_hud(pygame, gl, total_verts, total_tris) + + pygame.display.flip() + + pygame.quit() + + def _draw_hud(self, pygame, gl, total_verts, total_tris): + if not self.font: + return + name = Path(self.wmo_root_path or self.group_paths[0]).name + lines = [ + name, + f"{len(self.parser.groups)} groups, {total_verts} verts, {total_tris} tris", + f"{len(self.parser.materials)} materials, {len(self.parser.textures)} textures", + f"FPS: {self.fps_clock.get_fps():.0f}", + "", + "LMB: orbit | RMB: pan | Scroll: zoom", + "W: wireframe | Esc: quit", + ] + + line_height = 18 + total_height = len(lines) * line_height + 8 + surf_width = 420 + surf = pygame.Surface((surf_width, total_height), pygame.SRCALPHA) + surf.fill((0, 0, 0, 160)) + for i, line in enumerate(lines): + text_surf = self.font.render(line, True, (220, 220, 240)) + surf.blit(text_surf, (6, 4 + i * line_height)) + + text_data = pygame.image.tostring(surf, "RGBA", True) + tex_id = gl.glGenTextures(1) + gl.glBindTexture(gl.GL_TEXTURE_2D, tex_id) + gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, gl.GL_RGBA, surf_width, total_height, + 0, gl.GL_RGBA, gl.GL_UNSIGNED_BYTE, text_data) + gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_NEAREST) + gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_NEAREST) + + # Blit using the same approach as M2ViewerWindow + if not hasattr(self, '_blit_shader'): + blit_vert = """ +#version 330 core +layout(location=0) in vec2 aPos; +layout(location=1) in vec2 aUV; +out vec2 vUV; +void main() { gl_Position = vec4(aPos, 0.0, 1.0); vUV = aUV; } +""" + blit_frag = """ +#version 330 core +in vec2 vUV; +uniform sampler2D uTex; +out vec4 FragColor; +void main() { FragColor = texture(uTex, vUV); } +""" + 
self._blit_shader = self.renderer._compile_program(blit_vert, blit_frag) + self._blit_vao = gl.glGenVertexArrays(1) + self._blit_vbo = gl.glGenBuffers(1) + + x, y, w, h = 8, self.height - total_height - 8, surf_width, total_height + x0 = 2.0 * x / self.width - 1.0 + y0 = 2.0 * y / self.height - 1.0 + x1 = 2.0 * (x + w) / self.width - 1.0 + y1 = 2.0 * (y + h) / self.height - 1.0 + + quad = np.array([ + x0, y0, 0, 0, x1, y0, 1, 0, x1, y1, 1, 1, + x0, y0, 0, 0, x1, y1, 1, 1, x0, y1, 0, 1, + ], dtype=np.float32) + + gl.glBindVertexArray(self._blit_vao) + gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self._blit_vbo) + gl.glBufferData(gl.GL_ARRAY_BUFFER, quad.nbytes, quad, gl.GL_DYNAMIC_DRAW) + gl.glVertexAttribPointer(0, 2, gl.GL_FLOAT, gl.GL_FALSE, 16, gl.ctypes.c_void_p(0)) + gl.glEnableVertexAttribArray(0) + gl.glVertexAttribPointer(1, 2, gl.GL_FLOAT, gl.GL_FALSE, 16, gl.ctypes.c_void_p(8)) + gl.glEnableVertexAttribArray(1) + + gl.glDisable(gl.GL_DEPTH_TEST) + gl.glEnable(gl.GL_BLEND) + gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA) + gl.glUseProgram(self._blit_shader) + gl.glUniform1i(gl.glGetUniformLocation(self._blit_shader, "uTex"), 0) + gl.glDrawArrays(gl.GL_TRIANGLES, 0, 6) + gl.glBindVertexArray(0) + gl.glEnable(gl.GL_DEPTH_TEST) + gl.glDisable(gl.GL_BLEND) + gl.glDeleteTextures(1, [tex_id]) + + +# --------------------------------------------------------------------------- +# Launch entry points (multiprocessing-safe) +# --------------------------------------------------------------------------- + +def _viewer_main(m2_path: str, blp_paths: dict[str, str], blp_convert: str): + """Entry point for M2 viewer subprocess.""" + viewer = M2ViewerWindow(m2_path, blp_paths, blp_convert) + viewer.run() + + +def _wmo_viewer_main(wmo_root: str, group_paths: list[str], + blp_paths: dict[str, str], blp_convert: str): + """Entry point for WMO viewer subprocess.""" + viewer = WMOViewerWindow(wmo_root, group_paths, blp_paths, blp_convert) + viewer.run() + + +def 
launch_m2_viewer(m2_path: str, blp_paths: dict[str, str], blp_convert: str): + """Launch M2 viewer in a separate process to avoid Tkinter/Pygame conflicts.""" + p = multiprocessing.Process(target=_viewer_main, args=(m2_path, blp_paths, blp_convert), + daemon=True) + p.start() + return p + + +def launch_wmo_viewer(wmo_root: str, group_paths: list[str], + blp_paths: dict[str, str], blp_convert: str): + """Launch WMO viewer in a separate process.""" + p = multiprocessing.Process(target=_wmo_viewer_main, + args=(wmo_root, group_paths, blp_paths, blp_convert), + daemon=True) + p.start() + return p + + +if __name__ == "__main__": + if len(sys.argv) < 2: + print("Usage: python m2_viewer.py [blp_convert_path]") + sys.exit(1) + + file_path = sys.argv[1] + blp_conv = sys.argv[2] if len(sys.argv) > 2 else "" + + if file_path.lower().endswith(".wmo"): + # Detect root vs group and find all group files + p = Path(file_path) + name = p.name.lower() + is_group = len(name) > 8 and name[-8:-4].isdigit() and name[-9] == "_" + + if is_group: + # Derive root from group + stem = p.stem + root_stem = stem.rsplit("_", 1)[0] + root_path = p.parent / f"{root_stem}.wmo" + groups = sorted(p.parent.glob(f"{root_stem}_*.wmo")) + else: + root_path = p + stem = p.stem + groups = sorted(p.parent.glob(f"{stem}_*.wmo")) + + root_str = str(root_path) if root_path.exists() else "" + group_strs = [str(g) for g in groups] + if not group_strs and is_group: + group_strs = [file_path] + + viewer = WMOViewerWindow(root_str, group_strs, {}, blp_conv) + viewer.run() + else: + viewer = M2ViewerWindow(file_path, {}, blp_conv) + viewer.run() From 60c26a17aa381a6e45ff5bbc1c08a3b50b0a346d Mon Sep 17 00:00:00 2001 From: Kelsi Date: Mon, 23 Feb 2026 22:26:17 -0800 Subject: [PATCH 10/10] Fix audio playback not stopping when Stop button clicked Use spawn context for clean subprocess isolation and add kill() fallback after terminate() to ensure the audio process is reliably stopped. 
--- tools/asset_pipeline_gui.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tools/asset_pipeline_gui.py b/tools/asset_pipeline_gui.py index c784ef35..965b36e0 100755 --- a/tools/asset_pipeline_gui.py +++ b/tools/asset_pipeline_gui.py @@ -1800,7 +1800,8 @@ class AssetPipelineGUI: self._browser_stop_audio() try: import multiprocessing - self._audio_proc = multiprocessing.Process( + ctx = multiprocessing.get_context("spawn") + self._audio_proc = ctx.Process( target=_audio_subprocess, args=(str(file_path),), daemon=True) self._audio_proc.start() self._audio_status_var.set("Playing...") @@ -1818,7 +1819,10 @@ class AssetPipelineGUI: proc = getattr(self, "_audio_proc", None) if proc and proc.is_alive(): proc.terminate() - proc.join(timeout=1) + proc.join(timeout=0.5) + if proc.is_alive(): + proc.kill() + proc.join(timeout=0.5) self._audio_proc = None # ── Hex Dump Preview ──