Most data format parsing functions now return a Result type instead of logging and/or throwing when a problem is encountered.
parent 0feb6608d1
commit 94d15b86ec
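For readers following the diff, the shape of the Result type these functions now return can be pieced together from the changes to src/core/Result.ts further down. A minimal TypeScript sketch follows; the Success/Failure type definitions themselves are not shown in this commit, so their exact fields are inferred from how they are constructed and read (for example, callers read `result.value?.quest` without narrowing, which suggests Failure also declares an undefined `value`):

import { Severity } from "./Severity";

export type Problem = {
    readonly severity: Severity;
    readonly ui_message: string;
};

export type Success<T> = {
    readonly success: true;
    readonly value: T;
    readonly problems: readonly Problem[];
};

export type Failure = {
    readonly success: false;
    // Inferred: lets callers write `result.value?.x` without narrowing first.
    readonly value?: undefined;
    readonly problems: readonly Problem[];
};

export type Result<T> = Success<T> | Failure;

// After this commit, problems are passed as rest parameters instead of an optional array.
export function success<T>(value: T, ...problems: readonly Problem[]): Success<T> {
    return { success: true, value, problems };
}

export function failure(...problems: readonly Problem[]): Failure {
    return { success: false, problems };
}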
@ -14,6 +14,8 @@ import { QuestDto } from "../src/hunt_optimizer/dto/QuestDto";
|
||||
import { BoxDropDto, EnemyDropDto } from "../src/hunt_optimizer/dto/drops";
|
||||
import { LogManager } from "../src/core/Logger";
|
||||
import { Severity } from "../src/core/Severity";
|
||||
import { unwrap } from "../src/core/Result";
|
||||
import { get_npc_type } from "../src/core/data_formats/parsing/quest/QuestNpc";
|
||||
|
||||
const logger = LogManager.get("assets_generation/update_ephinea_data");
|
||||
|
||||
@ -112,7 +114,7 @@ function process_quest_dir(path: string, quests: QuestDto[]): void {
|
||||
function process_quest(path: string, quests: QuestDto[]): void {
|
||||
try {
|
||||
const buf = readFileSync(path);
|
||||
const q = parse_qst_to_quest(new BufferCursor(buf, Endianness.Little), true)?.quest;
|
||||
const q = parse_qst_to_quest(new BufferCursor(buf, Endianness.Little), true).value?.quest;
|
||||
|
||||
if (q) {
|
||||
logger.trace(`Processing quest "${q.name}".`);
|
||||
@ -124,8 +126,10 @@ function process_quest(path: string, quests: QuestDto[]): void {
|
||||
const enemy_counts: { [npc_type_code: string]: number } = {};
|
||||
|
||||
for (const npc of q.npcs) {
|
||||
if (npc_data(npc.type).enemy) {
|
||||
enemy_counts[NpcType[npc.type]] = (enemy_counts[NpcType[npc.type]] || 0) + 1;
|
||||
const type = get_npc_type(npc);
|
||||
|
||||
if (npc_data(type).enemy) {
|
||||
enemy_counts[NpcType[type]] = (enemy_counts[NpcType[type]] || 0) + 1;
|
||||
}
|
||||
}
|
||||
|
||||
@ -148,7 +152,7 @@ function load_unitxt(): Unitxt {
|
||||
|
||||
const buf = readFileSync(`${EPHINEA_RESOURCE_DIR}/client/data/unitxt_j.prs`);
|
||||
|
||||
const unitxt = parse_unitxt(new BufferCursor(buf, Endianness.Little));
|
||||
const unitxt = unwrap(parse_unitxt(new BufferCursor(buf, Endianness.Little)));
|
||||
// Strip custom Ephinea items until we have the Ephinea ItemPMT.bin.
|
||||
unitxt[1].splice(177, 50);
|
||||
unitxt[1].splice(639, 59);
|
||||
|
@ -6,6 +6,7 @@ import * as yaml from "yaml";
|
||||
import { Endianness } from "../src/core/data_formats/block/Endianness";
|
||||
import { LogManager } from "../src/core/Logger";
|
||||
import { Severity } from "../src/core/Severity";
|
||||
import { unwrap } from "../src/core/Result";
|
||||
|
||||
const logger = LogManager.get("assets_generation/update_generic_data");
|
||||
|
||||
@ -31,7 +32,7 @@ function extract_player_animations(): void {
|
||||
const buf = readFileSync(`${RESOURCE_DIR}/plymotiondata.rlc`);
|
||||
let i = 0;
|
||||
|
||||
for (const file of parse_rlc(new BufferCursor(buf, Endianness.Big))) {
|
||||
for (const file of unwrap(parse_rlc(new BufferCursor(buf, Endianness.Big)))) {
|
||||
writeFileSync(
|
||||
`${ASSETS_DIR}/player/animation/animation_${(i++).toString().padStart(3, "0")}.njm`,
|
||||
new Uint8Array(file.array_buffer()),
|
||||
@ -46,7 +47,7 @@ function update_opcodes(): void {
|
||||
|
||||
// Add manual code.
|
||||
const opcodes_src = readFileSync(OPCODES_SRC_FILE, {
|
||||
encoding: "UTF-8",
|
||||
encoding: "utf-8",
|
||||
});
|
||||
const file_lines: string[] = [];
|
||||
let in_manual_code = true;
|
||||
@ -69,7 +70,7 @@ function update_opcodes(): void {
|
||||
});
|
||||
|
||||
// Add generated code.
|
||||
const yml = readFileSync(OPCODES_YML_FILE, { encoding: "UTF-8" });
|
||||
const yml = readFileSync(OPCODES_YML_FILE, { encoding: "utf-8" });
|
||||
const input = yaml.parse(yml);
|
||||
const generated_lines: string[] = [];
|
||||
let i = 0;
|
||||
|
@ -1,4 +1,5 @@
|
||||
import { Severity, severity_from_string } from "./Severity";
|
||||
import { basename } from "./util";
|
||||
|
||||
export type LogEntry = {
|
||||
readonly time: Date;
|
||||
@ -113,6 +114,8 @@ export class LogManager {
|
||||
static default_handler: LogHandler = default_log_handler;
|
||||
|
||||
static get(name: string): Logger {
|
||||
name = basename(name);
|
||||
|
||||
let logger = this.loggers.get(name);
|
||||
|
||||
if (!logger) {
|
||||
|
@ -23,21 +23,25 @@ export type Problem = {
|
||||
readonly ui_message: string;
|
||||
};
|
||||
|
||||
export function success<T>(value: T, problems?: readonly Problem[]): Success<T> {
|
||||
export function success<T>(value: T, ...problems: readonly Problem[]): Success<T> {
|
||||
return {
|
||||
success: true,
|
||||
value,
|
||||
problems: problems ?? [],
|
||||
problems,
|
||||
};
|
||||
}
|
||||
|
||||
export function failure(problems?: readonly Problem[]): Failure {
|
||||
export function failure(...problems: readonly Problem[]): Failure {
|
||||
return {
|
||||
success: false,
|
||||
problems: problems ?? [],
|
||||
problems,
|
||||
};
|
||||
}
|
||||
|
||||
export function problem(severity: Severity, ui_message: string): Problem {
|
||||
return { severity, ui_message };
|
||||
}
|
||||
|
||||
/**
|
||||
* "Unwraps" the given result by either return its value if it's a success or throwing an error with
|
||||
* its problems as message if it was a failure.
|
||||
@ -50,13 +54,8 @@ export function unwrap<T>(result: Result<T>): T {
|
||||
}
|
||||
}
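The body of unwrap is not visible in this hunk; a sketch consistent with the doc comment above (the exact formatting of the thrown message is a guess):

export function unwrap<T>(result: Result<T>): T {
    if (result.success) {
        return result.value;
    } else {
        // Turn the accumulated problems into the error message.
        throw new Error(result.problems.map(p => p.ui_message).join("\n"));
    }
}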
|
||||
|
||||
export function result_builder<T>(logger: Logger): ResultBuilder<T> {
|
||||
return new ResultBuilder(logger);
|
||||
}
|
||||
|
||||
/**
|
||||
* Useful for building up a {@link Result} and logging problems at the same time. Use
|
||||
* {@link result_builder} to instantiate.
|
||||
* Useful for building up a {@link Result} and logging problems at the same time.
|
||||
*/
|
||||
export class ResultBuilder<T> {
|
||||
private readonly problems: Problem[] = [];
|
||||
@ -66,8 +65,8 @@ export class ResultBuilder<T> {
|
||||
/**
|
||||
* Add a problem to the problems array and log it with {@link logger}.
|
||||
*/
|
||||
add_problem(severity: Severity, ui_message: string, message: string, cause?: unknown): this {
|
||||
this.logger.log(severity, message, cause);
|
||||
add_problem(severity: Severity, ui_message: string, message?: string, cause?: unknown): this {
|
||||
this.logger.log(severity, message ?? ui_message, cause);
|
||||
this.problems.push({ severity, ui_message });
|
||||
return this;
|
||||
}
|
||||
@ -81,10 +80,10 @@ export class ResultBuilder<T> {
|
||||
}
|
||||
|
||||
success(value: T): Success<T> {
|
||||
return success(value, this.problems);
|
||||
return success(value, ...this.problems);
|
||||
}
|
||||
|
||||
failure(): Failure {
|
||||
return failure(this.problems);
|
||||
return failure(...this.problems);
|
||||
}
|
||||
}
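Taken together, a typical parser in this codebase now looks roughly like the sketch below. parse_example and its cursor reads are illustrative, not part of the commit; the import paths mirror the ones used by the real parsers elsewhere in this diff. Note that add_problem's message parameter is now optional and falls back to ui_message, and that the builder's success/failure spread the collected problems into the variadic helpers above:

import { Cursor } from "../block/cursor/Cursor";
import { LogManager } from "../../Logger";
import { Result, ResultBuilder } from "../../Result";
import { Severity } from "../../Severity";

const logger = LogManager.get("core/data_formats/parsing/example");

function parse_example(cursor: Cursor): Result<number> {
    const rb = new ResultBuilder<number>(logger);

    if (cursor.bytes_left < 4) {
        // ui_message doubles as the log message now that `message` is optional.
        return rb.add_problem(Severity.Error, "File is too small.").failure();
    }

    const value = cursor.u32();

    if (value === 0) {
        // Non-fatal: recorded and logged, but the parse still succeeds.
        rb.add_problem(Severity.Warning, "Value is 0, which is unusual.");
    }

    return rb.success(value);
}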
|
||||
|
@ -5,6 +5,8 @@ import { ResizableBlock } from "../../block/ResizableBlock";
|
||||
import { LogManager } from "../../../Logger";
|
||||
import { browser_supports_webassembly } from "../../../util";
|
||||
import { get_prs_wasm_module } from "./prs_wasm";
|
||||
import { Result, ResultBuilder, success } from "../../../Result";
|
||||
import { Severity } from "../../../Severity";
|
||||
|
||||
const logger = LogManager.get("core/data_formats/compression/prs/decompress");
|
||||
|
||||
@ -13,11 +15,17 @@ const prs_wasm = get_prs_wasm_module();
|
||||
/**
|
||||
* Automatically picks the best available decompression method.
|
||||
*/
|
||||
export function prs_decompress(cursor: Cursor): Cursor {
|
||||
if (browser_supports_webassembly() && prs_wasm) {
|
||||
return prs_wasm.prs_decompress_wasm(cursor);
|
||||
} else {
|
||||
return prs_decompress_js(cursor);
|
||||
export function prs_decompress(cursor: Cursor): Result<Cursor> {
|
||||
try {
|
||||
if (browser_supports_webassembly() && prs_wasm) {
|
||||
return success(prs_wasm.prs_decompress_wasm(cursor));
|
||||
} else {
|
||||
return success(prs_decompress_js(cursor));
|
||||
}
|
||||
} catch (e) {
|
||||
return new ResultBuilder(logger)
|
||||
.add_problem(Severity.Error, "PRS-compressed stream is corrupt.", undefined, e)
|
||||
.failure();
|
||||
}
|
||||
}
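Call sites updated in this commit use prs_decompress's Result in one of two ways: unwrap() where a failure should simply throw (the tests and asset-generation scripts), or add_result() plus an explicit success check where the problems should propagate into the caller's own Result. A condensed sketch of the second style (parse_example is illustrative):

function parse_example(compressed: Cursor): Result<Cursor> {
    const rb = new ResultBuilder<Cursor>(logger);
    const decompressed = prs_decompress(compressed);
    rb.add_result(decompressed); // Copies decompressed.problems into this builder.

    if (!decompressed.success) {
        return rb.failure();
    }

    return rb.success(decompressed.value);
}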
|
||||
|
||||
@ -112,11 +120,11 @@ class Context {
|
||||
|
||||
offset_copy(offset: number, length: number): void {
|
||||
if (offset < -8192 || offset > 0) {
|
||||
logger.error(`offset was ${offset}, should be between -8192 and 0.`);
|
||||
throw new Error(`offset was ${offset}, should be between -8192 and 0.`);
|
||||
}
|
||||
|
||||
if (length < 1 || length > 256) {
|
||||
logger.error(`length was ${length}, should be between 1 and 256.`);
|
||||
throw new Error(`length was ${length}, should be between 1 and 256.`);
|
||||
}
|
||||
|
||||
// The length can be larger than -offset, in that case we copy -offset bytes size/-offset times.
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { Cursor } from "../block/cursor/Cursor";
|
||||
import { LogManager } from "../../Logger";
|
||||
import { Result, result_builder } from "../../Result";
|
||||
import { Result, ResultBuilder } from "../../Result";
|
||||
import { Severity } from "../../Severity";
|
||||
|
||||
const logger = LogManager.get("core/data_formats/parsing/afs");
|
||||
@ -14,7 +14,7 @@ const AFS = 0x00534641;
|
||||
* @returns the contained files
|
||||
*/
|
||||
export function parse_afs(cursor: Cursor): Result<ArrayBuffer[]> {
|
||||
const result = result_builder<ArrayBuffer[]>(logger);
|
||||
const result = new ResultBuilder<ArrayBuffer[]>(logger);
|
||||
|
||||
if (cursor.bytes_left < 8) {
|
||||
return result
|
||||
|
@ -1,5 +1,5 @@
|
||||
import { Cursor } from "../block/cursor/Cursor";
|
||||
import { Result, result_builder } from "../../Result";
|
||||
import { Result, ResultBuilder } from "../../Result";
|
||||
import { LogManager } from "../../Logger";
|
||||
import { Severity } from "../../Severity";
|
||||
|
||||
@ -47,7 +47,7 @@ function parse<T>(
|
||||
chunks: T[],
|
||||
get_chunk: (cursor: Cursor, type: number, size: number) => T,
|
||||
): Result<T[]> {
|
||||
const result = result_builder<T[]>(logger);
|
||||
const result = new ResultBuilder<T[]>(logger);
|
||||
let corrupted = false;
|
||||
|
||||
while (cursor.bytes_left >= 8) {
|
||||
|
@ -136,7 +136,7 @@ function parse_ninja<M extends NjModel>(
|
||||
objects.push(...parse_sibling_objects(chunk.data, parse_model, context));
|
||||
}
|
||||
|
||||
return success(objects, parse_iff_result.problems);
|
||||
return success(objects, ...parse_iff_result.problems);
|
||||
}
|
||||
|
||||
// TODO: cache model and object offsets so we don't reparse the same data.
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { Cursor } from "../../block/cursor/Cursor";
|
||||
import { parse_iff, parse_iff_headers } from "../iff";
|
||||
import { LogManager } from "../../../Logger";
|
||||
import { Result, result_builder } from "../../../Result";
|
||||
import { Result, ResultBuilder } from "../../../Result";
|
||||
import { Severity } from "../../../Severity";
|
||||
|
||||
const logger = LogManager.get("core/data_formats/parsing/ninja/texture");
|
||||
@ -62,7 +62,7 @@ export function parse_xvm(cursor: Cursor): Result<Xvm> {
|
||||
return iff_result;
|
||||
}
|
||||
|
||||
const result = result_builder<Xvm>(logger);
|
||||
const result = new ResultBuilder<Xvm>(logger);
|
||||
result.add_result(iff_result);
|
||||
const chunks = iff_result.value;
|
||||
const header_chunk = chunks.find(chunk => chunk.type === XVMH);
|
||||
|
@ -2,23 +2,32 @@ import { prs_decompress } from "../compression/prs/decompress";
|
||||
import { Cursor } from "../block/cursor/Cursor";
|
||||
import { prc_decrypt } from "../encryption/prc";
|
||||
import { LogManager } from "../../Logger";
|
||||
import { Result, ResultBuilder } from "../../Result";
|
||||
import { Severity } from "../../Severity";
|
||||
|
||||
const logger = LogManager.get("core/data_formats/parsing/prc");
|
||||
|
||||
/**
|
||||
* Decrypts and decompresses a .prc file.
|
||||
*/
|
||||
export function parse_prc(cursor: Cursor): Cursor {
|
||||
export function parse_prc(cursor: Cursor): Result<Cursor> {
|
||||
const rb = new ResultBuilder<Cursor>(logger);
|
||||
// Unencrypted, decompressed size.
|
||||
const size = cursor.u32();
|
||||
const key = cursor.u32();
|
||||
const out = prs_decompress(prc_decrypt(key, cursor));
|
||||
rb.add_result(out);
|
||||
|
||||
if (out.size !== size) {
|
||||
logger.warn(
|
||||
`Size of decrypted, decompressed file was ${out.size} instead of expected ${size}.`,
|
||||
if (!out.success) {
|
||||
return rb.failure();
|
||||
}
|
||||
|
||||
if (out.value.size !== size) {
|
||||
rb.add_problem(
|
||||
Severity.Warning,
|
||||
`Size of decrypted, decompressed file was ${out.value.size} instead of expected ${size}.`,
|
||||
);
|
||||
}
|
||||
|
||||
return out;
|
||||
return rb.success(out.value);
|
||||
}
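Note that the size mismatch is now a non-fatal problem: parse_prc can return a success that still carries a Warning. A caller that wants to surface such problems without failing could do something like the following sketch (load_prc and show_warning are illustrative, not part of the commit):

function load_prc(cursor: Cursor, show_warning: (msg: string) => void): Cursor | undefined {
    const prc_result = parse_prc(cursor);

    // Warnings (e.g. the size mismatch above) are attached even when parsing succeeds.
    for (const p of prc_result.problems) {
        if (p.severity === Severity.Warning) {
            show_warning(p.ui_message);
        }
    }

    return prc_result.value;
}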
|
||||
|
@ -5,13 +5,14 @@ import { ArrayBufferCursor } from "../../block/cursor/ArrayBufferCursor";
|
||||
import { BufferCursor } from "../../block/cursor/BufferCursor";
|
||||
import { parse_bin, write_bin } from "./bin";
|
||||
import { BinFormat } from "./BinFormat";
|
||||
import { unwrap } from "../../../Result";
|
||||
|
||||
/**
|
||||
* Parse a file, convert the resulting structure to BIN again and check whether the end result is equal to the original.
|
||||
*/
|
||||
function test_quest(path: string): void {
|
||||
const orig_buffer = readFileSync(path);
|
||||
const orig_bin = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
|
||||
const orig_bin = unwrap(prs_decompress(new BufferCursor(orig_buffer, Endianness.Little)));
|
||||
const test_buffer = write_bin(parse_bin(orig_bin).bin, BinFormat.BB);
|
||||
const test_bin = new ArrayBufferCursor(test_buffer, Endianness.Little);
|
||||
|
||||
|
@ -4,13 +4,14 @@ import { BufferCursor } from "../../block/cursor/BufferCursor";
|
||||
import { ResizableBlockCursor } from "../../block/cursor/ResizableBlockCursor";
|
||||
import { parse_dat, write_dat } from "./dat";
|
||||
import { readFileSync } from "fs";
|
||||
import { unwrap } from "../../../Result";
|
||||
|
||||
/**
|
||||
* Parse a file, convert the resulting structure to DAT again and check whether the end result is equal to the original.
|
||||
*/
|
||||
test("parse_dat and write_dat", () => {
|
||||
const orig_buffer = readFileSync("test/resources/quest118_e.dat");
|
||||
const orig_dat = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
|
||||
const orig_dat = unwrap(prs_decompress(new BufferCursor(orig_buffer, Endianness.Little)));
|
||||
const test_dat = new ResizableBlockCursor(write_dat(parse_dat(orig_dat)));
|
||||
orig_dat.seek_start(0);
|
||||
|
||||
@ -33,7 +34,7 @@ test("parse_dat and write_dat", () => {
|
||||
*/
|
||||
test("parse, modify and write DAT", () => {
|
||||
const orig_buffer = readFileSync("./test/resources/quest118_e.dat");
|
||||
const orig_dat = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
|
||||
const orig_dat = unwrap(prs_decompress(new BufferCursor(orig_buffer, Endianness.Little)));
|
||||
const test_parsed = parse_dat(orig_dat);
|
||||
orig_dat.seek_start(0);
|
||||
|
||||
|
@ -13,11 +13,12 @@ import {
|
||||
} from "../../asm/instructions";
|
||||
import { get_object_position, get_object_section_id, get_object_type } from "./QuestObject";
|
||||
import { get_npc_position, get_npc_section_id, get_npc_type } from "./QuestNpc";
|
||||
import { unwrap } from "../../../Result";
|
||||
|
||||
test("parse Towards the Future", () => {
|
||||
const buffer = readFileSync("test/resources/quest118_e.qst");
|
||||
const cursor = new BufferCursor(buffer, Endianness.Little);
|
||||
const { quest } = parse_qst_to_quest(cursor)!;
|
||||
const { quest } = unwrap(parse_qst_to_quest(cursor));
|
||||
|
||||
expect(quest.name).toBe("Towards the Future");
|
||||
expect(quest.short_description).toBe("Challenge the\nnew simulator.");
|
||||
@ -73,13 +74,13 @@ round_trip_test(
|
||||
|
||||
function round_trip_test(path: string, file_name: string, contents: Buffer): void {
|
||||
test(`parse_quest and write_quest_qst ${path}`, () => {
|
||||
const { quest: orig_quest, version, online } = parse_qst_to_quest(
|
||||
new BufferCursor(contents, Endianness.Little),
|
||||
)!;
|
||||
const { quest: orig_quest, version, online } = unwrap(
|
||||
parse_qst_to_quest(new BufferCursor(contents, Endianness.Little)),
|
||||
);
|
||||
const test_qst = write_quest_qst(orig_quest, file_name, version, online);
|
||||
const { quest: test_quest } = parse_qst_to_quest(
|
||||
new ArrayBufferCursor(test_qst, Endianness.Little),
|
||||
)!;
|
||||
const { quest: test_quest } = unwrap(
|
||||
parse_qst_to_quest(new ArrayBufferCursor(test_qst, Endianness.Little)),
|
||||
);
|
||||
|
||||
expect(test_quest.name).toBe(orig_quest.name);
|
||||
expect(test_quest.short_description).toBe(orig_quest.short_description);
|
||||
|
@ -24,6 +24,8 @@ import {
|
||||
get_object_script_label_2,
|
||||
QuestObject,
|
||||
} from "./QuestObject";
|
||||
import { Result, ResultBuilder } from "../../../Result";
|
||||
import { Severity } from "../../../Severity";
|
||||
|
||||
const logger = LogManager.get("core/data_formats/parsing/quest");
|
||||
|
||||
@ -31,13 +33,27 @@ export function parse_bin_dat_to_quest(
|
||||
bin_cursor: Cursor,
|
||||
dat_cursor: Cursor,
|
||||
lenient: boolean = false,
|
||||
): Quest | undefined {
|
||||
): Result<Quest> {
|
||||
const rb = new ResultBuilder<Quest>(logger);
|
||||
|
||||
// Decompress and parse files.
|
||||
const bin_decompressed = prs_decompress(bin_cursor);
|
||||
const { bin, format } = parse_bin(bin_decompressed);
|
||||
rb.add_result(bin_decompressed);
|
||||
|
||||
if (!bin_decompressed.success) {
|
||||
return rb.failure();
|
||||
}
|
||||
|
||||
const { bin, format } = parse_bin(bin_decompressed.value);
|
||||
|
||||
const dat_decompressed = prs_decompress(dat_cursor);
|
||||
const dat = parse_dat(dat_decompressed);
|
||||
rb.add_result(dat_decompressed);
|
||||
|
||||
if (!dat_decompressed.success) {
|
||||
return rb.failure();
|
||||
}
|
||||
|
||||
const dat = parse_dat(dat_decompressed.value);
|
||||
const objects = dat.objs.map(({ area_id, data }) => data_to_quest_object(area_id, data));
|
||||
// Initialize NPCs with random episode and correct it later.
|
||||
const npcs = dat.npcs.map(({ area_id, data }) => data_to_quest_npc(Episode.I, area_id, data));
|
||||
@ -46,7 +62,7 @@ export function parse_bin_dat_to_quest(
|
||||
let episode = Episode.I;
|
||||
let map_designations: Map<number, number> = new Map();
|
||||
|
||||
const object_code = parse_object_code(
|
||||
const object_code_result = parse_object_code(
|
||||
bin.object_code,
|
||||
bin.label_offsets,
|
||||
extract_script_entry_points(objects, npcs),
|
||||
@ -54,6 +70,14 @@ export function parse_bin_dat_to_quest(
|
||||
format,
|
||||
);
|
||||
|
||||
rb.add_result(object_code_result);
|
||||
|
||||
if (!object_code_result.success) {
|
||||
return rb.failure();
|
||||
}
|
||||
|
||||
const object_code = object_code_result.value;
|
||||
|
||||
if (object_code.length) {
|
||||
const instruction_segments = object_code.filter(
|
||||
s => s.type === SegmentType.Instructions,
|
||||
@ -69,7 +93,7 @@ export function parse_bin_dat_to_quest(
|
||||
}
|
||||
|
||||
if (label_0_segment) {
|
||||
episode = get_episode(label_0_segment);
|
||||
episode = get_episode(rb, label_0_segment);
|
||||
|
||||
for (const npc of npcs) {
|
||||
npc.episode = episode;
|
||||
@ -77,13 +101,13 @@ export function parse_bin_dat_to_quest(
|
||||
|
||||
map_designations = get_map_designations(instruction_segments, label_0_segment);
|
||||
} else {
|
||||
logger.warn(`No instruction for label 0 found.`);
|
||||
rb.add_problem(Severity.Warning, "No instruction segment for label 0 found.");
|
||||
}
|
||||
} else {
|
||||
logger.warn("File contains no instruction labels.");
|
||||
rb.add_problem(Severity.Warning, "File contains no instruction labels.");
|
||||
}
|
||||
|
||||
return {
|
||||
return rb.success({
|
||||
id: bin.quest_id,
|
||||
language: bin.language,
|
||||
name: bin.quest_name,
|
||||
@ -97,24 +121,31 @@ export function parse_bin_dat_to_quest(
|
||||
object_code,
|
||||
shop_items: bin.shop_items,
|
||||
map_designations,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
export function parse_qst_to_quest(
|
||||
cursor: Cursor,
|
||||
lenient: boolean = false,
|
||||
): { quest: Quest; version: Version; online: boolean } | undefined {
|
||||
// Extract contained .dat and .bin files.
|
||||
const qst = parse_qst(cursor);
|
||||
export type QuestData = {
|
||||
quest: Quest;
|
||||
version: Version;
|
||||
online: boolean;
|
||||
};
|
||||
|
||||
if (!qst) {
|
||||
return;
|
||||
export function parse_qst_to_quest(cursor: Cursor, lenient: boolean = false): Result<QuestData> {
|
||||
const rb = new ResultBuilder<QuestData>(logger);
|
||||
|
||||
// Extract contained .dat and .bin files.
|
||||
const qst_result = parse_qst(cursor);
|
||||
rb.add_result(qst_result);
|
||||
|
||||
if (!qst_result.success) {
|
||||
return rb.failure();
|
||||
}
|
||||
|
||||
const { version, online, files } = qst_result.value;
|
||||
let dat_file: QstContainedFile | undefined;
|
||||
let bin_file: QstContainedFile | undefined;
|
||||
|
||||
for (const file of qst.files) {
|
||||
for (const file of files) {
|
||||
const file_name = file.filename.trim().toLowerCase();
|
||||
|
||||
if (file_name.endsWith(".dat")) {
|
||||
@ -125,22 +156,29 @@ export function parse_qst_to_quest(
|
||||
}
|
||||
|
||||
if (!dat_file) {
|
||||
logger.error("File contains no DAT file.");
|
||||
return;
|
||||
return rb.add_problem(Severity.Error, "File contains no DAT file.").failure();
|
||||
}
|
||||
|
||||
if (!bin_file) {
|
||||
logger.error("File contains no BIN file.");
|
||||
return;
|
||||
return rb.add_problem(Severity.Error, "File contains no BIN file.").failure();
|
||||
}
|
||||
|
||||
const quest = parse_bin_dat_to_quest(
|
||||
const quest_result = parse_bin_dat_to_quest(
|
||||
new ArrayBufferCursor(bin_file.data, Endianness.Little),
|
||||
new ArrayBufferCursor(dat_file.data, Endianness.Little),
|
||||
lenient,
|
||||
);
|
||||
rb.add_result(quest_result);
|
||||
|
||||
return quest && { quest, version: qst.version, online: qst.online };
|
||||
if (!quest_result.success) {
|
||||
return rb.failure();
|
||||
}
|
||||
|
||||
return rb.success({
|
||||
quest: quest_result.value,
|
||||
version,
|
||||
online,
|
||||
});
|
||||
}
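With QuestData exported, callers either unwrap the result (the tests and QuestLoader later in this diff) or branch on it and surface the problems in the UI (the quest editor toolbar controller). Roughly, as a sketch (load_quest is illustrative):

function load_quest(buffer: ArrayBuffer): Quest | undefined {
    const result = parse_qst_to_quest(new ArrayBufferCursor(buffer, Endianness.Little));

    // result.problems can be reported to the user whether or not parsing succeeded.
    if (!result.success) {
        return undefined;
    }

    return result.value.quest;
}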
|
||||
|
||||
export function write_quest_qst(
|
||||
@ -200,7 +238,7 @@ export function write_quest_qst(
|
||||
/**
|
||||
* Defaults to episode I.
|
||||
*/
|
||||
function get_episode(func_0_segment: InstructionSegment): Episode {
|
||||
function get_episode(rb: ResultBuilder<unknown>, func_0_segment: InstructionSegment): Episode {
|
||||
const set_episode = func_0_segment.instructions.find(
|
||||
instruction => instruction.opcode.code === OP_SET_EPISODE.code,
|
||||
);
|
||||
@ -216,7 +254,10 @@ function get_episode(func_0_segment: InstructionSegment): Episode {
|
||||
case 2:
|
||||
return Episode.IV;
|
||||
default:
|
||||
logger.warn(`Unknown episode ${episode} in function 0 set_episode instruction.`);
|
||||
rb.add_problem(
|
||||
Severity.Warning,
|
||||
`Unknown episode ${episode} in function 0 set_episode instruction.`,
|
||||
);
|
||||
return Episode.I;
|
||||
}
|
||||
} else {
|
||||
|
@ -21,6 +21,9 @@ import { LogManager } from "../../../Logger";
|
||||
import { ResizableBlockCursor } from "../../block/cursor/ResizableBlockCursor";
|
||||
import { ResizableBlock } from "../../block/ResizableBlock";
|
||||
import { BinFormat } from "./BinFormat";
|
||||
import { Result, ResultBuilder } from "../../../Result";
|
||||
import { Severity } from "../../../Severity";
|
||||
import { assert } from "../../../util";
|
||||
|
||||
const logger = LogManager.get("core/data_formats/parsing/quest/object_code");
|
||||
|
||||
@ -35,14 +38,114 @@ export function parse_object_code(
|
||||
entry_labels: readonly number[],
|
||||
lenient: boolean,
|
||||
format: BinFormat,
|
||||
): Segment[] {
|
||||
return internal_parse_object_code(
|
||||
new ArrayBufferCursor(object_code, Endianness.Little),
|
||||
new LabelHolder(label_offsets),
|
||||
entry_labels,
|
||||
): Result<Segment[]> {
|
||||
const cursor = new ArrayBufferCursor(object_code, Endianness.Little);
|
||||
const label_holder = new LabelHolder(label_offsets);
|
||||
const result = new ResultBuilder<Segment[]>(logger);
|
||||
const offset_to_segment = new Map<number, Segment>();
|
||||
|
||||
find_and_parse_segments(
|
||||
cursor,
|
||||
label_holder,
|
||||
entry_labels.reduce((m, l) => m.set(l, SegmentType.Instructions), new Map()),
|
||||
offset_to_segment,
|
||||
lenient,
|
||||
format,
|
||||
);
|
||||
|
||||
const segments: Segment[] = [];
|
||||
|
||||
// Put segments in an array and parse left-over segments as data.
|
||||
let offset = 0;
|
||||
|
||||
while (offset < cursor.size) {
|
||||
let segment: Segment | undefined = offset_to_segment.get(offset);
|
||||
|
||||
// If we have a segment, add it. Otherwise create a new data segment.
|
||||
if (!segment) {
|
||||
const labels = label_holder.get_labels(offset);
|
||||
let end_offset: number;
|
||||
|
||||
if (labels) {
|
||||
const info = label_holder.get_info(labels[0])!;
|
||||
end_offset = info.next ? info.next.offset : cursor.size;
|
||||
} else {
|
||||
end_offset = cursor.size;
|
||||
|
||||
for (const label of label_holder.labels) {
|
||||
if (label.offset > offset) {
|
||||
end_offset = label.offset;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cursor.seek_start(offset);
|
||||
parse_data_segment(offset_to_segment, cursor, end_offset, labels || []);
|
||||
|
||||
segment = offset_to_segment.get(offset);
|
||||
|
||||
assert(
|
||||
end_offset > offset,
|
||||
() =>
|
||||
`Next offset ${end_offset} was smaller than or equal to current offset ${offset}.`,
|
||||
);
|
||||
assert(segment, () => `Couldn't create segment for offset ${offset}.`);
|
||||
}
|
||||
|
||||
segments.push(segment);
|
||||
|
||||
switch (segment.type) {
|
||||
case SegmentType.Instructions:
|
||||
for (const instruction of segment.instructions) {
|
||||
offset += instruction_size(instruction, format);
|
||||
}
|
||||
|
||||
break;
|
||||
case SegmentType.Data:
|
||||
offset += segment.data.byteLength;
|
||||
break;
|
||||
case SegmentType.String:
|
||||
// String segments should be multiples of 4 bytes.
|
||||
offset += 4 * Math.ceil((segment.value.length + 1) / 2);
|
||||
break;
|
||||
default:
|
||||
throw new Error(`${SegmentType[segment!.type]} not implemented.`);
|
||||
}
|
||||
}
|
||||
|
||||
// Add unreferenced labels to their segment.
|
||||
for (const { label, offset } of label_holder.labels) {
|
||||
const segment = offset_to_segment.get(offset);
|
||||
|
||||
if (segment) {
|
||||
if (!segment.labels.includes(label)) {
|
||||
segment.labels.push(label);
|
||||
segment.labels.sort((a, b) => a - b);
|
||||
}
|
||||
} else {
|
||||
result.add_problem(
|
||||
Severity.Warning,
|
||||
`Label ${label} doesn't point to anything.`,
|
||||
`Label ${label} with offset ${offset} doesn't point to anything.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Sanity check parsed object code.
|
||||
if (cursor.size !== offset) {
|
||||
result.add_problem(
|
||||
Severity.Error,
|
||||
"The script code is corrupt.",
|
||||
`Expected to parse ${cursor.size} bytes but parsed ${offset} instead.`,
|
||||
);
|
||||
|
||||
if (!lenient) {
|
||||
return result.failure();
|
||||
}
|
||||
}
|
||||
|
||||
return result.success(segments);
|
||||
}
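The lenient flag now only decides whether the corruption problem above is fatal: with lenient set, the function still returns a success, but an Error-severity problem rides along with the parsed segments. A caller that tolerates corrupt object code might look like this sketch (bin and logger are assumed from the surrounding context, as in the round-trip tests further down):

const result = parse_object_code(bin.object_code, bin.label_offsets, [0], true, BinFormat.BB);

if (result.success) {
    // Even on success, an Error-severity problem means the object code was corrupt and
    // the returned segments may be incomplete.
    if (result.problems.some(p => p.severity === Severity.Error)) {
        logger.warn("Object code parsed with errors; segments may be incomplete.");
    }

    const segments: Segment[] = result.value;
    // ... use segments ...
}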
|
||||
|
||||
export function write_object_code(
|
||||
@ -166,120 +269,6 @@ export function write_object_code(
|
||||
return { object_code: cursor.seek_start(0).array_buffer(), label_offsets };
|
||||
}
|
||||
|
||||
function internal_parse_object_code(
|
||||
cursor: Cursor,
|
||||
label_holder: LabelHolder,
|
||||
entry_labels: readonly number[],
|
||||
lenient: boolean,
|
||||
format: BinFormat,
|
||||
): Segment[] {
|
||||
const offset_to_segment = new Map<number, Segment>();
|
||||
|
||||
find_and_parse_segments(
|
||||
cursor,
|
||||
label_holder,
|
||||
entry_labels.reduce((m, l) => m.set(l, SegmentType.Instructions), new Map()),
|
||||
offset_to_segment,
|
||||
lenient,
|
||||
format,
|
||||
);
|
||||
|
||||
const segments: Segment[] = [];
|
||||
|
||||
// Put segments in an array and parse left-over segments as data.
|
||||
let offset = 0;
|
||||
|
||||
while (offset < cursor.size) {
|
||||
let segment: Segment | undefined = offset_to_segment.get(offset);
|
||||
|
||||
// If we have a segment, add it. Otherwise create a new data segment.
|
||||
if (!segment) {
|
||||
const labels = label_holder.get_labels(offset);
|
||||
let end_offset: number;
|
||||
|
||||
if (labels) {
|
||||
const info = label_holder.get_info(labels[0])!;
|
||||
end_offset = info.next ? info.next.offset : cursor.size;
|
||||
} else {
|
||||
end_offset = cursor.size;
|
||||
|
||||
for (const label of label_holder.labels) {
|
||||
if (label.offset > offset) {
|
||||
end_offset = label.offset;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cursor.seek_start(offset);
|
||||
parse_data_segment(offset_to_segment, cursor, end_offset, labels || []);
|
||||
|
||||
segment = offset_to_segment.get(offset);
|
||||
|
||||
// Should never happen.
|
||||
if (end_offset <= offset) {
|
||||
logger.error(
|
||||
`Next offset ${end_offset} was smaller than or equal to current offset ${offset}.`,
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
// Should never happen either.
|
||||
if (!segment) {
|
||||
logger.error(`Couldn't create segment for offset ${offset}.`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
segments.push(segment);
|
||||
|
||||
switch (segment.type) {
|
||||
case SegmentType.Instructions:
|
||||
for (const instruction of segment.instructions) {
|
||||
offset += instruction_size(instruction, format);
|
||||
}
|
||||
|
||||
break;
|
||||
case SegmentType.Data:
|
||||
offset += segment.data.byteLength;
|
||||
break;
|
||||
case SegmentType.String:
|
||||
// String segments should be multiples of 4 bytes.
|
||||
offset += 4 * Math.ceil((segment.value.length + 1) / 2);
|
||||
break;
|
||||
default:
|
||||
throw new Error(`${SegmentType[segment!.type]} not implemented.`);
|
||||
}
|
||||
}
|
||||
|
||||
// Add unreferenced labels to their segment.
|
||||
for (const { label, offset } of label_holder.labels) {
|
||||
const segment = offset_to_segment.get(offset);
|
||||
|
||||
if (segment) {
|
||||
if (!segment.labels.includes(label)) {
|
||||
segment.labels.push(label);
|
||||
segment.labels.sort((a, b) => a - b);
|
||||
}
|
||||
} else {
|
||||
logger.warn(`Label ${label} with offset ${offset} does not point to anything.`);
|
||||
}
|
||||
}
|
||||
|
||||
// Sanity check parsed object code.
|
||||
if (cursor.size !== offset) {
|
||||
const message = `Expected to parse ${cursor.size} bytes but parsed ${offset} instead.`;
|
||||
|
||||
if (lenient) {
|
||||
logger.error(message);
|
||||
} else {
|
||||
throw new Error(message);
|
||||
}
|
||||
}
|
||||
|
||||
return segments;
|
||||
}
|
||||
|
||||
function find_and_parse_segments(
|
||||
cursor: Cursor,
|
||||
label_holder: LabelHolder,
|
||||
@ -660,15 +649,15 @@ class LabelHolder {
|
||||
/**
|
||||
* Labels and their offset sorted by offset and then label.
|
||||
*/
|
||||
labels: { label: number; offset: number }[] = [];
|
||||
readonly labels: { label: number; offset: number }[] = [];
|
||||
/**
|
||||
* Mapping of labels to their offset and index into labels.
|
||||
*/
|
||||
private label_map: Map<number, { offset: number; index: number }> = new Map();
|
||||
private readonly label_map: Map<number, { offset: number; index: number }> = new Map();
|
||||
/**
|
||||
* Mapping of offsets to lists of labels.
|
||||
*/
|
||||
private offset_map: Map<number, number[]> = new Map();
|
||||
private readonly offset_map: Map<number, number[]> = new Map();
|
||||
|
||||
constructor(label_offsets: readonly number[]) {
|
||||
// Populate the main label list.
|
||||
|
@ -5,10 +5,11 @@ import { BufferCursor } from "../../block/cursor/BufferCursor";
|
||||
import { ArrayBufferCursor } from "../../block/cursor/ArrayBufferCursor";
|
||||
import * as fs from "fs";
|
||||
import { Version } from "./Version";
|
||||
import { unwrap } from "../../../Result";
|
||||
|
||||
test("Parse a GC quest.", () => {
|
||||
const buf = fs.readFileSync("test/resources/lost_heat_sword_gc.qst");
|
||||
const qst = parse_qst(new BufferCursor(buf, Endianness.Little));
|
||||
const qst = unwrap(parse_qst(new BufferCursor(buf, Endianness.Little)));
|
||||
|
||||
expect(qst).toBeDefined();
|
||||
expect(qst!.version).toBe(Version.GC);
|
||||
@ -28,7 +29,7 @@ test("Parse a GC quest.", () => {
|
||||
test("parse_qst and write_qst", () => {
|
||||
walk_qst_files((_file_path, _file_name, file_content) => {
|
||||
const orig_qst = new BufferCursor(file_content, Endianness.Little);
|
||||
const orig_quest = parse_qst(orig_qst);
|
||||
const orig_quest = unwrap(parse_qst(orig_qst));
|
||||
|
||||
if (orig_quest) {
|
||||
const test_qst = new ArrayBufferCursor(write_qst(orig_quest), Endianness.Little);
|
||||
|
@ -7,6 +7,8 @@ import { ResizableBlock } from "../../block/ResizableBlock";
|
||||
import { assert, basename, defined } from "../../../util";
|
||||
import { LogManager } from "../../../Logger";
|
||||
import { Version } from "./Version";
|
||||
import { Result, ResultBuilder } from "../../../Result";
|
||||
import { Severity } from "../../../Severity";
|
||||
|
||||
const logger = LogManager.get("core/data_formats/parsing/quest/qst");
|
||||
|
||||
@ -41,18 +43,22 @@ export type QstContent = {
|
||||
|
||||
/**
|
||||
* Low level parsing function for .qst files.
|
||||
* Can only read the Blue Burst format.
|
||||
*/
|
||||
export function parse_qst(cursor: Cursor): QstContent | undefined {
|
||||
export function parse_qst(cursor: Cursor): Result<QstContent> {
|
||||
const result = new ResultBuilder<QstContent>(logger);
|
||||
|
||||
// A .qst file contains two headers that describe the embedded .dat and .bin files.
|
||||
// Read headers and contained files.
|
||||
const headers = parse_headers(cursor);
|
||||
|
||||
if (headers.length < 2) {
|
||||
logger.error(
|
||||
`Corrupt .qst file, expected at least 2 headers but only found ${headers.length}.`,
|
||||
);
|
||||
return undefined;
|
||||
return result
|
||||
.add_problem(
|
||||
Severity.Error,
|
||||
"This .qst file is corrupt.",
|
||||
`Corrupt .qst file, expected at least 2 headers but only found ${headers.length}.`,
|
||||
)
|
||||
.failure();
|
||||
}
|
||||
|
||||
let version: Version | undefined = undefined;
|
||||
@ -60,23 +66,29 @@ export function parse_qst(cursor: Cursor): QstContent | undefined {
|
||||
|
||||
for (const header of headers) {
|
||||
if (version != undefined && header.version !== version) {
|
||||
logger.error(
|
||||
`Corrupt .qst file, header version ${Version[header.version]} for file ${
|
||||
header.filename
|
||||
} doesn't match the previous header's version ${Version[version]}.`,
|
||||
);
|
||||
return undefined;
|
||||
return result
|
||||
.add_problem(
|
||||
Severity.Error,
|
||||
"This .qst file is corrupt.",
|
||||
`Corrupt .qst file, header version ${Version[header.version]} for file ${
|
||||
header.filename
|
||||
} doesn't match the previous header's version ${Version[version]}.`,
|
||||
)
|
||||
.failure();
|
||||
}
|
||||
|
||||
if (online != undefined && header.online !== online) {
|
||||
logger.error(
|
||||
`Corrupt .qst file, header type ${
|
||||
header.online ? '"online"' : '"download"'
|
||||
} for file ${header.filename} doesn't match the previous header's type ${
|
||||
online ? '"online"' : '"download"'
|
||||
}.`,
|
||||
);
|
||||
return undefined;
|
||||
return result
|
||||
.add_problem(
|
||||
Severity.Error,
|
||||
"This .qst file is corrupt.",
|
||||
`Corrupt .qst file, header type ${
|
||||
header.online ? '"online"' : '"download"'
|
||||
} for file ${header.filename} doesn't match the previous header's type ${
|
||||
online ? '"online"' : '"download"'
|
||||
}.`,
|
||||
)
|
||||
.failure();
|
||||
}
|
||||
|
||||
version = header.version;
|
||||
@ -86,13 +98,22 @@ export function parse_qst(cursor: Cursor): QstContent | undefined {
|
||||
defined(version, "version");
|
||||
defined(online, "online");
|
||||
|
||||
const files = parse_files(cursor, version, new Map(headers.map(h => [h.filename, h])));
|
||||
const files: Result<QstContainedFile[]> = parse_files(
|
||||
cursor,
|
||||
version,
|
||||
new Map(headers.map(h => [h.filename, h])),
|
||||
);
|
||||
result.add_result(files);
|
||||
|
||||
return {
|
||||
if (!files.success) {
|
||||
return result.failure();
|
||||
}
|
||||
|
||||
return result.success({
|
||||
version,
|
||||
online,
|
||||
files,
|
||||
};
|
||||
files: files.value,
|
||||
});
|
||||
}
|
||||
|
||||
export function write_qst({ version, online, files }: QstContent): ArrayBuffer {
|
||||
@ -123,9 +144,10 @@ export function write_qst({ version, online, files }: QstContent): ArrayBuffer {
|
||||
write_file_headers(cursor, files, version, online, file_header_size);
|
||||
write_file_chunks(cursor, files, version);
|
||||
|
||||
if (cursor.position !== total_size) {
|
||||
throw new Error(`Expected a final file size of ${total_size}, but got ${cursor.position}.`);
|
||||
}
|
||||
assert(
|
||||
cursor.position === total_size,
|
||||
() => `Expected a final file size of ${total_size}, but got ${cursor.position}.`,
|
||||
);
|
||||
|
||||
return buffer;
|
||||
}
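Several throw sites in this file are replaced with the assert helper from core/util. Its definition is not part of this diff; a sketch consistent with how it is called here (a boolean or possibly-undefined condition plus a lazily evaluated message):

export function assert(condition: unknown, message?: string | (() => string)): asserts condition {
    if (!condition) {
        throw new Error(typeof message === "function" ? message() : message);
    }
}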
|
||||
@ -266,7 +288,9 @@ function parse_files(
|
||||
cursor: Cursor,
|
||||
version: Version,
|
||||
headers: Map<string, QstHeader>,
|
||||
): QstContainedFile[] {
|
||||
): Result<QstContainedFile[]> {
|
||||
const result = new ResultBuilder<QstContainedFile[]>(logger);
|
||||
|
||||
// Files are interleaved in 1056 byte chunks.
|
||||
// Each chunk has a 20 or 24 byte header, 1024 byte data segment and an 4 or 8 byte trailer.
|
||||
const files = new Map<
|
||||
@ -338,7 +362,8 @@ function parse_files(
|
||||
}
|
||||
|
||||
if (file.chunk_nos.has(chunk_no)) {
|
||||
logger.warn(
|
||||
result.add_problem(
|
||||
Severity.Warning,
|
||||
`File chunk number ${chunk_no} of file ${file_name} was already encountered, overwriting previous chunk.`,
|
||||
);
|
||||
} else {
|
||||
@ -350,7 +375,8 @@ function parse_files(
|
||||
cursor.seek(-CHUNK_BODY_SIZE - 4);
|
||||
|
||||
if (size > CHUNK_BODY_SIZE) {
|
||||
logger.warn(
|
||||
result.add_problem(
|
||||
Severity.Warning,
|
||||
`Data segment size of ${size} is larger than expected maximum size, reading just ${CHUNK_BODY_SIZE} bytes.`,
|
||||
);
|
||||
size = CHUNK_BODY_SIZE;
|
||||
@ -364,17 +390,17 @@ function parse_files(
|
||||
// Skip the padding and the trailer.
|
||||
cursor.seek(CHUNK_BODY_SIZE + trailer_size - data.size);
|
||||
|
||||
if (cursor.position !== start_position + chunk_size) {
|
||||
throw new Error(
|
||||
assert(
|
||||
cursor.position === start_position + chunk_size,
|
||||
() =>
|
||||
`Read ${
|
||||
cursor.position - start_position
|
||||
} file chunk message bytes instead of expected ${chunk_size}.`,
|
||||
);
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
if (cursor.bytes_left) {
|
||||
logger.warn(`${cursor.bytes_left} Bytes left in file.`);
|
||||
result.add_problem(Severity.Warning, `${cursor.bytes_left} Bytes left in file.`);
|
||||
}
|
||||
|
||||
for (const file of files.values()) {
|
||||
@ -384,7 +410,8 @@ function parse_files(
|
||||
|
||||
// Check whether the expected size was correct.
|
||||
if (file.expected_size != null && file.cursor.size !== file.expected_size) {
|
||||
logger.warn(
|
||||
result.add_problem(
|
||||
Severity.Warning,
|
||||
`File ${file.name} has an actual size of ${file.cursor.size} instead of the expected size ${file.expected_size}.`,
|
||||
);
|
||||
}
|
||||
@ -395,7 +422,10 @@ function parse_files(
|
||||
|
||||
for (let chunk_no = 0; chunk_no < expected_chunk_count; ++chunk_no) {
|
||||
if (!file.chunk_nos.has(chunk_no)) {
|
||||
logger.warn(`File ${file.name} is missing chunk ${chunk_no}.`);
|
||||
result.add_problem(
|
||||
Severity.Warning,
|
||||
`File ${file.name} is missing chunk ${chunk_no}.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -412,7 +442,7 @@ function parse_files(
|
||||
});
|
||||
}
|
||||
|
||||
return contained_files;
|
||||
return result.success(contained_files);
|
||||
}
|
||||
|
||||
function write_file_headers(
|
||||
@ -530,11 +560,11 @@ function write_file_chunks(
|
||||
for (const file_to_chunk of files_to_chunk) {
|
||||
const expected_chunks = Math.ceil(file_to_chunk.data.size / CHUNK_BODY_SIZE);
|
||||
|
||||
if (file_to_chunk.no !== expected_chunks) {
|
||||
throw new Error(
|
||||
assert(
|
||||
file_to_chunk.no === expected_chunks,
|
||||
() =>
|
||||
`Expected to write ${expected_chunks} chunks for file "${file_to_chunk.name}" but ${file_to_chunk.no} where written.`,
|
||||
);
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2,6 +2,8 @@ import { Endianness } from "../block/Endianness";
|
||||
import { Cursor } from "../block/cursor/Cursor";
|
||||
import { parse_prc } from "./prc";
|
||||
import { LogManager } from "../../Logger";
|
||||
import { Result, ResultBuilder } from "../../Result";
|
||||
import { Severity } from "../../Severity";
|
||||
|
||||
const logger = LogManager.get("core/data_formats/parsing/rlc");
|
||||
const MARKER = "RelChunkVer0.20";
|
||||
@ -11,11 +13,16 @@ const MARKER = "RelChunkVer0.20";
|
||||
*
|
||||
* @returns the contained files, decrypted and decompressed.
|
||||
*/
|
||||
export function parse_rlc(cursor: Cursor): Cursor[] {
|
||||
export function parse_rlc(cursor: Cursor): Result<Cursor[]> {
|
||||
const rb = new ResultBuilder<Cursor[]>(logger);
|
||||
const marker = cursor.string_ascii(16, true, true);
|
||||
|
||||
if (marker !== MARKER) {
|
||||
logger.warn(`First 16 bytes where "${marker}" instead of expected "${MARKER}".`);
|
||||
rb.add_problem(
|
||||
Severity.Warning,
|
||||
"This file doesn't seem to be an RLC file.",
|
||||
`First 16 bytes where "${marker}" instead of expected "${MARKER}".`,
|
||||
);
|
||||
}
|
||||
|
||||
const table_size = cursor.u32();
|
||||
@ -33,10 +40,18 @@ export function parse_rlc(cursor: Cursor): Cursor[] {
|
||||
const file = cursor.take(size);
|
||||
file.endianness = Endianness.Little;
|
||||
file.seek_start(0);
|
||||
files.push(parse_prc(file));
|
||||
|
||||
const prc_result = parse_prc(file);
|
||||
rb.add_result(prc_result);
|
||||
|
||||
if (!prc_result.success) {
|
||||
return rb.failure();
|
||||
}
|
||||
|
||||
files.push(prc_result.value);
|
||||
|
||||
cursor.seek_start(prev_pos);
|
||||
}
|
||||
|
||||
return files;
|
||||
return rb.success(files);
|
||||
}
|
||||
|
@ -1,11 +1,18 @@
|
||||
import { prs_decompress } from "../compression/prs/decompress";
|
||||
import { Cursor } from "../block/cursor/Cursor";
|
||||
import { Result, success } from "../../Result";
|
||||
|
||||
export type Unitxt = string[][];
|
||||
|
||||
export function parse_unitxt(buf: Cursor, compressed: boolean = true): Unitxt {
|
||||
export function parse_unitxt(buf: Cursor, compressed: boolean = true): Result<Unitxt> {
|
||||
if (compressed) {
|
||||
buf = prs_decompress(buf);
|
||||
const decompression_result = prs_decompress(buf);
|
||||
|
||||
if (!decompression_result.success) {
|
||||
return decompression_result;
|
||||
}
|
||||
|
||||
buf = decompression_result.value;
|
||||
}
|
||||
|
||||
const category_count = buf.u32();
|
||||
@ -29,5 +36,5 @@ export function parse_unitxt(buf: Cursor, compressed: boolean = true): Unitxt {
|
||||
}
|
||||
}
|
||||
|
||||
return categories;
|
||||
return success(categories);
|
||||
}
|
||||
|
@ -20,7 +20,7 @@ import { LogManager } from "../../core/Logger";
|
||||
import { basename } from "../../core/util";
|
||||
import { Version } from "../../core/data_formats/parsing/quest/Version";
|
||||
import { WritableProperty } from "../../core/observable/property/WritableProperty";
|
||||
import { failure, Result } from "../../core/Result";
|
||||
import { failure, problem, Result } from "../../core/Result";
|
||||
import { Severity } from "../../core/Severity";
|
||||
import { Quest } from "../../core/data_formats/parsing/quest/Quest";
|
||||
import { QuestLoader } from "../loading/QuestLoader";
|
||||
@ -151,12 +151,13 @@ export class QuestEditorToolBarController extends Controller {
|
||||
const parse_result = parse_qst_to_quest(
|
||||
new ArrayBufferCursor(buffer, Endianness.Little),
|
||||
);
|
||||
if (!parse_result || !parse_result.quest) {
|
||||
throw new Error("Couldn't parse quest file.");
|
||||
this.set_result(parse_result);
|
||||
|
||||
if (parse_result.success) {
|
||||
quest = parse_result.value.quest;
|
||||
this.set_version(parse_result.value.version);
|
||||
this.set_filename(basename(qst.name));
|
||||
}
|
||||
quest = parse_result.quest;
|
||||
this.set_version(parse_result.version);
|
||||
this.set_filename(basename(qst.name));
|
||||
} else {
|
||||
const bin = files.find(f => f.name.toLowerCase().endsWith(".bin"));
|
||||
const dat = files.find(f => f.name.toLowerCase().endsWith(".dat"));
|
||||
@ -164,25 +165,36 @@ export class QuestEditorToolBarController extends Controller {
|
||||
if (bin && dat) {
|
||||
const bin_buffer = await read_file(bin);
|
||||
const dat_buffer = await read_file(dat);
|
||||
quest = parse_bin_dat_to_quest(
|
||||
const parse_result = parse_bin_dat_to_quest(
|
||||
new ArrayBufferCursor(bin_buffer, Endianness.Little),
|
||||
new ArrayBufferCursor(dat_buffer, Endianness.Little),
|
||||
);
|
||||
if (!quest) {
|
||||
throw new Error("Couldn't parse .bin or .dat file.");
|
||||
this.set_result(parse_result);
|
||||
|
||||
if (parse_result.success) {
|
||||
quest = parse_result.value;
|
||||
this.set_filename(basename(bin.name || dat.name));
|
||||
}
|
||||
this.set_filename(basename(bin.name || dat.name));
|
||||
} else {
|
||||
throw new Error("Please select one .qst file or one .bin and one .dat file.");
|
||||
this.set_result(
|
||||
failure(
|
||||
problem(
|
||||
Severity.Error,
|
||||
"Please select a .qst file or one .bin and one .dat file.",
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
await this.quest_editor_store.set_current_quest(
|
||||
quest && convert_quest_to_model(this.area_store, quest),
|
||||
);
|
||||
if (quest) {
|
||||
await this.quest_editor_store.set_current_quest(
|
||||
convert_quest_to_model(this.area_store, quest),
|
||||
);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error("Couldn't read file.", e);
|
||||
this.set_result(failure([{ severity: Severity.Error, ui_message: e.message }]));
|
||||
this.set_result(failure(problem(Severity.Error, e.message)));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -8,6 +8,7 @@ import { ArrayBufferCursor } from "../../core/data_formats/block/cursor/ArrayBuf
|
||||
import { Endianness } from "../../core/data_formats/block/Endianness";
|
||||
import { assert } from "../../core/util";
|
||||
import { Quest } from "../../core/data_formats/parsing/quest/Quest";
|
||||
import { unwrap } from "../../core/Result";
|
||||
|
||||
export class QuestLoader implements Disposable {
|
||||
private readonly cache = new LoadingCache<string, ArrayBuffer>();
|
||||
@ -29,7 +30,9 @@ export class QuestLoader implements Disposable {
|
||||
return this.cache
|
||||
.get_or_set(path, () => this.http_client.get(`/quests${path}`).array_buffer())
|
||||
.then(buffer => {
|
||||
const result = parse_qst_to_quest(new ArrayBufferCursor(buffer, Endianness.Little));
|
||||
const result = unwrap(
|
||||
parse_qst_to_quest(new ArrayBufferCursor(buffer, Endianness.Little)),
|
||||
);
|
||||
assert(result, () => `Quest "${path}" can't be parsed.`);
|
||||
return result.quest;
|
||||
});
|
||||
|
@ -26,6 +26,7 @@ import {
|
||||
write_object_code,
|
||||
} from "../../core/data_formats/parsing/quest/object_code";
|
||||
import { BinFormat } from "../../core/data_formats/parsing/quest/BinFormat";
|
||||
import { unwrap } from "../../core/Result";
|
||||
|
||||
test("vararg instructions should be disassembled correctly", () => {
|
||||
const asm = disassemble([
|
||||
@ -93,14 +94,10 @@ test("va list instructions should be disassembled correctly", () => {
|
||||
// Round-trip test.
|
||||
test("assembling disassembled object code with manual stack management should result in the same IR", () => {
|
||||
const orig_buffer = readFileSync("test/resources/quest27_e.bin");
|
||||
const orig_bytes = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
|
||||
const orig_bytes = unwrap(prs_decompress(new BufferCursor(orig_buffer, Endianness.Little)));
|
||||
const { bin } = parse_bin(orig_bytes);
|
||||
const orig_object_code = parse_object_code(
|
||||
bin.object_code,
|
||||
bin.label_offsets,
|
||||
[0],
|
||||
false,
|
||||
BinFormat.BB,
|
||||
const orig_object_code = unwrap(
|
||||
parse_object_code(bin.object_code, bin.label_offsets, [0], false, BinFormat.BB),
|
||||
);
|
||||
|
||||
const { object_code, warnings, errors } = assemble(disassemble(orig_object_code, true), true);
|
||||
@ -114,14 +111,10 @@ test("assembling disassembled object code with manual stack management should re
|
||||
// Round-trip test.
|
||||
test("assembling disassembled object code with automatic stack management should result in the same IR", () => {
|
||||
const orig_buffer = readFileSync("test/resources/quest27_e.bin");
|
||||
const orig_bytes = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
|
||||
const orig_bytes = unwrap(prs_decompress(new BufferCursor(orig_buffer, Endianness.Little)));
|
||||
const { bin } = parse_bin(orig_bytes);
|
||||
const orig_object_code = parse_object_code(
|
||||
bin.object_code,
|
||||
bin.label_offsets,
|
||||
[0],
|
||||
false,
|
||||
BinFormat.BB,
|
||||
const orig_object_code = unwrap(
|
||||
parse_object_code(bin.object_code, bin.label_offsets, [0], false, BinFormat.BB),
|
||||
);
|
||||
|
||||
const { object_code, warnings, errors } = assemble(disassemble(orig_object_code, false), false);
|
||||
@ -135,14 +128,10 @@ test("assembling disassembled object code with automatic stack management should
|
||||
// Round-trip test.
|
||||
test("assembling disassembled object code with manual stack management should result in the same object code", () => {
|
||||
const orig_buffer = readFileSync("test/resources/quest27_e.bin");
|
||||
const orig_bytes = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
|
||||
const orig_bytes = unwrap(prs_decompress(new BufferCursor(orig_buffer, Endianness.Little)));
|
||||
const { bin, format } = parse_bin(orig_bytes);
|
||||
const orig_object_code = parse_object_code(
|
||||
bin.object_code,
|
||||
bin.label_offsets,
|
||||
[0],
|
||||
false,
|
||||
BinFormat.BB,
|
||||
const orig_object_code = unwrap(
|
||||
parse_object_code(bin.object_code, bin.label_offsets, [0], false, BinFormat.BB),
|
||||
);
|
||||
|
||||
const { object_code, warnings, errors } = assemble(disassemble(orig_object_code, true), true);
|
||||
@ -179,14 +168,10 @@ test("assembling disassembled object code with manual stack management should re
|
||||
// Round-trip test.
|
||||
test("disassembling assembled assembly code with automatic stack management should result the same assembly code", () => {
|
||||
const orig_buffer = readFileSync("test/resources/quest27_e.bin");
|
||||
const orig_bytes = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
|
||||
const orig_bytes = unwrap(prs_decompress(new BufferCursor(orig_buffer, Endianness.Little)));
|
||||
const { bin } = parse_bin(orig_bytes);
|
||||
const orig_object_code = parse_object_code(
|
||||
bin.object_code,
|
||||
bin.label_offsets,
|
||||
[0],
|
||||
false,
|
||||
BinFormat.BB,
|
||||
const orig_object_code = unwrap(
|
||||
parse_object_code(bin.object_code, bin.label_offsets, [0], false, BinFormat.BB),
|
||||
);
|
||||
const orig_asm = disassemble(orig_object_code, false);
|
||||
|
||||
|
@ -6,14 +6,12 @@ import { ArrayBufferCursor } from "../../../core/data_formats/block/cursor/Array
|
||||
import { Endianness } from "../../../core/data_formats/block/Endianness";
|
||||
import { parse_nj, parse_xj } from "../../../core/data_formats/parsing/ninja";
|
||||
import { parse_njm } from "../../../core/data_formats/parsing/ninja/motion";
|
||||
import { is_xvm, parse_xvm, XvrTexture } from "../../../core/data_formats/parsing/ninja/texture";
|
||||
import { parse_afs } from "../../../core/data_formats/parsing/afs";
|
||||
import { LogManager } from "../../../core/Logger";
|
||||
import { prs_decompress } from "../../../core/data_formats/compression/prs/decompress";
|
||||
import { failure, Result, result_builder, success } from "../../../core/Result";
|
||||
import { failure, problem, Result, success } from "../../../core/Result";
|
||||
import { Severity } from "../../../core/Severity";
|
||||
import { property } from "../../../core/observable";
|
||||
import { WritableProperty } from "../../../core/observable/property/WritableProperty";
|
||||
import { parse_afs_textures, parse_xvm_textures } from "../../util/texture_parsing";
|
||||
|
||||
const logger = LogManager.get("viewer/controllers/model/ModelToolBarController");
|
||||
|
||||
@ -97,54 +95,19 @@ export class ModelToolBarController extends Controller {
|
||||
this.store.set_current_nj_motion(parse_njm(cursor, nj_object.bone_count()));
|
||||
this.set_result(success(undefined));
|
||||
} else {
|
||||
this.set_result(
|
||||
failure([{ severity: Severity.Error, ui_message: "No model to animate" }]),
|
||||
);
|
||||
this.set_result(failure(problem(Severity.Error, "No model to animate")));
|
||||
}
|
||||
} else if (file.name.endsWith(".xvm")) {
|
||||
const xvm_result = parse_xvm(cursor);
|
||||
const xvm_result = parse_xvm_textures(cursor);
|
||||
this.set_result(xvm_result);
|
||||
|
||||
if (xvm_result.success) {
|
||||
this.store.set_current_textures(xvm_result.value.textures);
|
||||
} else {
|
||||
this.store.set_current_textures([]);
|
||||
}
|
||||
this.store.set_current_textures(xvm_result.value ?? []);
|
||||
} else if (file.name.endsWith(".afs")) {
|
||||
const rb = result_builder(logger);
|
||||
const afs_result = parse_afs(cursor);
|
||||
rb.add_result(afs_result);
|
||||
|
||||
if (!afs_result.success) {
|
||||
this.set_result(rb.failure());
|
||||
} else {
|
||||
const textures: XvrTexture[] = afs_result.value.flatMap(file => {
|
||||
const cursor = new ArrayBufferCursor(file, Endianness.Little);
|
||||
|
||||
if (is_xvm(cursor)) {
|
||||
const xvm_result = parse_xvm(cursor);
|
||||
rb.add_result(xvm_result);
|
||||
return xvm_result.value?.textures ?? [];
|
||||
} else {
|
||||
const xvm_result = parse_xvm(prs_decompress(cursor.seek_start(0)));
|
||||
rb.add_result(xvm_result);
|
||||
return xvm_result.value?.textures ?? [];
|
||||
}
|
||||
});
|
||||
|
||||
if (textures.length) {
|
||||
this.set_result(rb.success(textures));
|
||||
} else {
|
||||
this.set_result(rb.failure());
|
||||
}
|
||||
|
||||
this.store.set_current_textures(textures);
|
||||
}
|
||||
const afs_result = parse_afs_textures(cursor);
|
||||
this.set_result(afs_result);
|
||||
this.store.set_current_textures(afs_result.value ?? []);
|
||||
} else {
|
||||
logger.debug(`Unsupported file extension in filename "${file.name}".`);
|
||||
this.set_result(
|
||||
failure([{ severity: Severity.Error, ui_message: "Unsupported file type." }]),
|
||||
);
|
||||
this.set_result(failure(problem(Severity.Error, "Unsupported file type.")));
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error("Couldn't read file.", e);
|
||||
|
@ -1,19 +1,18 @@
|
||||
import { Controller } from "../../../core/controllers/Controller";
|
||||
import { filename_extension } from "../../../core/util";
|
||||
import { read_file } from "../../../core/files";
|
||||
import { is_xvm, parse_xvm, XvrTexture } from "../../../core/data_formats/parsing/ninja/texture";
|
||||
import { XvrTexture } from "../../../core/data_formats/parsing/ninja/texture";
|
||||
import { ArrayBufferCursor } from "../../../core/data_formats/block/cursor/ArrayBufferCursor";
|
||||
import { Endianness } from "../../../core/data_formats/block/Endianness";
|
||||
import { parse_afs } from "../../../core/data_formats/parsing/afs";
|
||||
import { LogManager } from "../../../core/Logger";
|
||||
import { WritableListProperty } from "../../../core/observable/property/list/WritableListProperty";
|
||||
import { list_property, property } from "../../../core/observable";
|
||||
import { ListProperty } from "../../../core/observable/property/list/ListProperty";
|
||||
import { prs_decompress } from "../../../core/data_formats/compression/prs/decompress";
|
||||
import { failure, Result, result_builder } from "../../../core/Result";
|
||||
import { failure, problem, Result } from "../../../core/Result";
|
||||
import { Severity } from "../../../core/Severity";
|
||||
import { Property } from "../../../core/observable/property/Property";
|
||||
import { WritableProperty } from "../../../core/observable/property/WritableProperty";
|
||||
import { parse_afs_textures, parse_xvm_textures } from "../../util/texture_parsing";
|
||||
|
||||
const logger = LogManager.get("viewer/controllers/TextureController");
|
||||
|
||||
@ -38,49 +37,21 @@ export class TextureController extends Controller {
|
||||
const ext = filename_extension(file.name).toLowerCase();
|
||||
const buffer = await read_file(file);
|
||||
const cursor = new ArrayBufferCursor(buffer, Endianness.Little);
|
||||
let result: Result<XvrTexture[]>;
|
||||
|
||||
if (ext === "xvm") {
|
||||
const xvm_result = parse_xvm(cursor);
|
||||
this.set_result(xvm_result);
|
||||
|
||||
if (xvm_result.success) {
|
||||
this._textures.val = xvm_result.value.textures;
|
||||
}
|
||||
result = parse_xvm_textures(cursor);
|
||||
} else if (ext === "afs") {
|
||||
const rb = result_builder(logger);
|
||||
const afs_result = parse_afs(cursor);
|
||||
rb.add_result(afs_result);
|
||||
|
||||
if (!afs_result.success) {
|
||||
this.set_result(rb.failure());
|
||||
} else {
|
||||
const textures: XvrTexture[] = afs_result.value.flatMap(file => {
|
||||
const cursor = new ArrayBufferCursor(file, Endianness.Little);
|
||||
|
||||
if (is_xvm(cursor)) {
|
||||
const xvm_result = parse_xvm(cursor);
|
||||
rb.add_result(xvm_result);
|
||||
return xvm_result.value?.textures ?? [];
|
||||
} else {
|
||||
const xvm_result = parse_xvm(prs_decompress(cursor.seek_start(0)));
|
||||
rb.add_result(xvm_result);
|
||||
return xvm_result.value?.textures ?? [];
|
||||
}
|
||||
});
|
||||
|
||||
if (textures.length) {
|
||||
this.set_result(rb.success(textures));
|
||||
} else {
|
||||
this.set_result(rb.failure());
|
||||
}
|
||||
|
||||
this._textures.val = textures;
|
||||
}
|
||||
result = parse_afs_textures(cursor);
|
||||
} else {
|
||||
logger.debug(`Unsupported file extension in filename "${file.name}".`);
|
||||
this.set_result(
|
||||
failure([{ severity: Severity.Error, ui_message: "Unsupported file type." }]),
|
||||
);
|
||||
result = failure(problem(Severity.Error, "Unsupported file type."));
|
||||
}
|
||||
|
||||
this.set_result(result);
|
||||
|
||||
if (result.success) {
|
||||
this._textures.val = result.value;
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error("Couldn't read file.", e);
|
||||
|
src/viewer/util/texture_parsing.ts (new file, 58 lines)
@ -0,0 +1,58 @@
|
||||
import { Cursor } from "../../core/data_formats/block/cursor/Cursor";
|
||||
import { Result, ResultBuilder, success } from "../../core/Result";
|
||||
import { is_xvm, parse_xvm, XvrTexture } from "../../core/data_formats/parsing/ninja/texture";
|
||||
import { parse_afs } from "../../core/data_formats/parsing/afs";
|
||||
import { Severity } from "../../core/Severity";
|
||||
import { ArrayBufferCursor } from "../../core/data_formats/block/cursor/ArrayBufferCursor";
|
||||
import { Endianness } from "../../core/data_formats/block/Endianness";
|
||||
import { prs_decompress } from "../../core/data_formats/compression/prs/decompress";
|
||||
import { LogManager } from "../../core/Logger";
|
||||
|
||||
const logger = LogManager.get("viewer/util/texture_parsing");
|
||||
|
||||
export function parse_xvm_textures(cursor: Cursor): Result<XvrTexture[]> {
|
||||
const xvm_result = parse_xvm(cursor);
|
||||
|
||||
if (!xvm_result.success) {
|
||||
return xvm_result;
|
||||
}
|
||||
|
||||
return success(xvm_result.value.textures);
|
||||
}
|
||||
|
||||
export function parse_afs_textures(cursor: Cursor): Result<XvrTexture[]> {
|
||||
const rb = new ResultBuilder<XvrTexture[]>(logger);
|
||||
const afs_result = parse_afs(cursor);
|
||||
rb.add_result(afs_result);
|
||||
|
||||
if (!afs_result.success) {
|
||||
return rb.failure();
|
||||
}
|
||||
|
||||
if (afs_result.value.length === 0) {
|
||||
rb.add_problem(Severity.Info, "AFS archive contains no files.");
|
||||
}
|
||||
|
||||
const textures: XvrTexture[] = afs_result.value.flatMap(file => {
|
||||
const cursor = new ArrayBufferCursor(file, Endianness.Little);
|
||||
|
||||
if (is_xvm(cursor)) {
|
||||
const xvm_result = parse_xvm(cursor);
|
||||
rb.add_result(xvm_result);
|
||||
return xvm_result.value?.textures ?? [];
|
||||
} else {
|
||||
const decompression_result = prs_decompress(cursor.seek_start(0));
|
||||
rb.add_result(decompression_result);
|
||||
|
||||
if (!decompression_result.success) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const xvm_result = parse_xvm(decompression_result.value);
|
||||
rb.add_result(xvm_result);
|
||||
return xvm_result.value?.textures ?? [];
|
||||
}
|
||||
});
|
||||
|
||||
return rb.success(textures);
|
||||
}
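Both view controllers in this commit consume these helpers the same way: pass the whole Result to set_result() so the problems reach the UI, then fall back to an empty texture list on failure. Reduced to a sketch (load_textures is illustrative):

function load_textures(buffer: ArrayBuffer, ext: string): XvrTexture[] {
    const cursor = new ArrayBufferCursor(buffer, Endianness.Little);
    const result = ext === "xvm" ? parse_xvm_textures(cursor) : parse_afs_textures(cursor);

    // In the controllers, set_result(result) surfaces result.problems to the user here.
    return result.value ?? [];
}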
|
@ -8,6 +8,7 @@ import { Quest } from "../../src/core/data_formats/parsing/quest/Quest";
|
||||
import { QuestModel } from "../../src/quest_editor/model/QuestModel";
|
||||
import { AreaStore } from "../../src/quest_editor/stores/AreaStore";
|
||||
import { convert_quest_to_model } from "../../src/quest_editor/stores/model_conversion";
|
||||
import { unwrap } from "../../src/core/Result";
|
||||
|
||||
export async function timeout(millis: number): Promise<void> {
|
||||
return new Promise(resolve => {
|
||||
@ -69,12 +70,13 @@ export function get_qst_files(dir: string): [string, string][] {
|
||||
export function load_default_quest_model(area_store: AreaStore): QuestModel {
|
||||
return convert_quest_to_model(
|
||||
area_store,
|
||||
load_qst_as_quest("assets/quests/defaults/default_ep_1.qst")!,
|
||||
load_qst_as_quest("assets/quests/defaults/default_ep_1.qst"),
|
||||
);
|
||||
}
|
||||
|
||||
export function load_qst_as_quest(path: string): Quest | undefined {
|
||||
return parse_qst_to_quest(new BufferCursor(fs.readFileSync(path), Endianness.Little))?.quest;
|
||||
export function load_qst_as_quest(path: string): Quest {
|
||||
return unwrap(parse_qst_to_quest(new BufferCursor(fs.readFileSync(path), Endianness.Little)))
|
||||
.quest;
|
||||
}
|
||||
|
||||
export function to_instructions(assembly: string, manual_stack?: boolean): InstructionSegment[] {
|
||||
|