Fixed a bug in the PRS compression code. Improved PRS compression performance by about 5x. Instructions are now written as object code in quest .bin files.

Daan Vanden Bosch 2019-07-22 12:31:20 +02:00
parent 73e199d724
commit cb7f088f22
22 changed files with 5346 additions and 1098 deletions
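
The third change means the Quest object no longer carries a raw object_code buffer: write_bin re-assembles the parsed Instruction list into object code when a quest .bin file is written (the full bin.ts diff is presumably one of the suppressed diffs below). A minimal sketch of the idea, assuming one-byte opcodes and little-endian immediates — both assumptions, since the real encoding is opcode-dependent:

```typescript
// Hypothetical sketch, not the actual write_bin implementation.
interface Arg {
    value: number;
    size: number; // Encoded size of the argument in bytes: 1, 2 or 4.
}

interface Instruction {
    opcode: { code: number };
    args: Arg[];
}

function assemble(instructions: Instruction[]): ArrayBuffer {
    // First pass: compute the total object code size.
    let size = 0;

    for (const ins of instructions) {
        size += 1; // Assumes a 1-byte opcode; some PSO opcodes are wider.
        for (const arg of ins.args) size += arg.size;
    }

    const dv = new DataView(new ArrayBuffer(size));
    let offset = 0;

    // Second pass: write each opcode followed by its immediate arguments.
    for (const ins of instructions) {
        dv.setUint8(offset++, ins.opcode.code);

        for (const arg of ins.args) {
            switch (arg.size) {
                case 1: dv.setUint8(offset, arg.value); break;
                case 2: dv.setUint16(offset, arg.value, true); break;
                case 4: dv.setUint32(offset, arg.value, true); break;
                default: throw new Error(`Unsupported arg size ${arg.size}.`);
            }

            offset += arg.size;
        }
    }

    return dv.buffer;
}
```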

.env.test (new file)

@@ -0,0 +1,2 @@
REACT_APP_LOG_LEVEL=WARN
RUN_ALL_TESTS=false


@@ -3,7 +3,7 @@ import { WritableCursor } from "../../cursor/WritableCursor";
import { WritableResizableBufferCursor } from "../../cursor/WritableResizableBufferCursor";
import { ResizableBuffer } from "../../ResizableBuffer";
export function compress(src: Cursor): Cursor {
export function prs_compress(src: Cursor): Cursor {
const ctx = new Context(src);
const hash_table = new HashTable();
@@ -111,9 +111,9 @@ const HASH_SIZE = 1 << 8;
class Context {
src: Cursor;
dst: WritableCursor;
flags: number;
flag_bits_left: number;
flag_offset: number;
flags = 0;
flag_bits_left = 0;
flag_offset = 0;
constructor(cursor: Cursor) {
this.src = cursor;
@@ -121,9 +121,6 @@ class Context {
new ResizableBuffer(cursor.size),
cursor.endianness
);
this.flags = 0;
this.flag_bits_left = 0;
this.flag_offset = 0;
}
set_bit(bit: number): void {
@@ -178,6 +175,12 @@ class Context {
let s1 = this.src.position;
const size = this.src.size;
while (s1 < size - 4 && this.src.u32_at(s1) === this.src.u32_at(s2)) {
len += 4;
s1 += 4;
s2 += 4;
}
while (s1 < size && this.src.u8_at(s1) === this.src.u8_at(s2)) {
++len;
++s1;
@@ -198,7 +201,7 @@ class Context {
// If there is nothing in the table at that point, bail out now.
let entry = hash_table.get(hash);
if (entry === null) {
if (entry === -1) {
if (!lazy) {
hash_table.put(hash, this.src.position);
}
@@ -208,7 +211,7 @@ class Context {
// If we'd go outside the window, truncate the hash chain now.
if (this.src.position - entry > MAX_WINDOW) {
hash_table.hash_to_offset[hash] = null;
hash_table.hash_to_offset[hash] = -1;
if (!lazy) {
hash_table.put(hash, this.src.position);
@@ -222,7 +225,7 @@ class Context {
let longest_length = 0;
let longest_match = 0;
while (entry != null) {
while (entry !== -1) {
const mlen = this.match_length(entry);
if (mlen > longest_length || mlen >= 256) {
@@ -233,11 +236,11 @@
// Follow the chain, making sure not to exceed a difference of MAX_WINDOW.
let entry_2 = hash_table.prev(entry);
if (entry_2 !== null) {
if (entry_2 !== -1) {
// If we'd go outside the window, truncate the hash chain now.
if (this.src.position - entry_2 > MAX_WINDOW) {
hash_table.set_prev(entry, null);
entry_2 = null;
hash_table.set_prev(entry, -1);
entry_2 = -1;
}
}
@@ -266,8 +269,8 @@
}
class HashTable {
hash_to_offset: (number | null)[] = new Array(HASH_SIZE).fill(null);
masked_offset_to_prev: (number | null)[] = new Array(MAX_WINDOW).fill(null);
hash_to_offset = new Int32Array(HASH_SIZE).fill(-1);
masked_offset_to_prev = new Int16Array(MAX_WINDOW).fill(-1);
hash(cursor: Cursor): number {
let hash = cursor.u8();
@@ -281,7 +284,7 @@
return hash;
}
get(hash: number): number | null {
get(hash: number): number {
return this.hash_to_offset[hash];
}
@@ -290,11 +293,11 @@
this.hash_to_offset[hash] = offset;
}
prev(offset: number): number | null {
prev(offset: number): number {
return this.masked_offset_to_prev[offset & WINDOW_MASK];
}
set_prev(offset: number, prevOffset: number | null): void {
this.masked_offset_to_prev[offset & WINDOW_MASK] = prevOffset;
set_prev(offset: number, prev_offset: number): void {
this.masked_offset_to_prev[offset & WINDOW_MASK] = prev_offset;
}
}
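
Two changes in this file account for most of the ~5x speedup: the hash chain moved from (number | null)[] to Int32Array/Int16Array with -1 as the "no entry" sentinel, so the table stays packed integers instead of boxed, nullable elements, and match_length now compares four bytes at a time via u32_at before settling the remainder byte-by-byte. A standalone sketch of both patterns, with hypothetical names and the window handling simplified:

```typescript
const HASH_SIZE = 1 << 8;
const MAX_WINDOW = 1 << 13;
const WINDOW_MASK = MAX_WINDOW - 1;
const NO_ENTRY = -1; // Sentinel replacing `null`; fits in a typed array.

class OffsetChain {
    // Head of the chain per hash value.
    private head = new Int32Array(HASH_SIZE).fill(NO_ENTRY);
    // Previous offset with the same hash, indexed by masked offset.
    private prev = new Int32Array(MAX_WINDOW).fill(NO_ENTRY);

    put(hash: number, offset: number): void {
        this.prev[offset & WINDOW_MASK] = this.head[hash];
        this.head[hash] = offset;
    }

    // Visit every candidate offset inside the window, newest first.
    walk(hash: number, visit: (offset: number) => void): void {
        const newest = this.head[hash];
        let offset = newest;

        while (offset !== NO_ENTRY && newest - offset < MAX_WINDOW) {
            visit(offset);
            const p = this.prev[offset & WINDOW_MASK];
            if (p >= offset) break; // Stale slot overwritten by a newer put.
            offset = p;
        }
    }
}

// Usage: record positions, then walk the candidates for a hash.
const chain = new OffsetChain();
chain.put(0x42, 100);
chain.put(0x42, 2000);
chain.walk(0x42, offset => console.log(offset)); // 2000, then 100.

// Match length with the widened fast path: 32-bit compares first, then
// settle the remaining 0-3 bytes. Assumes b < a (b is the older offset).
function match_length(bytes: Uint8Array, a: number, b: number): number {
    const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
    let len = 0;

    while (a + 4 <= bytes.length && dv.getUint32(a) === dv.getUint32(b)) {
        len += 4; a += 4; b += 4;
    }

    while (a < bytes.length && bytes[a] === bytes[b]) {
        len++; a++; b++;
    }

    return len;
}
```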


@@ -6,7 +6,7 @@ import { ResizableBuffer } from "../../ResizableBuffer";
const logger = Logger.get("data_formats/compression/prs/decompress");
export function decompress(cursor: Cursor): Cursor {
export function prs_decompress(cursor: Cursor): Cursor {
const ctx = new Context(cursor);
while (true) {


@@ -1,14 +1,17 @@
import { readFileSync } from "fs";
import { Endianness } from "../..";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
import { compress, decompress } from "../prs";
import { BufferCursor } from "../../cursor/BufferCursor";
import { prs_compress } from "./compress";
import { prs_decompress } from "./decompress";
function test_with_bytes(bytes: number[], expected_compressed_size: number): void {
const cursor = new ArrayBufferCursor(new Uint8Array(bytes).buffer, Endianness.Little);
const compressed_cursor = compress(cursor);
const compressed_cursor = prs_compress(cursor);
expect(compressed_cursor.size).toBe(expected_compressed_size);
const test_cursor = decompress(compressed_cursor);
const test_cursor = prs_decompress(compressed_cursor);
cursor.seek_start(0);
expect(test_cursor.size).toBe(cursor.size);
@@ -64,6 +67,32 @@ test("PRS compression and decompression, 3 bytes", () => {
test_with_bytes([56, 237, 158], 6);
});
test("PRS compression and decompression of quest118_e.bin", () => {
const buffer = readFileSync("test/resources/quest118_e.bin");
const orig = prs_decompress(new BufferCursor(buffer, Endianness.Little));
const test = prs_decompress(prs_compress(orig));
orig.seek_start(0);
expect(test.size).toBe(orig.size);
let matching_bytes = 0;
while (orig.bytes_left) {
const test_byte = test.u8();
const orig_byte = orig.u8();
if (test_byte !== orig_byte) {
throw new Error(
`Byte ${matching_bytes} didn't match, expected ${orig_byte}, got ${test_byte}.`
);
}
matching_bytes++;
}
expect(matching_bytes).toBe(orig.size);
});
class Prng {
seed = 1;


@@ -1,2 +0,0 @@
export { compress } from "./compress";
export { decompress } from "./decompress";


@@ -43,6 +43,16 @@ export class WritableArrayBufferCursor extends ArrayBufferCursor implements Writ
return this;
}
write_u16_array(array: number[]): this {
const len = array.length;
for (let i = 0; i < len; i++) {
this.write_u16(array[i]);
}
return this;
}
write_vec2_f32(value: Vec2): this {
this.dv.setFloat32(this.position, value.x, this.little_endian);
this.dv.setFloat32(this.position + 4, value.y, this.little_endian);


@@ -37,6 +37,11 @@ export interface WritableCursor extends Cursor {
*/
write_u8_array(array: number[]): this;
/**
* Writes an array of unsigned 16-bit integers and increments position by twice the array's length.
*/
write_u16_array(array: number[]): this;
/**
* Writes two 32-bit floating point numbers and increments position by 8.
*/


@@ -60,6 +60,17 @@ export class WritableResizableBufferCursor extends ResizableBufferCursor impleme
return this;
}
write_u16_array(array: number[]): this {
this.ensure_size(2 * array.length);
const len = array.length;
for (let i = 0; i < len; i++) {
this.write_u16(array[i]);
}
return this;
}
write_vec2_f32(value: Vec2): this {
this.ensure_size(8);
this.dv.setFloat32(this.position, value.x, this.little_endian);


@@ -1,5 +1,5 @@
import Logger from "js-logger";
import { decompress } from "../compression/prs";
import { prs_decompress } from "../compression/prs/decompress";
import { Cursor } from "../cursor/Cursor";
import { decrypt } from "../encryption/prc";
@@ -12,7 +12,7 @@ export function parse_prc(cursor: Cursor): Cursor {
// Unencrypted, decompressed size.
const size = cursor.u32();
let key = cursor.u32();
const out = decompress(decrypt(key, cursor));
const out = prs_decompress(decrypt(key, cursor));
if (out.size !== size) {
logger.warn(


@@ -1,17 +1,18 @@
import { readFileSync } from "fs";
import { Endianness } from "../..";
import * as prs from "../../compression/prs";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
import { BufferCursor } from "../../cursor/BufferCursor";
import { parse_bin, write_bin } from "./bin";
import { prs_decompress } from "../../compression/prs/decompress";
/**
* Parse a file, convert the resulting structure to BIN again and check whether the end result is equal to the original.
*/
test("parse_bin and write_bin", () => {
const orig_buffer = readFileSync("test/resources/quest118_e.bin");
const orig_bin = prs.decompress(new BufferCursor(orig_buffer, Endianness.Little));
const test_bin = new ArrayBufferCursor(write_bin(parse_bin(orig_bin)), Endianness.Little);
const orig_bin = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
const test_buffer = write_bin(parse_bin(orig_bin));
const test_bin = new ArrayBufferCursor(test_buffer, Endianness.Little);
orig_bin.seek_start(0);
expect(test_bin.size).toBe(orig_bin.size);

File diff suppressed because it is too large


@@ -1,16 +1,16 @@
import * as fs from "fs";
import { Endianness } from "../..";
import * as prs from "../../compression/prs";
import { prs_decompress } from "../../compression/prs/decompress";
import { BufferCursor } from "../../cursor/BufferCursor";
import { ResizableBufferCursor } from "../../cursor/ResizableBufferCursor";
import { parse_dat, write_dat } from "./dat";
import { readFileSync } from "fs";
/**
* Parse a file, convert the resulting structure to DAT again and check whether the end result is equal to the original.
*/
test("parse_dat and write_dat", () => {
const orig_buffer = fs.readFileSync("test/resources/quest118_e.dat");
const orig_dat = prs.decompress(new BufferCursor(orig_buffer, Endianness.Little));
const orig_buffer = readFileSync("test/resources/quest118_e.dat");
const orig_dat = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
const test_dat = new ResizableBufferCursor(write_dat(parse_dat(orig_dat)), Endianness.Little);
orig_dat.seek_start(0);
@@ -32,8 +32,8 @@ test("parse_dat and write_dat", () => {
* Parse a file, modify the resulting structure, convert it to DAT again and check whether the end result is equal to the original except for the bytes that should be changed.
*/
test("parse, modify and write DAT", () => {
const orig_buffer = fs.readFileSync("./test/resources/quest118_e.dat");
const orig_dat = prs.decompress(new BufferCursor(orig_buffer, Endianness.Little));
const orig_buffer = readFileSync("./test/resources/quest118_e.dat");
const orig_dat = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
const test_parsed = parse_dat(orig_dat);
orig_dat.seek_start(0);


@@ -1,12 +1,13 @@
import * as fs from "fs";
import { ObjectType, Quest } from "../../../domain";
import { parse_quest, write_quest_qst } from "../quest";
import { Endianness } from "../..";
import { BufferCursor } from "../../cursor/BufferCursor";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
import { walk_qst_files } from "../../../../test/src/utils";
import { readFileSync } from "fs";
test("parse Towards the Future", () => {
const buffer = fs.readFileSync("test/resources/quest118_e.qst");
const buffer = readFileSync("test/resources/quest118_e.qst");
const cursor = new BufferCursor(buffer, Endianness.Little);
const quest = parse_quest(cursor)!;
@@ -35,37 +36,63 @@ test("parse Towards the Future", () => {
});
/**
* Roundtrip test.
* Roundtrip tests.
* Parse a QST file, write the resulting Quest object to QST again, then parse that again.
* Then check whether the two Quest objects are equal.
*/
test("parse_quest and write_quest_qst", () => {
const buffer = fs.readFileSync("test/resources/tethealla_v0.143_quests/solo/ep1/02.qst");
const orig_quest = parse_quest(new BufferCursor(buffer, Endianness.Little))!;
const test_quest = parse_quest(
new ArrayBufferCursor(write_quest_qst(orig_quest, "02.qst"), Endianness.Little)
)!;
expect(test_quest.name).toBe(orig_quest.name);
expect(test_quest.short_description).toBe(orig_quest.short_description);
expect(test_quest.long_description).toBe(orig_quest.long_description);
expect(test_quest.episode).toBe(orig_quest.episode);
expect(testable_objects(test_quest)).toEqual(testable_objects(orig_quest));
expect(testable_npcs(test_quest)).toEqual(testable_npcs(orig_quest));
expect(testable_area_variants(test_quest)).toEqual(testable_area_variants(orig_quest));
});
function testable_objects(quest: Quest): any[][] {
return quest.objects.map(object => [
object.area_id,
object.section_id,
object.position,
object.type,
]);
if (process.env["RUN_ALL_TESTS"] === "true") {
walk_qst_files(roundtrip_test);
} else {
const file_name = "quest118_e.qst";
const path = `test/resources/${file_name}`;
const buffer = readFileSync(path);
roundtrip_test(path, file_name, buffer);
}
function testable_npcs(quest: Quest): any[][] {
return quest.npcs.map(npc => [npc.area_id, npc.section_id, npc.position, npc.type]);
function roundtrip_test(path: string, file_name: string, contents: Buffer): void {
test(`parse_quest and write_quest_qst ${path}`, () => {
const orig_quest = parse_quest(new BufferCursor(contents, Endianness.Little))!;
const test_bin = write_quest_qst(orig_quest, file_name);
const test_quest = parse_quest(new ArrayBufferCursor(test_bin, Endianness.Little))!;
expect(test_quest.name).toBe(orig_quest.name);
expect(test_quest.short_description).toBe(orig_quest.short_description);
expect(test_quest.long_description).toBe(orig_quest.long_description);
expect(test_quest.episode).toBe(orig_quest.episode);
expect(test_quest.objects.length).toBe(orig_quest.objects.length);
for (let i = 0; i < orig_quest.objects.length; i++) {
const orig_obj = orig_quest.objects[i];
const test_obj = test_quest.objects[i];
expect(test_obj.area_id).toBe(orig_obj.area_id);
expect(test_obj.section_id).toBe(orig_obj.section_id);
expect(test_obj.position).toEqual(orig_obj.position);
expect(test_obj.type.id).toBe(orig_obj.type.id);
}
expect(test_quest.npcs.length).toBe(orig_quest.npcs.length);
for (let i = 0; i < orig_quest.npcs.length; i++) {
const orig_npc = orig_quest.npcs[i];
const test_npc = test_quest.npcs[i];
expect(test_npc.area_id).toBe(orig_npc.area_id);
expect(test_npc.section_id).toBe(orig_npc.section_id);
expect(test_npc.position).toEqual(orig_npc.position);
expect(test_npc.type.id).toBe(orig_npc.type.id);
}
expect(test_quest.area_variants.length).toBe(orig_quest.area_variants.length);
for (let i = 0; i < orig_quest.area_variants.length; i++) {
const orig_area_variant = orig_quest.area_variants[i];
const test_area_variant = test_quest.area_variants[i];
expect(test_area_variant.area.id).toBe(orig_area_variant.area.id);
expect(test_area_variant.id).toBe(orig_area_variant.id);
}
expect(test_quest.instructions.length).toBe(orig_quest.instructions.length);
expect(test_quest.labels.size).toBe(orig_quest.labels.size);
});
}
function testable_area_variants(quest: Quest): any[][] {
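
With RUN_ALL_TESTS=true (see the .env.test file added above), the roundtrip test runs against every QST file that walk_qst_files yields; otherwise only quest118_e.qst is exercised. walk_qst_files comes from test/src/utils and is not part of this diff; a plausible sketch of such a helper, purely an assumption about its shape:

```typescript
import { readdirSync, readFileSync, statSync } from "fs";
import { join } from "path";

// Hypothetical stand-in for the helper in test/src/utils: recursively
// feed every .qst file under test/resources to the callback.
function walk_qst_files(
    f: (path: string, file_name: string, contents: Buffer) => void,
    dir: string = "test/resources",
): void {
    for (const name of readdirSync(dir)) {
        const path = join(dir, name);

        if (statSync(path).isDirectory()) {
            walk_qst_files(f, path);
        } else if (name.endsWith(".qst")) {
            f(path, name, readFileSync(path));
        }
    }
}
```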


@@ -2,14 +2,16 @@ import Logger from "js-logger";
import { Endianness } from "../..";
import { AreaVariant, NpcType, ObjectType, Quest, QuestNpc, QuestObject } from "../../../domain";
import { area_store } from "../../../stores/AreaStore";
import * as prs from "../../compression/prs";
import { prs_compress } from "../../compression/prs/compress";
import { prs_decompress } from "../../compression/prs/decompress";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
import { Cursor } from "../../cursor/Cursor";
import { ResizableBufferCursor } from "../../cursor/ResizableBufferCursor";
import { Vec3 } from "../../vector";
import { BB_MAP_DESIGNATE, Instruction, parse_bin, SET_EPISODE, write_bin, BinFile } from "./bin";
import { BinFile, Instruction, parse_bin, write_bin } from "./bin";
import { DatFile, DatNpc, DatObject, parse_dat, write_dat } from "./dat";
import { parse_qst, QstContainedFile, write_qst } from "./qst";
import { Opcode } from "./opcodes";
const logger = Logger.get("data_formats/parsing/quest");
@@ -50,11 +52,14 @@ export function parse_quest(cursor: Cursor, lenient: boolean = false): Quest | u
return;
}
const dat = parse_dat(prs.decompress(new ArrayBufferCursor(dat_file.data, Endianness.Little)));
const bin = parse_bin(
prs.decompress(new ArrayBufferCursor(bin_file.data, Endianness.Little)),
lenient
const dat_decompressed = prs_decompress(
new ArrayBufferCursor(dat_file.data, Endianness.Little)
);
const dat = parse_dat(dat_decompressed);
const bin_decompressed = prs_decompress(
new ArrayBufferCursor(bin_file.data, Endianness.Little)
);
const bin = parse_bin(bin_decompressed, lenient);
let episode = 1;
let area_variants: AreaVariant[] = [];
@@ -88,7 +93,6 @@ export function parse_quest(cursor: Cursor, lenient: boolean = false): Quest | u
dat.unknowns,
bin.labels,
bin.instructions,
bin.object_code,
bin.unknown
);
}
@@ -107,8 +111,7 @@ export function write_quest_qst(quest: Quest, file_name: string): ArrayBuffer {
quest.short_description,
quest.long_description,
quest.labels,
[],
quest.object_code,
quest.instructions,
quest.bin_unknown
)
);
@@ -121,14 +124,14 @@
{
name: base_file_name + ".dat",
id: quest.id,
data: prs
.compress(new ResizableBufferCursor(dat, Endianness.Little))
.array_buffer(),
data: prs_compress(
new ResizableBufferCursor(dat, Endianness.Little)
).array_buffer(),
},
{
name: base_file_name + ".bin",
id: quest.id,
data: prs.compress(new ArrayBufferCursor(bin, Endianness.Little)).array_buffer(),
data: prs_compress(new ArrayBufferCursor(bin, Endianness.Little)).array_buffer(),
},
],
});
@@ -138,7 +141,9 @@
* Defaults to episode I.
*/
function get_episode(func_0_instructions: Instruction[]): number {
const set_episode = func_0_instructions.find(instruction => instruction.opcode === SET_EPISODE);
const set_episode = func_0_instructions.find(
instruction => instruction.opcode === Opcode.set_episode
);
if (set_episode) {
switch (set_episode.args[0].value) {
@@ -174,7 +179,7 @@ function get_area_variants(
}
const bb_maps = func_0_instructions.filter(
instruction => instruction.opcode === BB_MAP_DESIGNATE
instruction => instruction.opcode === Opcode.bb_map_designate
);
for (const bb_map of bb_maps) {
@@ -520,6 +525,8 @@ function objects_to_dat_data(objects: QuestObject[]): DatObject[] {
}
function npcs_to_dat_data(npcs: QuestNpc[]): DatNpc[] {
const dv = new DataView(new ArrayBuffer(4));
return npcs.map(npc => {
const type_data = npc_type_to_dat_data(npc.type) || {
type_id: npc.pso_type_id,
@@ -527,11 +534,11 @@ function npcs_to_dat_data(npcs: QuestNpc[]): DatNpc[] {
regular: true,
};
let scale = new Vec3(
npc.scale.x,
(npc.scale.y & ~0x800000) | (type_data.regular ? 0 : 0x800000),
npc.scale.z
);
dv.setFloat32(0, npc.scale.y);
dv.setUint32(0, (dv.getUint32(0) & ~0x800000) | (type_data.regular ? 0 : 0x800000));
const scale_y = dv.getFloat32(0);
let scale = new Vec3(npc.scale.x, scale_y, npc.scale.z);
return {
type_id: type_data.type_id,

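The scale rewrite above also fixes a subtle coercion bug: the old code applied & and | to npc.scale.y directly, and JavaScript's bitwise operators truncate their operands to 32-bit integers, so any fractional scale was lost. The new code instead flips bit 23 of the float's IEEE-754 bit pattern through a DataView, leaving the other 31 bits untouched. The trick in isolation:

```typescript
// Set or clear bit 23 of a float's bit pattern without disturbing the
// remaining bits. Applying bitwise operators to the number itself would
// truncate it first (e.g. 2.5 & ~0x800000 === 2).
function with_bit_23(value: number, set: boolean): number {
    const dv = new DataView(new ArrayBuffer(4));
    dv.setFloat32(0, value);
    dv.setUint32(0, (dv.getUint32(0) & ~0x800000) | (set ? 0x800000 : 0));
    return dv.getFloat32(0);
}
```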
File diff suppressed because it is too large


@@ -97,8 +97,8 @@ export function write_qst(params: WriteQstParams): ArrayBuffer {
write_file_headers(cursor, files);
write_file_chunks(cursor, files);
if (cursor.size !== total_size) {
throw new Error(`Expected a final file size of ${total_size}, but got ${cursor.size}.`);
if (cursor.position !== total_size) {
throw new Error(`Expected a final file size of ${total_size}, but got ${cursor.position}.`);
}
return buffer;
@@ -251,7 +251,7 @@ function parse_files(cursor: Cursor, expected_sizes: Map<string, number>): QstCo
function write_file_headers(cursor: WritableCursor, files: QstContainedFileParam[]): void {
for (const file of files) {
if (file.name.length > 16) {
throw Error(`File ${file.name} has a name longer than 16 characters.`);
throw new Error(`File ${file.name} has a name longer than 16 characters.`);
}
cursor.write_u16(88); // Header size.
@@ -291,24 +291,39 @@ function write_file_headers(cursor: WritableCursor, files: QstContainedFileParam
function write_file_chunks(cursor: WritableCursor, files: QstContainedFileParam[]): void {
// Files are interleaved in 1056 byte chunks.
// Each chunk has a 24 byte header, 1024 byte data segment and an 8 byte trailer.
const chunks = files.map(file => ({
const files_to_chunk = files.map(file => ({
no: 0,
data: new ArrayBufferCursor(file.data, Endianness.Little),
name: file.name,
}));
let done = 0;
while (chunks.length) {
let i = 0;
while (i < chunks.length) {
if (!write_file_chunk(cursor, chunks[i].data, chunks[i].no++, chunks[i].name)) {
// Remove if there are no more chunks to write.
chunks.splice(i, 1);
} else {
++i;
while (done < files_to_chunk.length) {
for (const file_to_chunk of files_to_chunk) {
if (file_to_chunk.data.bytes_left) {
if (
!write_file_chunk(
cursor,
file_to_chunk.data,
file_to_chunk.no++,
file_to_chunk.name
)
) {
done++;
}
}
}
}
for (const file_to_chunk of files_to_chunk) {
const expected_chunks = Math.ceil(file_to_chunk.data.size / 1024);
if (file_to_chunk.no !== expected_chunks) {
throw new Error(
`Expected to write ${expected_chunks} chunks for file "${file_to_chunk.name}" but ${file_to_chunk.no} were written.`
);
}
}
}
/**

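As the comment in write_file_chunks notes, files are interleaved in 1056-byte chunks: a 24-byte header, 1024 bytes of payload and an 8-byte trailer. That payload size is where the Math.ceil(size / 1024) in the new per-file sanity check comes from. A small sketch of the arithmetic, with a hypothetical helper name:

```typescript
const CHUNK_DATA_SIZE = 1024;
const CHUNK_OVERHEAD = 24 + 8; // Header plus trailer.
const CHUNK_SIZE = CHUNK_DATA_SIZE + CHUNK_OVERHEAD; // 1056 bytes on disk.

// Hypothetical helper: bytes the chunk section occupies for a set of
// contained files, mirroring the expected_chunks check above.
function chunk_section_size(file_sizes: number[]): number {
    let total = 0;

    for (const size of file_sizes) {
        total += Math.ceil(size / CHUNK_DATA_SIZE) * CHUNK_SIZE;
    }

    return total;
}

// A 2048-byte .bin and a 3000-byte .dat interleave as 2 + 3 chunks:
// chunk_section_size([2048, 3000]) === 5 * 1056.
```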

@@ -1,11 +1,11 @@
import { decompress } from "../compression/prs";
import { prs_decompress } from "../compression/prs/decompress";
import { Cursor } from "../cursor/Cursor";
export type Unitxt = string[][];
export function parse_unitxt(buf: Cursor, compressed: boolean = true): Unitxt {
if (compressed) {
buf = decompress(buf);
buf = prs_decompress(buf);
}
const category_count = buf.u32();


@@ -95,7 +95,6 @@ export class Quest {
dat_unknowns: DatUnknown[];
labels: Map<number, number>;
instructions: Instruction[];
object_code: ArrayBuffer;
bin_unknown: ArrayBuffer;
constructor(
@@ -111,7 +110,6 @@
dat_unknowns: DatUnknown[],
labels: Map<number, number>,
instructions: Instruction[],
object_code: ArrayBuffer,
bin_unknown: ArrayBuffer
) {
if (!Number.isInteger(id) || id < 0)
@@ -124,7 +122,6 @@
if (!dat_unknowns) throw new Error("dat_unknowns is required.");
if (!labels) throw new Error("labels is required.");
if (!instructions) throw new Error("instructions is required.");
if (!object_code) throw new Error("object_code is required.");
if (!bin_unknown) throw new Error("bin_unknown is required.");
this.id = id;
@@ -139,7 +136,6 @@
this.dat_unknowns = dat_unknowns;
this.labels = labels;
this.instructions = instructions;
this.object_code = object_code;
this.bin_unknown = bin_unknown;
}
}

src/setupTests.js (new file)

@@ -0,0 +1,5 @@
import Logger from "js-logger";
Logger.useDefaults({
defaultLevel: Logger[process.env["REACT_APP_LOG_LEVEL"] || "OFF"],
});


@@ -1,6 +1,9 @@
import { Instruction, Opcode } from "../data_formats/parsing/quest/bin";
import { Vec3 } from "../data_formats/vector";
import { Episode, NpcType, ObjectType, Quest, QuestNpc, QuestObject } from "../domain";
import { area_store } from "./AreaStore";
import { WritableArrayBufferCursor } from "../data_formats/cursor/WritableArrayBufferCursor";
import { Endianness } from "../data_formats";
export function create_new_quest(episode: Episode): Quest {
if (episode === Episode.II) throw new Error("Episode II not yet supported.");
@@ -16,13 +19,45 @@ export function create_new_quest(episode: Episode): Quest {
create_default_objects(),
create_default_npcs(),
[],
new Map(),
[],
new ArrayBuffer(0),
new ArrayBuffer(0)
new Map([[0, 0], [1, 6]]),
[
new Instruction(Opcode.set_episode, [{ value: 0, size: 4 }]),
new Instruction(Opcode.arg_pushl, [{ value: 0, size: 4 }]),
new Instruction(Opcode.arg_pushw, [{ value: 1, size: 2 }]),
new Instruction(Opcode.set_floor_handler, []),
new Instruction(Opcode.bb_map_designate, [
{ value: 0, size: 1 },
{ value: 0, size: 2 },
{ value: 0, size: 1 },
{ value: 0, size: 1 },
]),
new Instruction(Opcode.ret, []),
new Instruction(Opcode.ret, []),
],
create_bin_unknown()
);
}
function create_bin_unknown(): ArrayBuffer {
const buffer = new ArrayBuffer(3732);
const cursor = new WritableArrayBufferCursor(buffer, Endianness.Little);
cursor.write_u32(0);
for (let i = 0; i < 16; i++) {
cursor.write_u8(0xff);
}
for (let i = 0; i < 16; i++) {
cursor.write_u8(0);
}
for (let i = 0; i < 112; i++) {
cursor.write_u8(0xff);
}
return buffer;
}
function create_default_objects(): QuestObject[] {
return [
new QuestObject(


@@ -55,7 +55,7 @@ const ASM_SYNTAX: languages.IMonarchLanguage = {
},
};
const INSTRUCTION_SUGGESTIONS = OPCODES.map(opcode => {
const INSTRUCTION_SUGGESTIONS = OPCODES.filter(opcode => opcode != null).map(opcode => {
return ({
label: opcode.mnemonic,
kind: languages.CompletionItemKind.Function,
@@ -131,10 +131,6 @@ class MonacoComponent extends Component<MonacoProps> {
componentDidMount(): void {
if (this.div_ref.current) {
// model.onDidChangeContent(e => {
// e.changes[0].range
// })
this.editor = editor.create(this.div_ref.current, {
theme: "phantasmal-world",
scrollBeyondLastLine: false,
@@ -146,6 +142,9 @@
const model = quest && editor.createModel(disassemble(quest), "psoasm");
if (model && this.editor) {
// model.onDidChangeContent(e => {
// });
this.editor.setModel(model);
}
});


@@ -1,12 +1,12 @@
import { Arg, Type } from "../../data_formats/parsing/quest/bin";
import { Arg, Param, Type } from "../../data_formats/parsing/quest/bin";
import { Quest } from "../../domain";
/**
* @param manual_stack If true, will output stack management instructions (argpush variants). Otherwise stack management instructions will not be output and their arguments will be output as arguments to the instruction that pops them from the stack.
*/
export function disassemble(quest: Quest, manual_stack: boolean = false): string {
const lines: string[] = [];
const index_to_label = [...quest.labels.entries()].reduce(
(map, [l, i]) => map.set(i, l),
new Map<number, number>()
);
const index_to_label = new Map([...quest.labels.entries()].map(([l, i]) => [i, l]));
const stack: Arg[] = [];
@@ -17,25 +17,18 @@ export function disassemble(quest: Quest, manual_stack: boolean = false): string
if (!manual_stack && ins.opcode.push_stack) {
stack.push(...ins.args);
} else {
let args: string[] = [];
for (let j = 0; j < ins.opcode.params.length; j++) {
const param_type = ins.opcode.params[j];
const arg = ins.args[j];
args.push(...arg_to_strings(param_type, arg));
}
let args = args_to_strings(ins.opcode.params, ins.args);
if (!manual_stack) {
for (let j = ins.opcode.stack_params.length - 1; j >= 0; j--) {
const param_type = ins.opcode.stack_params[j];
const arg = stack.pop();
if (!arg) {
break;
}
args.push(...arg_to_strings(param_type, arg));
}
args.push(
...args_to_strings(
ins.opcode.stack_params,
stack.splice(
Math.max(0, stack.length - ins.opcode.stack_params.length),
ins.opcode.stack_params.length
)
)
);
}
if (label != null) {
@@ -46,23 +39,45 @@ export function disassemble(quest: Quest, manual_stack: boolean = false): string
}
}
// Ensure newline.
if (lines.length) {
lines.push("");
}
return lines.join("\n");
}
function arg_to_strings(param_type: Type, arg: Arg): string[] {
switch (param_type) {
case Type.U8:
case Type.U16:
case Type.U32:
case Type.I32:
case Type.F32:
return [arg.value.toString()];
case Type.Register:
return ["r" + arg.value];
case Type.SwitchData:
case Type.JumpData:
return arg.value.map(String);
case Type.String:
return [JSON.stringify(arg.value)];
function args_to_strings(params: Param[], args: Arg[]): string[] {
const arg_strings: string[] = [];
for (let i = 0; i < params.length; i++) {
const type = params[i].type;
const arg = args[i];
if (arg == null) {
arg_strings.push("");
continue;
}
switch (type) {
case Type.U8Var:
case Type.U16Var:
for (; i < args.length; i++) {
arg_strings.push(args[i].value.toString());
}
break;
case Type.Register:
arg_strings.push("r" + arg.value);
break;
case Type.String:
arg_strings.push(JSON.stringify(arg.value));
break;
default:
arg_strings.push(arg.value.toString());
break;
}
}
return arg_strings;
}
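
The rewritten disassemble pops an instruction's stack arguments with one splice instead of a reversed pop-per-parameter loop, keeping the arguments in push order. The splice idiom in isolation:

```typescript
// Remove the last `n` elements in place and return them in push order.
// Math.max(0, ...) makes an underfull stack yield whatever is left
// instead of throwing, like the old `if (!arg) break;` guard.
function pop_n<T>(stack: T[], n: number): T[] {
    return stack.splice(Math.max(0, stack.length - n), n);
}

const stack = [1, 2, 3, 4];
console.log(pop_n(stack, 2)); // [3, 4]; stack is now [1, 2].
console.log(pop_n(stack, 5)); // [1, 2]; underfull, no error.
```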