- Moved instructions/opcodes and DFA code to core to avoid dependency from core to quest_editor

- When the assembly worker updates map designations, it now takes map_designate and map_designate_ex into account
This commit is contained in:
Daan Vanden Bosch 2020-01-02 18:42:08 +01:00
parent f36b102ec1
commit 93e05ea614
41 changed files with 1047 additions and 886 deletions

View File

@ -10,8 +10,8 @@ const logger = LogManager.get("assets_generation/update_generic_data");
LogManager.default_level = LogLevel.Trace;
const OPCODES_YML_FILE = `${RESOURCE_DIR}/scripting/opcodes.yml`;
const OPCODES_SRC_FILE = `${SRC_DIR}/quest_editor/scripting/opcodes.ts`;
const OPCODES_YML_FILE = `${RESOURCE_DIR}/asm/opcodes.yml`;
const OPCODES_SRC_FILE = `${SRC_DIR}/core/data_formats/asm/opcodes.ts`;
update();

View File

@ -1,4 +1,4 @@
import { assemble } from "../assembly";
import { assemble } from "../../../../quest_editor/scripting/assembly";
import { InstructionSegment, SegmentType } from "../instructions";
import { BranchType, ControlFlowGraph } from "./ControlFlowGraph";

View File

@ -0,0 +1,54 @@
import { InstructionSegment } from "../instructions";
import { ControlFlowGraph } from "./ControlFlowGraph";
import { OP_BB_MAP_DESIGNATE, OP_MAP_DESIGNATE, OP_MAP_DESIGNATE_EX } from "../opcodes";
import { get_register_value } from "./get_register_value";
import { LogManager } from "../../../Logger";
const logger = LogManager.get("core/data_formats/asm/data_flow_analysis/map_designations");
/**
 * Scans the instructions of function 0 for map_designate, map_designate_ex and
 * bb_map_designate and collects the area ID → area variant ID pairs they set.
 *
 * @param instruction_segments - All instruction segments; used to build the
 *        control flow graph needed for register value analysis.
 * @param func_0_segment - The segment containing function 0's instructions.
 * @returns A map from area ID to area variant ID.
 */
export function get_map_designations(
    instruction_segments: InstructionSegment[],
    func_0_segment: InstructionSegment,
): Map<number, number> {
    const designations = new Map<number, number>();
    // Built lazily: only needed when a register-based designate opcode is encountered.
    let graph: ControlFlowGraph | undefined;

    for (const instruction of func_0_segment.instructions) {
        const code = instruction.opcode.code;

        if (code === OP_MAP_DESIGNATE.code || code === OP_MAP_DESIGNATE_EX.code) {
            if (graph == undefined) {
                graph = ControlFlowGraph.create(instruction_segments);
            }

            const register = instruction.args[0].value;
            const area_id = get_register_value(graph, instruction, register);

            // Only a single, unambiguous value is usable.
            if (area_id.size() !== 1) {
                logger.warn(`Couldn't determine area ID for map_designate instruction.`);
                continue;
            }

            // map_designate reads the variant ID from register + 2,
            // map_designate_ex from register + 3.
            const variant_offset = code === OP_MAP_DESIGNATE.code ? 2 : 3;
            const variant_id = get_register_value(graph, instruction, register + variant_offset);

            if (variant_id.size() !== 1) {
                logger.warn(
                    `Couldn't determine area variant ID for map_designate instruction.`,
                );
                continue;
            }

            designations.set(area_id.get(0)!, variant_id.get(0)!);
        } else if (code === OP_BB_MAP_DESIGNATE.code) {
            // bb_map_designate carries both IDs as immediate arguments.
            designations.set(instruction.args[0].value, instruction.args[2].value);
        }
    }

    return designations;
}

View File

@ -3,10 +3,10 @@ import { ControlFlowGraph } from "./ControlFlowGraph";
import {
MAX_REGISTER_VALUE,
MIN_REGISTER_VALUE,
register_value,
get_register_value,
REGISTER_VALUES,
} from "./register_value";
import { to_instructions } from "../../../../test/src/utils";
} from "./get_register_value";
import { to_instructions } from "../../../../../test/src/utils";
test(`trivial case`, () => {
const im = to_instructions(`
@ -14,7 +14,7 @@ test(`trivial case`, () => {
ret
`);
const cfg = ControlFlowGraph.create(im);
const values = register_value(cfg, im[0].instructions[0], 6);
const values = get_register_value(cfg, im[0].instructions[0], 6);
expect(values.size()).toBe(0);
});
@ -26,7 +26,7 @@ test(`single assignment`, () => {
ret
`);
const cfg = ControlFlowGraph.create(im);
const values = register_value(cfg, im[0].instructions[1], 6);
const values = get_register_value(cfg, im[0].instructions[1], 6);
expect(values.size()).toBe(1);
expect(values.get(0)).toBe(1337);
@ -44,7 +44,7 @@ test(`two code paths`, () => {
ret
`);
const cfg = ControlFlowGraph.create(im);
const values = register_value(cfg, im[2].instructions[0], 10);
const values = get_register_value(cfg, im[2].instructions[0], 10);
expect(values.size()).toBe(2);
expect(values.get(0)).toBe(111);
@ -59,7 +59,7 @@ test(`loop`, () => {
ret
`);
const cfg = ControlFlowGraph.create(im);
const values = register_value(cfg, im[0].instructions[2], 10);
const values = get_register_value(cfg, im[0].instructions[2], 10);
expect(values.size()).toBe(REGISTER_VALUES);
});
@ -72,13 +72,13 @@ test(`leta and leto`, () => {
ret
`);
const cfg = ControlFlowGraph.create(im);
const r0 = register_value(cfg, im[0].instructions[2], 0);
const r0 = get_register_value(cfg, im[0].instructions[2], 0);
expect(r0.size()).toBe(REGISTER_VALUES);
expect(r0.min()).toBe(MIN_REGISTER_VALUE);
expect(r0.max()).toBe(MAX_REGISTER_VALUE);
const r1 = register_value(cfg, im[0].instructions[2], 1);
const r1 = get_register_value(cfg, im[0].instructions[2], 1);
expect(r1.size()).toBe(REGISTER_VALUES);
expect(r1.min()).toBe(MIN_REGISTER_VALUE);
@ -101,17 +101,17 @@ test(`rev`, () => {
ret
`);
const cfg = ControlFlowGraph.create(im);
const v0 = register_value(cfg, im[0].instructions[4], 10);
const v0 = get_register_value(cfg, im[0].instructions[4], 10);
expect(v0.size()).toBe(1);
expect(v0.get(0)).toBe(0);
const v1 = register_value(cfg, im[0].instructions[8], 10);
const v1 = get_register_value(cfg, im[0].instructions[8], 10);
expect(v1.size()).toBe(2);
expect(v1.to_array()).toEqual([0, 1]);
const v2 = register_value(cfg, im[0].instructions[10], 10);
const v2 = get_register_value(cfg, im[0].instructions[10], 10);
expect(v2.size()).toBe(1);
expect(v2.get(0)).toBe(1);
@ -133,7 +133,7 @@ function test_branched(opcode: Opcode, ...expected: number[]): void {
ret
`);
const cfg = ControlFlowGraph.create(im);
const values = register_value(cfg, im[1].instructions[1], 99);
const values = get_register_value(cfg, im[1].instructions[1], 99);
expect(values.size()).toBe(expected.length);
expect(values.to_array()).toEqual(expected);
@ -158,17 +158,17 @@ test(`get_random`, () => {
ret
`);
const cfg = ControlFlowGraph.create(im);
const v0 = register_value(cfg, im[0].instructions[3], 10);
const v0 = get_register_value(cfg, im[0].instructions[3], 10);
expect(v0.size()).toBe(1);
expect(v0.get(0)).toBe(20);
const v1 = register_value(cfg, im[0].instructions[5], 10);
const v1 = get_register_value(cfg, im[0].instructions[5], 10);
expect(v1.size()).toBe(1);
expect(v1.get(0)).toBe(20);
const v2 = register_value(cfg, im[0].instructions[7], 10);
const v2 = get_register_value(cfg, im[0].instructions[7], 10);
expect(v2.size()).toBe(5);
expect(v2.to_array()).toEqual([20, 21, 22, 23, 24]);

View File

@ -25,9 +25,9 @@ import {
} from "../opcodes";
import { BasicBlock, ControlFlowGraph } from "./ControlFlowGraph";
import { ValueSet } from "./ValueSet";
import { LogManager } from "../../../core/Logger";
import { LogManager } from "../../../Logger";
const logger = LogManager.get("quest_editor/scripting/data_flow_analysis/register_value");
const logger = LogManager.get("core/data_formats/asm/data_flow_analysis/register_value");
export const MIN_REGISTER_VALUE = MIN_SIGNED_DWORD_VALUE;
export const MAX_REGISTER_VALUE = MAX_SIGNED_DWORD_VALUE;
@ -36,7 +36,7 @@ export const REGISTER_VALUES = Math.pow(2, 32);
/**
* Computes the possible values of a register right before a specific instruction.
*/
export function register_value(
export function get_register_value(
cfg: ControlFlowGraph,
instruction: Instruction,
register: number,

View File

@ -13,10 +13,10 @@ import {
} from "../opcodes";
import { BasicBlock, ControlFlowGraph } from "./ControlFlowGraph";
import { ValueSet } from "./ValueSet";
import { register_value } from "./register_value";
import { LogManager } from "../../../core/Logger";
import { get_register_value } from "./get_register_value";
import { LogManager } from "../../../Logger";
const logger = LogManager.get("quest_editor/scripting/data_flow_analysis/stack_value");
const logger = LogManager.get("core/data_formats/asm/data_flow_analysis/stack_value");
export const MIN_STACK_VALUE = MIN_SIGNED_DWORD_VALUE;
export const MAX_STACK_VALUE = MAX_SIGNED_DWORD_VALUE;
@ -24,7 +24,7 @@ export const MAX_STACK_VALUE = MAX_SIGNED_DWORD_VALUE;
/**
* Computes the possible values of a stack element at the nth position from the top right before a specific instruction.
*/
export function stack_value(
export function get_stack_value(
cfg: ControlFlowGraph,
instruction: Instruction,
position: number,
@ -75,7 +75,7 @@ function find_values(
switch (instruction.opcode.code) {
case OP_ARG_PUSHR.code:
if (position === 0) {
return register_value(ctx.cfg, instruction, args[0].value);
return get_register_value(ctx.cfg, instruction, args[0].value);
} else {
position--;
break;

View File

@ -1,5 +1,5 @@
import { Kind, Opcode } from "./opcodes";
import { array_buffers_equal, arrays_equal } from "../../core/util";
import { array_buffers_equal, arrays_equal } from "../../util";
/**
* Instruction invocation.

View File

@ -66,6 +66,12 @@ export abstract class AbstractWritableCursor extends AbstractCursor implements W
return this;
}
// Writes an array of signed 32-bit integers at the current position and
// advances the position by 4 bytes per element. Returns this for chaining.
write_i32_array(array: readonly number[]): this {
this.write_i32_array_at(this.position, array);
this._position += array.length * 4;
return this;
}
write_vec2_f32(value: Vec2): this {
this.write_vec2_f32_at(this.position, value);
this._position += 8;
@ -173,6 +179,17 @@ export abstract class AbstractWritableCursor extends AbstractCursor implements W
return this;
}
// Writes an array of signed 32-bit integers at the given absolute offset
// without changing the current position. Grows the buffer first via
// ensure_size so all 4 * array.length bytes fit.
write_i32_array_at(offset: number, array: readonly number[]): this {
this.ensure_size(4 * array.length, offset);
const len = array.length;
for (let i = 0; i < len; i++) {
this.write_i32_at(offset + i * 4, array[i]);
}
return this;
}
write_vec2_f32_at(offset: number, value: Vec2): this {
this.ensure_size(8, offset);
this.dv.setFloat32(offset, value.x, this.little_endian);

View File

@ -139,24 +139,51 @@ test_integer_read("i32");
test_all(
"u8_array",
() => [1, 2, 3, 4, 5, 6, 7, 8],
() => [1, 2, 0xff, 4, 5, 6, 7, 8],
cursor => {
expect(cursor.u8_array(3)).toEqual([1, 2, 3]);
expect(cursor.seek_start(2).u8_array(4)).toEqual([3, 4, 5, 6]);
expect(cursor.u8_array(3)).toEqual([1, 2, 0xff]);
expect(cursor.seek_start(2).u8_array(4)).toEqual([0xff, 4, 5, 6]);
expect(cursor.seek_start(5).u8_array(3)).toEqual([6, 7, 8]);
},
);
test_all(
"u16_array",
() => [1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8],
() => [1, 1, 2, 2, 0xff, 0xff, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8],
cursor => {
expect(cursor.u16_array(3)).toEqual([0x0101, 0x0202, 0x0303]);
expect(cursor.seek_start(4).u16_array(4)).toEqual([0x0303, 0x0404, 0x0505, 0x0606]);
expect(cursor.u16_array(3)).toEqual([0x0101, 0x0202, 0xffff]);
expect(cursor.seek_start(4).u16_array(4)).toEqual([0xffff, 0x0404, 0x0505, 0x0606]);
expect(cursor.seek_start(10).u16_array(3)).toEqual([0x0606, 0x0707, 0x0808]);
},
);
test_all(
"u32_array",
// prettier-ignore
() => [1, 1, 1, 1, 2, 2, 2, 2, 0xff, 0xff, 0xff, 0xff, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8],
cursor => {
expect(cursor.u32_array(3)).toEqual([0x01010101, 0x02020202, 0xffffffff]);
expect(cursor.seek_start(8).u32_array(4)).toEqual([
0xffffffff,
0x04040404,
0x05050505,
0x06060606,
]);
expect(cursor.seek_start(20).u32_array(3)).toEqual([0x06060606, 0x07070707, 0x08080808]);
},
);
test_all(
"i32_array",
// prettier-ignore
() => [1, 1, 1, 1, 2, 2, 2, 2, 0xff, 0xff, 0xff, 0xff, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8],
cursor => {
expect(cursor.i32_array(3)).toEqual([0x01010101, 0x02020202, -1]);
expect(cursor.seek_start(8).i32_array(4)).toEqual([-1, 0x04040404, 0x05050505, 0x06060606]);
expect(cursor.seek_start(20).i32_array(3)).toEqual([0x06060606, 0x07070707, 0x08080808]);
},
);
function test_string_read(method_name: string, char_size: number): void {
// Array of bytes per endianness.
const bytes: [number[], number[]] = [[], []];

View File

@ -57,6 +57,11 @@ export interface WritableCursor extends Cursor {
*/
write_u32_array(array: number[]): this;
/**
* Writes an array of signed 32-bit integers and increments position by four times the array's length.
*/
write_i32_array(array: readonly number[]): this;
/**
* Writes two 32-bit floating point numbers and increments position by 8.
*/
@ -132,6 +137,11 @@ export interface WritableCursor extends Cursor {
*/
write_u32_array_at(offset: number, array: number[]): this;
/**
* Writes an array of signed 32-bit integers at the given absolute offset. Doesn't increment position.
*/
write_i32_array_at(offset: number, array: readonly number[]): this;
/**
* Writes two 32-bit floating point numbers at the given absolute offset. Doesn't increment position.
*/

View File

@ -4,7 +4,6 @@ import { prs_decompress } from "../../compression/prs/decompress";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
import { BufferCursor } from "../../cursor/BufferCursor";
import { parse_bin, write_bin } from "./bin";
import { Version } from "./Version";
/**
* Parse a file, convert the resulting structure to BIN again and check whether the end result is equal to the original.
@ -12,7 +11,7 @@ import { Version } from "./Version";
function test_quest(path: string): void {
const orig_buffer = readFileSync(path);
const orig_bin = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
const test_buffer = write_bin(parse_bin(orig_bin));
const test_buffer = write_bin(parse_bin(orig_bin).bin);
const test_bin = new ArrayBufferCursor(test_buffer, Endianness.Little);
orig_bin.seek_start(0);

View File

@ -1,32 +1,8 @@
import { Endianness } from "../../Endianness";
import { ControlFlowGraph } from "../../../../quest_editor/scripting/data_flow_analysis/ControlFlowGraph";
import { register_value } from "../../../../quest_editor/scripting/data_flow_analysis/register_value";
import { stack_value } from "../../../../quest_editor/scripting/data_flow_analysis/stack_value";
import {
Arg,
DataSegment,
Instruction,
InstructionSegment,
new_arg,
new_instruction,
Segment,
SegmentType,
StringSegment,
} from "../../../../quest_editor/scripting/instructions";
import {
Kind,
OP_JMP,
OP_RET,
Opcode,
OPCODES,
StackInteraction,
} from "../../../../quest_editor/scripting/opcodes";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
import { Cursor } from "../../cursor/Cursor";
import { ResizableBufferCursor } from "../../cursor/ResizableBufferCursor";
import { WritableCursor } from "../../cursor/WritableCursor";
import { ResizableBuffer } from "../../ResizableBuffer";
import { LogManager } from "../../../Logger";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
import { assert } from "../../../util";
const logger = LogManager.get("core/data_formats/parsing/quest/bin");
@ -36,20 +12,12 @@ export type BinFile = {
readonly quest_name: string;
readonly short_description: string;
readonly long_description: string;
readonly object_code: readonly Segment[];
readonly object_code: ArrayBuffer;
readonly label_offsets: readonly number[];
readonly shop_items: readonly number[];
};
const SEGMENT_PRIORITY: number[] = [];
SEGMENT_PRIORITY[SegmentType.Instructions] = 2;
SEGMENT_PRIORITY[SegmentType.String] = 1;
SEGMENT_PRIORITY[SegmentType.Data] = 0;
export function parse_bin(
cursor: Cursor,
entry_labels: number[] = [0],
lenient: boolean = false,
): BinFile {
export function parse_bin(cursor: Cursor): { bin: BinFile; dc_gc_format: boolean } {
const object_code_offset = cursor.u32();
const label_offset_table_offset = cursor.u32(); // Relative offsets
const size = cursor.u32();
@ -89,40 +57,37 @@ export function parse_bin(
const label_offset_count = Math.floor((cursor.size - label_offset_table_offset) / 4);
cursor.seek_start(label_offset_table_offset);
const label_offset_table = cursor.i32_array(label_offset_count);
const label_holder = new LabelHolder(label_offset_table);
const label_offsets = cursor.i32_array(label_offset_count);
const object_code = cursor
.seek_start(object_code_offset)
.take(label_offset_table_offset - object_code_offset);
const segments = parse_object_code(
object_code,
label_holder,
entry_labels,
lenient,
dc_gc_format,
);
.array_buffer(label_offset_table_offset - object_code_offset);
return {
quest_id,
language,
quest_name,
short_description,
long_description,
object_code: segments,
shop_items,
bin: {
quest_id,
language,
quest_name,
short_description,
long_description,
object_code,
label_offsets,
shop_items,
},
dc_gc_format,
};
}
export function write_bin(bin: BinFile): ArrayBuffer {
const object_code_offset = 4652;
const buffer = new ResizableBuffer(object_code_offset + 100 * bin.object_code.length);
const cursor = new ResizableBufferCursor(buffer, Endianness.Little);
const file_size =
object_code_offset + bin.object_code.byteLength + 4 * bin.label_offsets.length;
const buffer = new ArrayBuffer(file_size);
const cursor = new ArrayBufferCursor(buffer, Endianness.Little);
cursor.write_u32(object_code_offset);
cursor.write_u32(0); // Placeholder for the labels offset.
cursor.write_u32(0); // Placeholder for the file size.
cursor.write_u32(object_code_offset + bin.object_code.byteLength); // Label table offset.
cursor.write_u32(file_size);
cursor.write_u32(0xffffffff);
cursor.write_u32(bin.quest_id);
cursor.write_u32(bin.language);
@ -145,691 +110,14 @@ export function write_bin(bin: BinFile): ArrayBuffer {
cursor.write_u8(0);
}
const { size: object_code_size, label_offsets } = write_object_code(cursor, bin.object_code);
cursor.write_cursor(new ArrayBufferCursor(bin.object_code, Endianness.Little));
for (let label = 0; label < label_offsets.length; label++) {
const offset = label_offsets[label];
cursor.write_i32_array(bin.label_offsets);
if (offset == undefined) {
cursor.write_i32(-1);
} else {
cursor.write_i32(offset);
}
}
const file_size = cursor.position;
cursor.seek_start(4);
cursor.write_u32(object_code_offset + object_code_size);
cursor.write_u32(file_size);
return cursor.seek_start(0).array_buffer(file_size);
}
/**
 * Indexes the label offset table three ways: as a sorted list, by label and by
 * offset. Labels with offset -1 (unset) are excluded. Used while parsing
 * object code to find segment boundaries.
 */
class LabelHolder {
/**
 * Labels and their offset sorted by offset and then label.
 */
labels: { label: number; offset: number }[] = [];
/**
 * Mapping of labels to their offset and index into labels.
 */
private label_map: Map<number, { offset: number; index: number }> = new Map();
/**
 * Mapping of offsets to lists of labels.
 */
private offset_map: Map<number, number[]> = new Map();
constructor(label_offset_table: number[]) {
// Populate the main label list.
for (let label = 0; label < label_offset_table.length; label++) {
const offset = label_offset_table[label];
if (offset !== -1) {
this.labels.push({ label, offset });
}
}
// Sort by offset, then label.
this.labels.sort((a, b) => a.offset - b.offset || a.label - b.label);
// Populate the label and offset maps.
for (let index = 0; index < this.labels.length; index++) {
const { label, offset } = this.labels[index];
this.label_map.set(label, { offset, index });
const labels = this.offset_map.get(offset) || [];
labels.push(label);
this.offset_map.set(offset, labels);
}
}
// Returns all labels pointing at the given offset, or undefined if none do.
get_labels(offset: number): number[] | undefined {
return this.offset_map.get(offset);
}
// Returns the label's offset plus the first following label that has a
// strictly greater offset ("next"), or undefined for unknown labels.
get_info(
label: number,
): { offset: number; next?: { label: number; offset: number } } | undefined {
const offset_and_index = this.label_map.get(label);
if (offset_and_index == undefined) {
return undefined;
}
// Find the next label with a different offset.
let next: { label: number; offset: number } | undefined;
for (let i = offset_and_index.index + 1; i < this.labels.length; i++) {
next = this.labels[i];
// Skip the label if it points to the same offset.
if (next.offset > offset_and_index.offset) {
break;
} else {
next = undefined;
}
}
return {
offset: offset_and_index.offset,
next,
};
}
}
function parse_object_code(
cursor: Cursor,
label_holder: LabelHolder,
entry_labels: number[],
lenient: boolean,
dc_gc_format: boolean,
): Segment[] {
const offset_to_segment = new Map<number, Segment>();
find_and_parse_segments(
cursor,
label_holder,
entry_labels.reduce((m, l) => m.set(l, SegmentType.Instructions), new Map()),
offset_to_segment,
lenient,
dc_gc_format,
assert(
cursor.position === file_size,
`Expected to write ${file_size} bytes, but wrote ${cursor.position}.`,
);
const segments: Segment[] = [];
// Put segments in an array and parse left-over segments as data.
let offset = 0;
while (offset < cursor.size) {
let segment: Segment | undefined = offset_to_segment.get(offset);
// If we have a segment, add it. Otherwise create a new data segment.
if (!segment) {
const labels = label_holder.get_labels(offset);
let end_offset: number;
if (labels) {
const info = label_holder.get_info(labels[0])!;
end_offset = info.next ? info.next.offset : cursor.size;
} else {
end_offset = cursor.size;
for (const label of label_holder.labels) {
if (label.offset > offset) {
end_offset = label.offset;
break;
}
}
}
cursor.seek_start(offset);
parse_data_segment(offset_to_segment, cursor, end_offset, labels || []);
segment = offset_to_segment.get(offset);
// Should never happen.
if (end_offset <= offset) {
logger.error(
`Next offset ${end_offset} was smaller than or equal to current offset ${offset}.`,
);
break;
}
// Should never happen either.
if (!segment) {
logger.error(`Couldn't create segment for offset ${offset}.`);
break;
}
}
segments.push(segment);
switch (segment.type) {
case SegmentType.Instructions:
for (const instruction of segment.instructions) {
offset += instruction.size;
}
break;
case SegmentType.Data:
offset += segment.data.byteLength;
break;
case SegmentType.String:
// String segments should be multiples of 4 bytes.
offset += 4 * Math.ceil((segment.value.length + 1) / 2);
break;
default:
throw new Error(`${SegmentType[segment!.type]} not implemented.`);
}
}
// Add unreferenced labels to their segment.
for (const { label, offset } of label_holder.labels) {
const segment = offset_to_segment.get(offset);
if (segment) {
if (!segment.labels.includes(label)) {
segment.labels.push(label);
segment.labels.sort((a, b) => a - b);
}
} else {
logger.warn(`Label ${label} with offset ${offset} does not point to anything.`);
}
}
// Sanity check parsed object code.
if (cursor.size !== offset) {
const message = `Expected to parse ${cursor.size} bytes but parsed ${offset} instead.`;
if (lenient) {
logger.error(message);
} else {
throw new Error(message);
}
}
return segments;
}
/**
 * Parses segments for the given entry labels, then scans the parsed
 * instructions for further label references (via data flow analysis) and
 * repeats until no new segments are discovered (fixed point on
 * offset_to_segment.size).
 */
function find_and_parse_segments(
cursor: Cursor,
label_holder: LabelHolder,
labels: Map<number, SegmentType>,
offset_to_segment: Map<number, Segment>,
lenient: boolean,
dc_gc_format: boolean,
): void {
let start_segment_count: number;
// Iteratively parse segments from label references.
do {
start_segment_count = offset_to_segment.size;
for (const [label, type] of labels) {
parse_segment(
offset_to_segment,
label_holder,
cursor,
label,
type,
lenient,
dc_gc_format,
);
}
// Find label references.
const sorted_segments = [...offset_to_segment.entries()]
.filter(([, s]) => s.type === SegmentType.Instructions)
.sort(([a], [b]) => a - b)
.map(([, s]) => s as InstructionSegment);
const cfg = ControlFlowGraph.create(sorted_segments);
labels = new Map();
for (const segment of sorted_segments) {
for (const instruction of segment.instructions) {
for (let i = 0; i < instruction.opcode.params.length; i++) {
const param = instruction.opcode.params[i];
switch (param.type.kind) {
case Kind.ILabel:
get_arg_label_values(
cfg,
labels,
instruction,
i,
SegmentType.Instructions,
);
break;
case Kind.ILabelVar:
// Never on the stack.
// Eat all remaining arguments.
for (; i < instruction.args.length; i++) {
labels.set(instruction.args[i].value, SegmentType.Instructions);
}
break;
case Kind.DLabel:
get_arg_label_values(cfg, labels, instruction, i, SegmentType.Data);
break;
case Kind.SLabel:
get_arg_label_values(cfg, labels, instruction, i, SegmentType.String);
break;
case Kind.RegTupRef:
{
// Never on the stack.
const arg = instruction.args[i];
for (let j = 0; j < param.type.register_tuples.length; j++) {
const reg_tup = param.type.register_tuples[j];
if (reg_tup.type.kind === Kind.ILabel) {
const label_values = register_value(
cfg,
instruction,
arg.value + j,
);
// Ignore ambiguous results with many possible values.
if (label_values.size() <= 10) {
for (const label of label_values) {
labels.set(label, SegmentType.Instructions);
}
}
}
}
}
break;
}
}
}
}
} while (offset_to_segment.size > start_segment_count);
}
/**
 * Collects the label value(s) of the given instruction parameter into
 * `labels`. For stack-popping opcodes the possible stack values are used
 * (only when there are at most 10 candidates); otherwise the immediate
 * argument is used. An existing entry is only overwritten when the new
 * segment type has a higher SEGMENT_PRIORITY.
 */
function get_arg_label_values(
cfg: ControlFlowGraph,
labels: Map<number, SegmentType>,
instruction: Instruction,
param_idx: number,
segment_type: SegmentType,
): void {
if (instruction.opcode.stack === StackInteraction.Pop) {
// Position from the top of the stack, counted from the last parameter.
const stack_values = stack_value(
cfg,
instruction,
instruction.opcode.params.length - param_idx - 1,
);
if (stack_values.size() <= 10) {
for (const value of stack_values) {
const old_type = labels.get(value);
if (
old_type == undefined ||
SEGMENT_PRIORITY[segment_type] > SEGMENT_PRIORITY[old_type]
) {
labels.set(value, segment_type);
}
}
}
} else {
const value = instruction.args[param_idx].value;
const old_type = labels.get(value);
if (old_type == undefined || SEGMENT_PRIORITY[segment_type] > SEGMENT_PRIORITY[old_type]) {
labels.set(value, segment_type);
}
}
}
/**
 * Parses the segment starting at the given label's offset and records it in
 * offset_to_segment, dispatching on the segment type. An already-parsed
 * segment is reparsed only when the requested type has a higher
 * SEGMENT_PRIORITY. In lenient mode parse errors are logged instead of thrown.
 */
function parse_segment(
offset_to_segment: Map<number, Segment>,
label_holder: LabelHolder,
cursor: Cursor,
label: number,
type: SegmentType,
lenient: boolean,
dc_gc_format: boolean,
): void {
try {
const info = label_holder.get_info(label);
if (info == undefined) {
logger.warn(`Label ${label} is not registered in the label table.`);
return;
}
// Check whether we've already parsed this segment and reparse it if necessary.
const segment = offset_to_segment.get(info.offset);
let labels: number[];
if (segment) {
if (!segment.labels.includes(label)) {
segment.labels.push(label);
segment.labels.sort((a, b) => a - b);
}
if (SEGMENT_PRIORITY[type] > SEGMENT_PRIORITY[segment.type]) {
labels = segment.labels;
} else {
return;
}
} else {
labels = [label];
}
// The segment runs until the next label's offset, or to the end of the code.
const end_offset = info.next ? info.next.offset : cursor.size;
cursor.seek_start(info.offset);
switch (type) {
case SegmentType.Instructions:
parse_instructions_segment(
offset_to_segment,
label_holder,
cursor,
end_offset,
labels,
info.next && info.next.label,
lenient,
dc_gc_format,
);
break;
case SegmentType.Data:
parse_data_segment(offset_to_segment, cursor, end_offset, labels);
break;
case SegmentType.String:
parse_string_segment(offset_to_segment, cursor, end_offset, labels, dc_gc_format);
break;
default:
throw new Error(`Segment type ${SegmentType[type]} not implemented.`);
}
} catch (e) {
if (lenient) {
logger.error("Couldn't fully parse object code segment.", e);
} else {
throw e;
}
}
}
/**
 * Parses instructions from the cursor up to end_offset into a new
 * InstructionSegment registered in offset_to_segment. Opcodes 0xf8/0xf9 are
 * two-byte opcodes. If control can drop through to the next label (no ret/jmp
 * found scanning backwards), that label is parsed as instructions too.
 */
function parse_instructions_segment(
offset_to_segment: Map<number, Segment>,
label_holder: LabelHolder,
cursor: Cursor,
end_offset: number,
labels: number[],
next_label: number | undefined,
lenient: boolean,
dc_gc_format: boolean,
): void {
const instructions: Instruction[] = [];
const segment: InstructionSegment = {
type: SegmentType.Instructions,
labels,
instructions,
asm: { labels: [] },
};
offset_to_segment.set(cursor.position, segment);
while (cursor.position < end_offset) {
// Parse the opcode.
const main_opcode = cursor.u8();
let opcode_index;
switch (main_opcode) {
case 0xf8:
case 0xf9:
// Two-byte opcode: the second byte completes the index.
opcode_index = (main_opcode << 8) | cursor.u8();
break;
default:
opcode_index = main_opcode;
break;
}
const opcode = OPCODES[opcode_index];
// Parse the arguments.
try {
const args = parse_instruction_arguments(cursor, opcode, dc_gc_format);
instructions.push(new_instruction(opcode, args));
} catch (e) {
if (lenient) {
logger.error(
`Exception occurred while parsing arguments for instruction ${opcode.mnemonic}.`,
e,
);
instructions.push(new_instruction(opcode, []));
} else {
throw e;
}
}
}
// Recurse on label drop-through.
if (next_label != undefined) {
// Find the first ret or jmp.
// NOTE(review): this scans from the last instruction backwards and stops at
// the first ret/jmp anywhere in the segment — confirm that's intended rather
// than checking only the final instruction.
let drop_through = true;
for (let i = instructions.length - 1; i >= 0; i--) {
const opcode = instructions[i].opcode;
if (opcode.code === OP_RET.code || opcode.code === OP_JMP.code) {
drop_through = false;
break;
}
}
if (drop_through) {
parse_segment(
offset_to_segment,
label_holder,
cursor,
next_label,
SegmentType.Instructions,
lenient,
dc_gc_format,
);
}
}
}
/**
 * Reads the bytes between the current position and end_offset as a raw
 * DataSegment and registers it in offset_to_segment under its start offset.
 */
function parse_data_segment(
offset_to_segment: Map<number, Segment>,
cursor: Cursor,
end_offset: number,
labels: number[],
): void {
const start_offset = cursor.position;
const segment: DataSegment = {
type: SegmentType.Data,
labels,
data: cursor.array_buffer(end_offset - start_offset),
asm: { labels: [] },
};
offset_to_segment.set(start_offset, segment);
}
/**
 * Reads the bytes between the current position and end_offset as a
 * StringSegment and registers it in offset_to_segment. DC/GC quests store
 * strings as ASCII, other versions as UTF-16.
 */
function parse_string_segment(
offset_to_segment: Map<number, Segment>,
cursor: Cursor,
end_offset: number,
labels: number[],
dc_gc_format: boolean,
): void {
const start_offset = cursor.position;
const segment: StringSegment = {
type: SegmentType.String,
labels,
value: dc_gc_format
? cursor.string_ascii(end_offset - start_offset, true, true)
: cursor.string_utf16(end_offset - start_offset, true, true),
asm: { labels: [] },
};
offset_to_segment.set(start_offset, segment);
}
/**
 * Reads the immediate arguments for an opcode according to its parameter
 * list. Opcodes that pop their arguments from the stack have no immediate
 * arguments, so an empty array is returned for them. DC/GC quests encode
 * string arguments as ASCII, other versions as UTF-16.
 */
function parse_instruction_arguments(cursor: Cursor, opcode: Opcode, dc_gc_format: boolean): Arg[] {
const args: Arg[] = [];
if (opcode.stack !== StackInteraction.Pop) {
for (const param of opcode.params) {
switch (param.type.kind) {
case Kind.Byte:
args.push(new_arg(cursor.u8(), 1));
break;
case Kind.Word:
args.push(new_arg(cursor.u16(), 2));
break;
case Kind.DWord:
args.push(new_arg(cursor.i32(), 4));
break;
case Kind.Float:
args.push(new_arg(cursor.f32(), 4));
break;
case Kind.Label:
case Kind.ILabel:
case Kind.DLabel:
case Kind.SLabel:
// Labels are stored as 16-bit values.
args.push(new_arg(cursor.u16(), 2));
break;
case Kind.String:
{
// Null-terminated string; capped at 4096 bytes or the bytes left.
const start_pos = cursor.position;
const max_bytes = Math.min(4096, cursor.bytes_left);
args.push(
new_arg(
dc_gc_format
? cursor.string_ascii(max_bytes, true, false)
: cursor.string_utf16(max_bytes, true, false),
cursor.position - start_pos,
),
);
}
break;
case Kind.ILabelVar:
{
// Variable-length list: a u8 count followed by that many u16 labels.
const arg_size = cursor.u8();
args.push(...cursor.u16_array(arg_size).map(value => new_arg(value, 2)));
}
break;
case Kind.RegRef:
case Kind.RegTupRef:
args.push(new_arg(cursor.u8(), 1));
break;
case Kind.RegRefVar:
{
// Variable-length list: a u8 count followed by that many u8 registers.
const arg_size = cursor.u8();
args.push(...cursor.u8_array(arg_size).map(value => new_arg(value, 1)));
}
break;
default:
throw new Error(`Parameter type ${Kind[param.type.kind]} not implemented.`);
}
}
}
return args;
}
/**
 * Serializes `segments` to binary object code at the cursor's current position.
 *
 * @param cursor - Destination cursor; bytes are written starting at its current position.
 * @param segments - Instruction, string and data segments to serialize, in order.
 * @returns The number of bytes written and a sparse table mapping each label to its
 *          offset relative to the start position.
 *          NOTE(review): labels that never occur stay as holes (undefined) in
 *          `label_offsets` — confirm callers expect that (the newer variant of this
 *          function normalizes holes to -1).
 */
function write_object_code(
    cursor: WritableCursor,
    segments: readonly Segment[],
): { size: number; label_offsets: number[] } {
    const start_pos = cursor.position;
    // Keep track of label offsets.
    const label_offsets: number[] = [];

    for (const segment of segments) {
        // All labels of a segment point at its first byte.
        for (const label of segment.labels) {
            label_offsets[label] = cursor.position - start_pos;
        }

        if (segment.type === SegmentType.Instructions) {
            for (const instruction of segment.instructions) {
                const opcode = instruction.opcode;

                // Two-byte opcodes are written high byte first.
                if (opcode.size === 2) {
                    cursor.write_u8(opcode.code >>> 8);
                }

                cursor.write_u8(opcode.code & 0xff);

                // Stack-popping opcodes take their arguments from the stack, so no
                // immediate arguments are serialized for them.
                if (opcode.stack !== StackInteraction.Pop) {
                    for (let i = 0; i < opcode.params.length; i++) {
                        const param = opcode.params[i];
                        const args = instruction.param_to_args[i];
                        const [arg] = args;

                        switch (param.type.kind) {
                            case Kind.Byte:
                                // Use the signed writer for negative values so they
                                // round-trip through the byte representation.
                                if (arg.value >= 0) {
                                    cursor.write_u8(arg.value);
                                } else {
                                    cursor.write_i8(arg.value);
                                }
                                break;
                            case Kind.Word:
                                if (arg.value >= 0) {
                                    cursor.write_u16(arg.value);
                                } else {
                                    cursor.write_i16(arg.value);
                                }
                                break;
                            case Kind.DWord:
                                if (arg.value >= 0) {
                                    cursor.write_u32(arg.value);
                                } else {
                                    cursor.write_i32(arg.value);
                                }
                                break;
                            case Kind.Float:
                                cursor.write_f32(arg.value);
                                break;
                            case Kind.Label:
                            case Kind.ILabel:
                            case Kind.DLabel:
                            case Kind.SLabel:
                                // Labels are 16-bit indices into the label table.
                                cursor.write_u16(arg.value);
                                break;
                            case Kind.String:
                                cursor.write_string_utf16(arg.value, arg.size);
                                break;
                            case Kind.ILabelVar:
                                // Length byte followed by 16-bit label values.
                                cursor.write_u8(args.length);
                                cursor.write_u16_array(args.map(arg => arg.value));
                                break;
                            case Kind.RegRef:
                            case Kind.RegTupRef:
                                cursor.write_u8(arg.value);
                                break;
                            case Kind.RegRefVar:
                                // Length byte followed by register bytes.
                                cursor.write_u8(args.length);
                                cursor.write_u8_array(args.map(arg => arg.value));
                                break;
                            default:
                                // TYPE_ANY, TYPE_VALUE and TYPE_POINTER cannot be serialized.
                                throw new Error(
                                    `Parameter type ${Kind[param.type.kind]} not implemented.`,
                                );
                        }
                    }
                }
            }
        } else if (segment.type === SegmentType.String) {
            // String segments should be multiples of 4 bytes.
            const byte_length = 4 * Math.ceil((segment.value.length + 1) / 2);
            cursor.write_string_utf16(segment.value, byte_length);
        } else {
            cursor.write_cursor(new ArrayBufferCursor(segment.data, cursor.endianness));
        }
    }

    // Bug fix: removed an unreachable `return buffer;` that followed this return
    // (`buffer` was not even defined in this scope).
    return { size: cursor.position - start_pos, label_offsets };
}

View File

@ -59,8 +59,8 @@ if (process.env["RUN_ALL_TESTS"] === "true") {
function round_trip_test(path: string, file_name: string, contents: Buffer): void {
test(`parse_quest and write_quest_qst ${path}`, () => {
const orig_quest = parse_qst_to_quest(new BufferCursor(contents, Endianness.Little))!;
const test_bin = write_quest_qst(orig_quest, file_name);
const test_quest = parse_qst_to_quest(new ArrayBufferCursor(test_bin, Endianness.Little))!;
const test_qst = write_quest_qst(orig_quest, file_name);
const test_quest = parse_qst_to_quest(new ArrayBufferCursor(test_qst, Endianness.Little))!;
expect(test_quest.name).toBe(orig_quest.name);
expect(test_quest.short_description).toBe(orig_quest.short_description);

View File

@ -1,10 +1,5 @@
import {
Instruction,
InstructionSegment,
Segment,
SegmentType,
} from "../../../../quest_editor/scripting/instructions";
import { OP_BB_MAP_DESIGNATE, OP_SET_EPISODE } from "../../../../quest_editor/scripting/opcodes";
import { InstructionSegment, Segment, SegmentType } from "../../asm/instructions";
import { OP_SET_EPISODE } from "../../asm/opcodes";
import { prs_compress } from "../../compression/prs/compress";
import { prs_decompress } from "../../compression/prs/decompress";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
@ -12,7 +7,7 @@ import { Cursor } from "../../cursor/Cursor";
import { ResizableBufferCursor } from "../../cursor/ResizableBufferCursor";
import { Endianness } from "../../Endianness";
import { parse_bin, write_bin } from "./bin";
import { DatFile, DatNpc, DatObject, DatUnknown, parse_dat, write_dat } from "./dat";
import { DatNpc, DatObject, DatUnknown, parse_dat, write_dat } from "./dat";
import { QuestEvent, QuestNpc, QuestObject } from "./entities";
import { Episode } from "./Episode";
import { object_data, ObjectType, pso_id_to_object_type } from "./object_types";
@ -20,6 +15,8 @@ import { parse_qst, QstContainedFile, write_qst } from "./qst";
import { npc_data, NpcType } from "./npc_types";
import { reinterpret_f32_as_i32, reinterpret_i32_as_f32 } from "../../../primitive_conversion";
import { LogManager } from "../../../Logger";
import { parse_object_code, write_object_code } from "./object_code";
import { get_map_designations } from "../../asm/data_flow_analysis/get_map_designations";
const logger = LogManager.get("core/data_formats/parsing/quest");
@ -48,34 +45,42 @@ export function parse_bin_dat_to_quest(
lenient: boolean = false,
): Quest | undefined {
// Decompress and parse files.
const bin_decompressed = prs_decompress(bin_cursor);
const { bin, dc_gc_format } = parse_bin(bin_decompressed);
const dat_decompressed = prs_decompress(dat_cursor);
const dat = parse_dat(dat_decompressed);
const objects = parse_obj_data(dat.objs);
const bin_decompressed = prs_decompress(bin_cursor);
const bin = parse_bin(
bin_decompressed,
extract_script_entry_points(objects, dat.npcs),
lenient,
);
// Extract episode and map designations from object code.
let episode = Episode.I;
let map_designations: Map<number, number> = new Map();
if (bin.object_code.length) {
const object_code = parse_object_code(
bin.object_code,
bin.label_offsets,
extract_script_entry_points(objects, dat.npcs),
lenient,
dc_gc_format,
);
if (object_code.length) {
const instruction_segments = object_code.filter(
s => s.type === SegmentType.Instructions,
) as InstructionSegment[];
let label_0_segment: InstructionSegment | undefined;
for (const segment of bin.object_code) {
if (segment.type === SegmentType.Instructions && segment.labels.includes(0)) {
for (const segment of instruction_segments) {
if (segment.labels.includes(0)) {
label_0_segment = segment;
break;
}
}
if (label_0_segment) {
episode = get_episode(label_0_segment.instructions);
map_designations = extract_map_designations(dat, episode, label_0_segment.instructions);
episode = get_episode(label_0_segment);
map_designations = get_map_designations(instruction_segments, label_0_segment);
} else {
logger.warn(`No instruction for label 0 found.`);
}
@ -94,7 +99,7 @@ export function parse_bin_dat_to_quest(
npcs: parse_npc_data(episode, dat.npcs),
events: dat.events,
dat_unknowns: dat.unknowns,
object_code: bin.object_code,
object_code,
shop_items: bin.shop_items,
map_designations,
};
@ -145,15 +150,20 @@ export function write_quest_qst(quest: Quest, file_name: string): ArrayBuffer {
events: quest.events,
unknowns: quest.dat_unknowns,
});
const { object_code, label_offsets } = write_object_code(quest.object_code);
const bin = write_bin({
quest_id: quest.id,
language: quest.language,
quest_name: quest.name,
short_description: quest.short_description,
long_description: quest.long_description,
object_code: quest.object_code,
object_code,
label_offsets,
shop_items: quest.shop_items,
});
const ext_start = file_name.lastIndexOf(".");
const base_file_name =
ext_start === -1 ? file_name.slice(0, 11) : file_name.slice(0, Math.min(11, ext_start));
@ -179,8 +189,8 @@ export function write_quest_qst(quest: Quest, file_name: string): ArrayBuffer {
/**
* Defaults to episode I.
*/
function get_episode(func_0_instructions: Instruction[]): Episode {
const set_episode = func_0_instructions.find(
function get_episode(func_0_segment: InstructionSegment): Episode {
const set_episode = func_0_segment.instructions.find(
instruction => instruction.opcode.code === OP_SET_EPISODE.code,
);
@ -200,22 +210,6 @@ function get_episode(func_0_instructions: Instruction[]): Episode {
}
}
function extract_map_designations(
dat: DatFile,
episode: Episode,
func_0_instructions: Instruction[],
): Map<number, number> {
const map_designations = new Map<number, number>();
for (const inst of func_0_instructions) {
if (inst.opcode.code === OP_BB_MAP_DESIGNATE.code) {
map_designations.set(inst.args[0].value, inst.args[2].value);
}
}
return map_designations;
}
function extract_script_entry_points(
objects: readonly QuestObject[],
npcs: readonly DatNpc[],

View File

@ -0,0 +1,721 @@
import {
Arg,
DataSegment,
Instruction,
InstructionSegment,
new_arg,
new_instruction,
Segment,
SegmentType,
StringSegment,
} from "../../asm/instructions";
import { Cursor } from "../../cursor/Cursor";
import { ControlFlowGraph } from "../../asm/data_flow_analysis/ControlFlowGraph";
import { Kind, OP_JMP, OP_RET, Opcode, OPCODES, StackInteraction } from "../../asm/opcodes";
import { get_register_value } from "../../asm/data_flow_analysis/get_register_value";
import { get_stack_value } from "../../asm/data_flow_analysis/get_stack_value";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
import { Endianness } from "../../Endianness";
import { LogManager } from "../../../Logger";
import { ResizableBufferCursor } from "../../cursor/ResizableBufferCursor";
import { ResizableBuffer } from "../../ResizableBuffer";
const logger = LogManager.get("core/data_formats/parsing/quest/object_code");
const SEGMENT_PRIORITY: number[] = [];
SEGMENT_PRIORITY[SegmentType.Instructions] = 2;
SEGMENT_PRIORITY[SegmentType.String] = 1;
SEGMENT_PRIORITY[SegmentType.Data] = 0;
/**
 * Parses binary object code into segments.
 *
 * @param object_code - Raw object code bytes.
 * @param label_offsets - Offset of each label within the object code (-1 for unused labels).
 * @param entry_labels - Labels at which parsing of instruction segments starts.
 * @param lenient - When true, recoverable parse errors are logged instead of thrown.
 * @param dc_gc_format - When true, strings are parsed as ASCII instead of UTF-16.
 */
export function parse_object_code(
    object_code: ArrayBuffer,
    label_offsets: readonly number[],
    entry_labels: readonly number[],
    lenient: boolean,
    dc_gc_format: boolean,
): Segment[] {
    // Object code is always little-endian.
    const cursor = new ArrayBufferCursor(object_code, Endianness.Little);
    const labels = new LabelHolder(label_offsets);
    return internal_parse_object_code(cursor, labels, entry_labels, lenient, dc_gc_format);
}
/**
 * Serializes `segments` to binary object code.
 *
 * @param segments - Instruction, string and data segments to serialize, in order.
 * @returns The serialized object code and a table mapping each label to its byte
 *          offset within the object code (-1 for labels that don't occur).
 */
export function write_object_code(
    segments: readonly Segment[],
): { object_code: ArrayBuffer; label_offsets: number[] } {
    // 100 bytes per segment is just an initial capacity guess; the buffer grows as needed.
    const cursor = new ResizableBufferCursor(
        new ResizableBuffer(100 * segments.length),
        Endianness.Little,
    );
    const start_pos = cursor.position;
    // Keep track of label offsets.
    const label_offsets: number[] = [];

    for (const segment of segments) {
        // All labels of a segment point at its first byte.
        for (const label of segment.labels) {
            label_offsets[label] = cursor.position - start_pos;
        }

        if (segment.type === SegmentType.Instructions) {
            for (const instruction of segment.instructions) {
                const opcode = instruction.opcode;

                // Two-byte opcodes are written high byte first.
                if (opcode.size === 2) {
                    cursor.write_u8(opcode.code >>> 8);
                }

                cursor.write_u8(opcode.code & 0xff);

                // Stack-popping opcodes take their arguments from the stack, so no
                // immediate arguments are serialized for them.
                if (opcode.stack !== StackInteraction.Pop) {
                    for (let i = 0; i < opcode.params.length; i++) {
                        const param = opcode.params[i];
                        const args = instruction.param_to_args[i];
                        const [arg] = args;

                        switch (param.type.kind) {
                            case Kind.Byte:
                                // Use the signed writer for negative values so they
                                // round-trip through the byte representation.
                                if (arg.value >= 0) {
                                    cursor.write_u8(arg.value);
                                } else {
                                    cursor.write_i8(arg.value);
                                }
                                break;
                            case Kind.Word:
                                if (arg.value >= 0) {
                                    cursor.write_u16(arg.value);
                                } else {
                                    cursor.write_i16(arg.value);
                                }
                                break;
                            case Kind.DWord:
                                if (arg.value >= 0) {
                                    cursor.write_u32(arg.value);
                                } else {
                                    cursor.write_i32(arg.value);
                                }
                                break;
                            case Kind.Float:
                                cursor.write_f32(arg.value);
                                break;
                            case Kind.Label:
                            case Kind.ILabel:
                            case Kind.DLabel:
                            case Kind.SLabel:
                                // Labels are 16-bit indices into the label table.
                                cursor.write_u16(arg.value);
                                break;
                            case Kind.String:
                                cursor.write_string_utf16(arg.value, arg.size);
                                break;
                            case Kind.ILabelVar:
                                // Length byte followed by 16-bit label values.
                                cursor.write_u8(args.length);
                                cursor.write_u16_array(args.map(arg => arg.value));
                                break;
                            case Kind.RegRef:
                            case Kind.RegTupRef:
                                cursor.write_u8(arg.value);
                                break;
                            case Kind.RegRefVar:
                                // Length byte followed by register bytes.
                                cursor.write_u8(args.length);
                                cursor.write_u8_array(args.map(arg => arg.value));
                                break;
                            default:
                                // TYPE_ANY, TYPE_VALUE and TYPE_POINTER cannot be serialized.
                                throw new Error(
                                    `Parameter type ${Kind[param.type.kind]} not implemented.`,
                                );
                        }
                    }
                }
            }
        } else if (segment.type === SegmentType.String) {
            // String segments should be multiples of 4 bytes.
            const byte_length = 4 * Math.ceil((segment.value.length + 1) / 2);
            cursor.write_string_utf16(segment.value, byte_length);
        } else {
            cursor.write_cursor(new ArrayBufferCursor(segment.data, cursor.endianness));
        }
    }

    // Labels that never occurred get -1 instead of a hole in the array.
    for (let i = 0; i < label_offsets.length; i++) {
        if (label_offsets[i] == undefined) {
            label_offsets[i] = -1;
        }
    }

    return { object_code: cursor.seek_start(0).array_buffer(), label_offsets };
}
/**
 * Parses object code into segments.
 *
 * First parses all segments reachable from `entry_labels`, then walks the whole
 * byte range from offset 0 and turns any bytes not covered by a parsed segment
 * into data segments, so the entire input ends up covered.
 */
function internal_parse_object_code(
    cursor: Cursor,
    label_holder: LabelHolder,
    entry_labels: readonly number[],
    lenient: boolean,
    dc_gc_format: boolean,
): Segment[] {
    // Maps byte offsets (relative to the start of the object code) to parsed segments.
    const offset_to_segment = new Map<number, Segment>();

    // Entry labels are always parsed as instruction segments.
    find_and_parse_segments(
        cursor,
        label_holder,
        entry_labels.reduce((m, l) => m.set(l, SegmentType.Instructions), new Map()),
        offset_to_segment,
        lenient,
        dc_gc_format,
    );

    const segments: Segment[] = [];

    // Put segments in an array and parse left-over segments as data.
    let offset = 0;

    while (offset < cursor.size) {
        let segment: Segment | undefined = offset_to_segment.get(offset);

        // If we have a segment, add it. Otherwise create a new data segment.
        if (!segment) {
            const labels = label_holder.get_labels(offset);
            let end_offset: number;

            if (labels) {
                const info = label_holder.get_info(labels[0])!;
                end_offset = info.next ? info.next.offset : cursor.size;
            } else {
                // No label points here; the data segment runs until the next labeled
                // offset (or to the end of the object code).
                end_offset = cursor.size;

                for (const label of label_holder.labels) {
                    if (label.offset > offset) {
                        end_offset = label.offset;
                        break;
                    }
                }
            }

            cursor.seek_start(offset);
            parse_data_segment(offset_to_segment, cursor, end_offset, labels || []);

            segment = offset_to_segment.get(offset);

            // Should never happen.
            if (end_offset <= offset) {
                logger.error(
                    `Next offset ${end_offset} was smaller than or equal to current offset ${offset}.`,
                );
                break;
            }

            // Should never happen either.
            if (!segment) {
                logger.error(`Couldn't create segment for offset ${offset}.`);
                break;
            }
        }

        segments.push(segment);

        // Advance past the segment we just added, using its serialized size.
        switch (segment.type) {
            case SegmentType.Instructions:
                for (const instruction of segment.instructions) {
                    offset += instruction.size;
                }

                break;
            case SegmentType.Data:
                offset += segment.data.byteLength;
                break;
            case SegmentType.String:
                // String segments should be multiples of 4 bytes.
                offset += 4 * Math.ceil((segment.value.length + 1) / 2);
                break;
            default:
                throw new Error(`${SegmentType[segment!.type]} not implemented.`);
        }
    }

    // Add unreferenced labels to their segment.
    for (const { label, offset } of label_holder.labels) {
        const segment = offset_to_segment.get(offset);

        if (segment) {
            if (!segment.labels.includes(label)) {
                segment.labels.push(label);
                segment.labels.sort((a, b) => a - b);
            }
        } else {
            logger.warn(`Label ${label} with offset ${offset} does not point to anything.`);
        }
    }

    // Sanity check parsed object code.
    if (cursor.size !== offset) {
        const message = `Expected to parse ${cursor.size} bytes but parsed ${offset} instead.`;

        if (lenient) {
            logger.error(message);
        } else {
            throw new Error(message);
        }
    }

    return segments;
}
/**
 * Parses a segment for every label in `labels`, then scans the parsed
 * instructions for arguments that reference further labels and parses those
 * too. Repeats until an iteration discovers no new segments (a fixpoint).
 */
function find_and_parse_segments(
    cursor: Cursor,
    label_holder: LabelHolder,
    labels: Map<number, SegmentType>,
    offset_to_segment: Map<number, Segment>,
    lenient: boolean,
    dc_gc_format: boolean,
): void {
    let start_segment_count: number;

    // Iteratively parse segments from label references.
    do {
        start_segment_count = offset_to_segment.size;

        for (const [label, type] of labels) {
            parse_segment(
                offset_to_segment,
                label_holder,
                cursor,
                label,
                type,
                lenient,
                dc_gc_format,
            );
        }

        // Find label references.
        const sorted_segments = [...offset_to_segment.entries()]
            .filter(([, s]) => s.type === SegmentType.Instructions)
            .sort(([a], [b]) => a - b)
            .map(([, s]) => s as InstructionSegment);

        // A control flow graph is needed to resolve label values that are passed
        // via registers or the stack.
        const cfg = ControlFlowGraph.create(sorted_segments);

        labels = new Map();

        for (const segment of sorted_segments) {
            for (const instruction of segment.instructions) {
                for (let i = 0; i < instruction.opcode.params.length; i++) {
                    const param = instruction.opcode.params[i];

                    switch (param.type.kind) {
                        case Kind.ILabel:
                            get_arg_label_values(
                                cfg,
                                labels,
                                instruction,
                                i,
                                SegmentType.Instructions,
                            );
                            break;
                        case Kind.ILabelVar:
                            // Never on the stack.
                            // Eat all remaining arguments.
                            for (; i < instruction.args.length; i++) {
                                labels.set(instruction.args[i].value, SegmentType.Instructions);
                            }

                            break;
                        case Kind.DLabel:
                            get_arg_label_values(cfg, labels, instruction, i, SegmentType.Data);
                            break;
                        case Kind.SLabel:
                            get_arg_label_values(cfg, labels, instruction, i, SegmentType.String);
                            break;
                        case Kind.RegTupRef:
                            {
                                // Never on the stack.
                                const arg = instruction.args[i];

                                for (let j = 0; j < param.type.register_tuples.length; j++) {
                                    const reg_tup = param.type.register_tuples[j];

                                    // Only tuple elements holding instruction labels matter here.
                                    if (reg_tup.type.kind === Kind.ILabel) {
                                        const label_values = get_register_value(
                                            cfg,
                                            instruction,
                                            arg.value + j,
                                        );

                                        // NOTE(review): 10 looks like a heuristic cap on
                                        // data-flow imprecision (more than 10 possible
                                        // values is treated as "unknown") — confirm.
                                        if (label_values.size() <= 10) {
                                            for (const label of label_values) {
                                                labels.set(label, SegmentType.Instructions);
                                            }
                                        }
                                    }
                                }
                            }

                            break;
                    }
                }
            }
        }
    } while (offset_to_segment.size > start_segment_count);
}
/**
 * Determines the label values for parameter `param_idx` of `instruction` —
 * taken either from its immediate arguments or from values pushed onto the
 * stack — and records them in `labels` under the given segment type. A label
 * that's already recorded is only overwritten when `segment_type` has a higher
 * priority than the recorded type.
 */
function get_arg_label_values(
    cfg: ControlFlowGraph,
    labels: Map<number, SegmentType>,
    instruction: Instruction,
    param_idx: number,
    segment_type: SegmentType,
): void {
    if (instruction.opcode.stack === StackInteraction.Pop) {
        // Arguments are pushed in parameter order, so the value for this parameter
        // sits (param count - param_idx - 1) positions from the top of the stack.
        const stack_values = get_stack_value(
            cfg,
            instruction,
            instruction.opcode.params.length - param_idx - 1,
        );

        // NOTE(review): 10 looks like a heuristic cap on data-flow imprecision — confirm.
        if (stack_values.size() <= 10) {
            for (const value of stack_values) {
                const old_type = labels.get(value);

                if (
                    old_type == undefined ||
                    SEGMENT_PRIORITY[segment_type] > SEGMENT_PRIORITY[old_type]
                ) {
                    labels.set(value, segment_type);
                }
            }
        }
    } else {
        // Immediate argument: the label value is right in the instruction.
        const value = instruction.args[param_idx].value;
        const old_type = labels.get(value);

        if (old_type == undefined || SEGMENT_PRIORITY[segment_type] > SEGMENT_PRIORITY[old_type]) {
            labels.set(value, segment_type);
        }
    }
}
/**
 * Parses the segment that `label` points to as the given segment type and
 * stores it in `offset_to_segment`. If the offset was already parsed, the
 * segment is only reparsed when the new type has a higher priority than the
 * existing one (Instructions > String > Data, per SEGMENT_PRIORITY).
 */
function parse_segment(
    offset_to_segment: Map<number, Segment>,
    label_holder: LabelHolder,
    cursor: Cursor,
    label: number,
    type: SegmentType,
    lenient: boolean,
    dc_gc_format: boolean,
): void {
    try {
        const info = label_holder.get_info(label);

        if (info == undefined) {
            logger.warn(`Label ${label} is not registered in the label table.`);
            return;
        }

        // Check whether we've already parsed this segment and reparse it if necessary.
        const segment = offset_to_segment.get(info.offset);

        let labels: number[];

        if (segment) {
            // Make sure this label is attached to the existing segment.
            if (!segment.labels.includes(label)) {
                segment.labels.push(label);
                segment.labels.sort((a, b) => a - b);
            }

            if (SEGMENT_PRIORITY[type] > SEGMENT_PRIORITY[segment.type]) {
                // Reparse with the higher-priority type, keeping all known labels.
                labels = segment.labels;
            } else {
                return;
            }
        } else {
            labels = [label];
        }

        // The segment runs until the next labeled offset, or to the end of the object code.
        const end_offset = info.next ? info.next.offset : cursor.size;
        cursor.seek_start(info.offset);

        switch (type) {
            case SegmentType.Instructions:
                parse_instructions_segment(
                    offset_to_segment,
                    label_holder,
                    cursor,
                    end_offset,
                    labels,
                    info.next && info.next.label,
                    lenient,
                    dc_gc_format,
                );
                break;
            case SegmentType.Data:
                parse_data_segment(offset_to_segment, cursor, end_offset, labels);
                break;
            case SegmentType.String:
                parse_string_segment(offset_to_segment, cursor, end_offset, labels, dc_gc_format);
                break;
            default:
                throw new Error(`Segment type ${SegmentType[type]} not implemented.`);
        }
    } catch (e) {
        if (lenient) {
            logger.error("Couldn't fully parse object code segment.", e);
        } else {
            throw e;
        }
    }
}
/**
 * Parses the bytes between the cursor's position and `end_offset` as an
 * instruction segment and stores it in `offset_to_segment`. If control can
 * fall through into the next label, that label's segment is parsed too.
 */
function parse_instructions_segment(
    offset_to_segment: Map<number, Segment>,
    label_holder: LabelHolder,
    cursor: Cursor,
    end_offset: number,
    labels: number[],
    next_label: number | undefined,
    lenient: boolean,
    dc_gc_format: boolean,
): void {
    const instructions: Instruction[] = [];

    // Register the segment up front; `instructions` is filled in below.
    const segment: InstructionSegment = {
        type: SegmentType.Instructions,
        labels,
        instructions,
        asm: { labels: [] },
    };
    offset_to_segment.set(cursor.position, segment);

    while (cursor.position < end_offset) {
        // Parse the opcode.
        const main_opcode = cursor.u8();
        let opcode_index;

        switch (main_opcode) {
            // 0xF8/0xF9 introduce two-byte opcodes; the second byte follows.
            case 0xf8:
            case 0xf9:
                opcode_index = (main_opcode << 8) | cursor.u8();
                break;
            default:
                opcode_index = main_opcode;
                break;
        }

        // NOTE(review): if `opcode_index` isn't present in OPCODES, `opcode` is
        // undefined and the uses below would throw — confirm OPCODES covers the
        // full opcode range.
        const opcode = OPCODES[opcode_index];

        // Parse the arguments.
        try {
            const args = parse_instruction_arguments(cursor, opcode, dc_gc_format);
            instructions.push(new_instruction(opcode, args));
        } catch (e) {
            if (lenient) {
                // Keep the instruction with no arguments so parsing can continue.
                logger.error(
                    `Exception occurred while parsing arguments for instruction ${opcode.mnemonic}.`,
                    e,
                );
                instructions.push(new_instruction(opcode, []));
            } else {
                throw e;
            }
        }
    }

    // Recurse on label drop-through.
    if (next_label != undefined) {
        // Find the first ret or jmp.
        // NOTE(review): this scans the whole segment from the end, so drop-through
        // is disabled if ANY ret/jmp occurs in the segment, not only as the final
        // instruction — confirm that's intended.
        let drop_through = true;

        for (let i = instructions.length - 1; i >= 0; i--) {
            const opcode = instructions[i].opcode;

            if (opcode.code === OP_RET.code || opcode.code === OP_JMP.code) {
                drop_through = false;
                break;
            }
        }

        if (drop_through) {
            parse_segment(
                offset_to_segment,
                label_holder,
                cursor,
                next_label,
                SegmentType.Instructions,
                lenient,
                dc_gc_format,
            );
        }
    }
}
/**
 * Reads the bytes from the cursor's position up to `end_offset` as a raw data
 * segment and stores it in `offset_to_segment` keyed by its start offset.
 */
function parse_data_segment(
    offset_to_segment: Map<number, Segment>,
    cursor: Cursor,
    end_offset: number,
    labels: number[],
): void {
    const offset = cursor.position;
    const byte_length = end_offset - offset;

    const data_segment: DataSegment = {
        type: SegmentType.Data,
        labels,
        data: cursor.array_buffer(byte_length),
        asm: { labels: [] },
    };

    offset_to_segment.set(offset, data_segment);
}
/**
 * Reads the bytes from the cursor's position up to `end_offset` as a string
 * segment and stores it in `offset_to_segment` keyed by its start offset.
 * When `dc_gc_format` is set the string is read as ASCII, otherwise as UTF-16.
 */
function parse_string_segment(
    offset_to_segment: Map<number, Segment>,
    cursor: Cursor,
    end_offset: number,
    labels: number[],
    dc_gc_format: boolean,
): void {
    const offset = cursor.position;
    const byte_length = end_offset - offset;

    let value: string;

    if (dc_gc_format) {
        value = cursor.string_ascii(byte_length, true, true);
    } else {
        value = cursor.string_utf16(byte_length, true, true);
    }

    const string_segment: StringSegment = {
        type: SegmentType.String,
        labels,
        value,
        asm: { labels: [] },
    };

    offset_to_segment.set(offset, string_segment);
}
/**
 * Reads the immediate arguments for `opcode` from the cursor, one per
 * parameter, each sized according to the parameter's type. Stack-popping
 * opcodes have no immediate arguments, so an empty list is returned for them.
 */
function parse_instruction_arguments(cursor: Cursor, opcode: Opcode, dc_gc_format: boolean): Arg[] {
    const args: Arg[] = [];

    if (opcode.stack !== StackInteraction.Pop) {
        for (const param of opcode.params) {
            switch (param.type.kind) {
                case Kind.Byte:
                    args.push(new_arg(cursor.u8(), 1));
                    break;
                case Kind.Word:
                    args.push(new_arg(cursor.u16(), 2));
                    break;
                case Kind.DWord:
                    args.push(new_arg(cursor.i32(), 4));
                    break;
                case Kind.Float:
                    args.push(new_arg(cursor.f32(), 4));
                    break;
                case Kind.Label:
                case Kind.ILabel:
                case Kind.DLabel:
                case Kind.SLabel:
                    // Labels are 16-bit indices into the label table.
                    args.push(new_arg(cursor.u16(), 2));
                    break;
                case Kind.String:
                    {
                        // Null-terminated string; 4096 caps the scan in case the
                        // terminator is missing. The arg size is the number of
                        // bytes actually consumed.
                        const start_pos = cursor.position;
                        const max_bytes = Math.min(4096, cursor.bytes_left);
                        args.push(
                            new_arg(
                                dc_gc_format
                                    ? cursor.string_ascii(max_bytes, true, false)
                                    : cursor.string_utf16(max_bytes, true, false),
                                cursor.position - start_pos,
                            ),
                        );
                    }
                    break;
                case Kind.ILabelVar:
                    {
                        // Length byte followed by that many 16-bit label values.
                        const arg_size = cursor.u8();
                        args.push(...cursor.u16_array(arg_size).map(value => new_arg(value, 2)));
                    }
                    break;
                case Kind.RegRef:
                case Kind.RegTupRef:
                    args.push(new_arg(cursor.u8(), 1));
                    break;
                case Kind.RegRefVar:
                    {
                        // Length byte followed by that many register bytes.
                        const arg_size = cursor.u8();
                        args.push(...cursor.u8_array(arg_size).map(value => new_arg(value, 1)));
                    }
                    break;
                default:
                    throw new Error(`Parameter type ${Kind[param.type.kind]} not implemented.`);
            }
        }
    }

    return args;
}
class LabelHolder {
    /**
     * Labels with their offsets, sorted by offset and then by label.
     */
    labels: { label: number; offset: number }[] = [];
    /**
     * Maps each label to its offset and its index into `labels`.
     */
    private label_map: Map<number, { offset: number; index: number }> = new Map();
    /**
     * Maps each offset to all labels pointing to it.
     */
    private offset_map: Map<number, number[]> = new Map();

    constructor(label_offsets: readonly number[]) {
        // The array index is the label; an offset of -1 marks an unused label.
        for (const [label, offset] of label_offsets.entries()) {
            if (offset !== -1) {
                this.labels.push({ label, offset });
            }
        }

        // Order by offset first, then by label.
        this.labels.sort(
            (a, b) => (a.offset !== b.offset ? a.offset - b.offset : a.label - b.label),
        );

        // Build the lookup maps from the sorted list.
        this.labels.forEach(({ label, offset }, index) => {
            this.label_map.set(label, { offset, index });

            const labels_at_offset = this.offset_map.get(offset);

            if (labels_at_offset) {
                labels_at_offset.push(label);
            } else {
                this.offset_map.set(offset, [label]);
            }
        });
    }

    /**
     * All labels pointing to the given offset, or undefined if there are none.
     */
    get_labels(offset: number): number[] | undefined {
        return this.offset_map.get(offset);
    }

    /**
     * The offset of `label` and the next label/offset pair with a strictly
     * greater offset, or undefined if the label is unknown.
     */
    get_info(
        label: number,
    ): { offset: number; next?: { label: number; offset: number } } | undefined {
        const entry = this.label_map.get(label);

        if (entry == undefined) {
            return undefined;
        }

        // Skip over any labels sharing this label's offset.
        let next: { label: number; offset: number } | undefined = undefined;

        for (let i = entry.index + 1; i < this.labels.length; i++) {
            const candidate = this.labels[i];

            if (candidate.offset > entry.offset) {
                next = candidate;
                break;
            }
        }

        return { offset: entry.offset, next };
    }
}

View File

@ -14,7 +14,7 @@ import { QuestNpcModel } from "./model/QuestNpcModel";
import { QuestObjectModel } from "./model/QuestObjectModel";
import { AreaStore } from "./stores/AreaStore";
import { InstructionPointer } from "./scripting/vm/InstructionPointer";
import { clone_segment } from "./scripting/instructions";
import { clone_segment } from "../core/data_formats/asm/instructions";
export enum QuestRunnerState {
/**

View File

@ -107,9 +107,7 @@ export class QuestEditorToolBarController extends Controller {
input_element.type = "file";
input_element.multiple = true;
input_element.onchange = () => {
if (input_element.files && input_element.files.length) {
this.open_files(Array.prototype.slice.apply(input_element.files));
}
this.open_files(Array.prototype.slice.apply(input_element.files));
};
input_element.click();
}),
@ -144,6 +142,8 @@ export class QuestEditorToolBarController extends Controller {
// TODO: notify user of problems.
open_files = async (files: File[]): Promise<void> => {
try {
if (files.length === 0) return;
let quest: Quest | undefined;
const qst = files.find(f => f.name.toLowerCase().endsWith(".qst"));
@ -152,6 +152,10 @@ export class QuestEditorToolBarController extends Controller {
const buffer = await read_file(qst);
quest = parse_qst_to_quest(new ArrayBufferCursor(buffer, Endianness.Little));
this.quest_filename = qst.name;
if (!quest) {
logger.error("Couldn't parse quest file.");
}
} else {
const bin = files.find(f => f.name.toLowerCase().endsWith(".bin"));
const dat = files.find(f => f.name.toLowerCase().endsWith(".dat"));
@ -164,11 +168,11 @@ export class QuestEditorToolBarController extends Controller {
new ArrayBufferCursor(dat_buffer, Endianness.Little),
);
this.quest_filename = bin.name || dat.name;
}
}
if (!quest) {
logger.error("Couldn't parse quest file.");
if (!quest) {
logger.error("Couldn't parse quest file.");
}
}
}
await this.quest_editor_store.set_current_quest(

View File

@ -4,7 +4,7 @@ import { check_episode, Episode } from "../../core/data_formats/parsing/quest/Ep
import { QuestObjectModel } from "./QuestObjectModel";
import { QuestNpcModel } from "./QuestNpcModel";
import { DatUnknown } from "../../core/data_formats/parsing/quest/dat";
import { Segment } from "../scripting/instructions";
import { Segment } from "../../core/data_formats/asm/instructions";
import { Property } from "../../core/observable/property/Property";
import { AreaVariantModel } from "./AreaVariantModel";
import { ListProperty } from "../../core/observable/property/list/ListProperty";

View File

@ -13,7 +13,7 @@ import {
import { AssemblyError, AssemblySettings, AssemblyWarning } from "./assembly";
import { disassemble } from "./disassembly";
import { QuestModel } from "../model/QuestModel";
import { Kind, OPCODES } from "./opcodes";
import { Kind, OPCODES } from "../../core/data_formats/asm/opcodes";
import { Property } from "../../core/observable/property/Property";
import { property } from "../../core/observable";
import { WritableProperty } from "../../core/observable/property/WritableProperty";

View File

@ -1,5 +1,5 @@
import { assemble } from "./assembly";
import { InstructionSegment, SegmentType } from "./instructions";
import { InstructionSegment, SegmentType } from "../../core/data_formats/asm/instructions";
import {
OP_ARG_PUSHB,
OP_ARG_PUSHL,
@ -10,7 +10,7 @@ import {
OP_SET_EPISODE,
OP_SET_FLOOR_HANDLER,
OP_SET_MAINWARP,
} from "./opcodes";
} from "../../core/data_formats/asm/opcodes";
test("basic script", () => {
const { object_code, warnings, errors } = assemble(

View File

@ -20,7 +20,7 @@ import {
Segment,
SegmentType,
StringSegment,
} from "./instructions";
} from "../../core/data_formats/asm/instructions";
import {
Kind,
OP_ARG_PUSHB,
@ -32,7 +32,7 @@ import {
OPCODES_BY_MNEMONIC,
Param,
StackInteraction,
} from "./opcodes";
} from "../../core/data_formats/asm/opcodes";
import { LogManager } from "../../core/Logger";
const logger = LogManager.get("quest_editor/scripting/assembly");

View File

@ -11,9 +11,15 @@ import {
SignatureHelpOutput,
} from "./assembly_worker_messages";
import { assemble, AssemblySettings } from "./assembly";
import { AsmToken, Segment, SegmentType } from "./instructions";
import { Kind, OP_BB_MAP_DESIGNATE, Opcode, OPCODES_BY_MNEMONIC } from "./opcodes";
import {
AsmToken,
InstructionSegment,
Segment,
SegmentType,
} from "../../core/data_formats/asm/instructions";
import { Kind, Opcode, OPCODES_BY_MNEMONIC } from "../../core/data_formats/asm/opcodes";
import { AssemblyLexer, IdentToken, TokenType } from "./AssemblyLexer";
import { get_map_designations } from "../../core/data_formats/asm/data_flow_analysis/get_map_designations";
const ctx: Worker = self as any;
@ -185,33 +191,31 @@ function assemble_and_send(): void {
label_to_segment_cache.clear();
line_no_to_instructions.splice(0, Infinity);
const map_designations = new Map<number, number>();
let map_designations = new Map<number, number>();
for (let i = 0; i < object_code.length; i++) {
const segment = object_code[i];
const instruction_segments = object_code.filter(
s => s.type === SegmentType.Instructions,
) as InstructionSegment[];
if (segment.type === SegmentType.Instructions) {
// Set map designations.
if (segment.labels.includes(0)) {
for (const inst of segment.instructions) {
if (inst.opcode.code === OP_BB_MAP_DESIGNATE.code) {
map_designations.set(inst.args[0].value, inst.args[2].value);
}
for (let i = 0; i < instruction_segments.length; i++) {
const segment = instruction_segments[i];
// Set map designations.
if (segment.labels.includes(0)) {
map_designations = get_map_designations(instruction_segments, segment);
}
// Index instructions by text position.
for (let j = 0; j < segment.instructions.length; j++) {
const ins = segment.instructions[j];
if (ins.asm) {
if (ins.asm.mnemonic) {
add_index(ins.asm.mnemonic.line_no, i, j);
}
}
// Index instructions by text position.
for (let j = 0; j < segment.instructions.length; j++) {
const ins = segment.instructions[j];
if (ins.asm) {
if (ins.asm.mnemonic) {
add_index(ins.asm.mnemonic.line_no, i, j);
}
for (const arg_asm of ins.asm.args) {
add_index(arg_asm.line_no, i, j);
}
for (const arg_asm of ins.asm.args) {
add_index(arg_asm.line_no, i, j);
}
}
}

View File

@ -1,6 +1,6 @@
import { AssemblyError, AssemblyWarning, AssemblySettings } from "./assembly";
import { Segment } from "./instructions";
import { Opcode } from "./opcodes";
import { Segment } from "../../core/data_formats/asm/instructions";
import { Opcode } from "../../core/data_formats/asm/opcodes";
export enum InputMessageType {
NewAssembly,

View File

@ -12,8 +12,19 @@ import {
Segment,
segment_arrays_equal,
SegmentType,
} from "./instructions";
import { OP_ARG_PUSHW, OP_RET, OP_SWITCH_JMP, OP_VA_CALL, OP_VA_END, OP_VA_START } from "./opcodes";
} from "../../core/data_formats/asm/instructions";
import {
OP_ARG_PUSHW,
OP_RET,
OP_SWITCH_JMP,
OP_VA_CALL,
OP_VA_END,
OP_VA_START,
} from "../../core/data_formats/asm/opcodes";
import {
parse_object_code,
write_object_code,
} from "../../core/data_formats/parsing/quest/object_code";
test("vararg instructions should be disassembled correctly", () => {
const asm = disassemble([
@ -82,42 +93,66 @@ test("va list instructions should be disassembled correctly", () => {
test("assembling disassembled object code with manual stack management should result in the same IR", () => {
const orig_buffer = readFileSync("test/resources/quest27_e.bin");
const orig_bytes = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
const bin = parse_bin(orig_bytes);
const { bin } = parse_bin(orig_bytes);
const orig_object_code = parse_object_code(
bin.object_code,
bin.label_offsets,
[0],
false,
false,
);
const { object_code, warnings, errors } = assemble(disassemble(bin.object_code, true), true);
const { object_code, warnings, errors } = assemble(disassemble(orig_object_code, true), true);
expect(errors).toEqual([]);
expect(warnings).toEqual([]);
expect(segment_arrays_equal(object_code, bin.object_code)).toBe(true);
expect(segment_arrays_equal(object_code, orig_object_code)).toBe(true);
});
// Round-trip test.
test("assembling disassembled object code with automatic stack management should result in the same IR", () => {
const orig_buffer = readFileSync("test/resources/quest27_e.bin");
const orig_bytes = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
const bin = parse_bin(orig_bytes);
const { bin } = parse_bin(orig_bytes);
const orig_object_code = parse_object_code(
bin.object_code,
bin.label_offsets,
[0],
false,
false,
);
const { object_code, warnings, errors } = assemble(disassemble(bin.object_code, false), false);
const { object_code, warnings, errors } = assemble(disassemble(orig_object_code, false), false);
expect(errors).toEqual([]);
expect(warnings).toEqual([]);
expect(segment_arrays_equal(object_code, bin.object_code)).toBe(true);
expect(segment_arrays_equal(object_code, orig_object_code)).toBe(true);
});
// Round-trip test.
test("assembling disassembled object code with manual stack management should result in the same object code", () => {
const orig_buffer = readFileSync("test/resources/quest27_e.bin");
const orig_bytes = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
const bin = parse_bin(orig_bytes);
const { bin } = parse_bin(orig_bytes);
const orig_object_code = parse_object_code(
bin.object_code,
bin.label_offsets,
[0],
false,
false,
);
const { object_code, warnings, errors } = assemble(disassemble(bin.object_code, true), true);
const { object_code, warnings, errors } = assemble(disassemble(orig_object_code, true), true);
expect(errors).toEqual([]);
expect(warnings).toEqual([]);
const test_bytes = new ArrayBufferCursor(write_bin({ ...bin, object_code }), Endianness.Little);
const test_bytes = new ArrayBufferCursor(
write_bin({ ...bin, ...write_object_code(object_code).object_code }),
Endianness.Little,
);
orig_bytes.seek_start(0);
expect(test_bytes.size).toBe(orig_bytes.size);
@ -144,7 +179,15 @@ test("assembling disassembled object code with manual stack management should re
test("disassembling assembled assembly code with automatic stack management should result the same assembly code", () => {
const orig_buffer = readFileSync("test/resources/quest27_e.bin");
const orig_bytes = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
const orig_asm = disassemble(parse_bin(orig_bytes).object_code, false);
const { bin } = parse_bin(orig_bytes);
const orig_object_code = parse_object_code(
bin.object_code,
bin.label_offsets,
[0],
false,
false,
);
const orig_asm = disassemble(orig_object_code, false);
const { object_code, warnings, errors } = assemble(orig_asm, false);

View File

@ -1,6 +1,6 @@
import { reinterpret_i32_as_f32 } from "../../core/primitive_conversion";
import { Arg, Segment, SegmentType } from "./instructions";
import { AnyType, Kind, OP_VA_END, OP_VA_START, Param, StackInteraction } from "./opcodes";
import { Arg, Segment, SegmentType } from "../../core/data_formats/asm/instructions";
import { AnyType, Kind, OP_VA_END, OP_VA_START, Param, StackInteraction } from "../../core/data_formats/asm/opcodes";
import { LogManager } from "../../core/Logger";
import { number_to_hex_string } from "../../core/util";

View File

@ -1,5 +1,5 @@
import { VirtualMachine } from "./VirtualMachine";
import { SegmentType } from "../instructions";
import { SegmentType } from "../../../core/data_formats/asm/instructions";
import { InstructionPointer } from "./InstructionPointer";
import { StepMode } from "./Thread";

View File

@ -1,4 +1,4 @@
import { AsmToken, Instruction, InstructionSegment, Segment, SegmentType } from "../instructions";
import { AsmToken, Instruction, InstructionSegment, Segment, SegmentType } from "../../../core/data_formats/asm/instructions";
import { assert } from "../../../core/util";
export class InstructionPointer {

View File

@ -1,4 +1,4 @@
import { Kind, StackInteraction } from "../opcodes";
import { Kind, StackInteraction } from "../../../core/data_formats/asm/opcodes";
import { VirtualMachineIO } from "./io";
import { Memory } from "./Memory";
import { Endianness } from "../../../core/data_formats/Endianness";

View File

@ -4,7 +4,7 @@
import { ExecutionResult, VirtualMachine } from "./VirtualMachine";
import { to_instructions } from "../../../../test/src/utils";
import { Segment } from "../instructions";
import { Segment } from "../../../core/data_formats/asm/instructions";
import { Random } from "./Random";
import { Episode } from "../../../core/data_formats/parsing/quest/Episode";
import { DefaultVirtualMachineIO } from "./io";

View File

@ -1,4 +1,4 @@
import { Segment, SegmentType } from "../instructions";
import { Segment, SegmentType } from "../../../core/data_formats/asm/instructions";
import {
Kind,
OP_ADD,
@ -87,7 +87,7 @@ import {
OP_WINEND,
OP_XOR,
OP_XORI,
} from "../opcodes";
} from "../../../core/data_formats/asm/opcodes";
import {
andreduce,
andsecond,

View File

@ -1,4 +1,4 @@
import { AsmToken } from "../instructions";
import { AsmToken } from "../../../core/data_formats/asm/instructions";
import { InstructionPointer } from "./InstructionPointer";
import { LogManager } from "../../../core/Logger";

View File

@ -17,7 +17,7 @@ import {
} from "../model/QuestEventActionModel";
import { QuestEventDagModel } from "../model/QuestEventDagModel";
import { QuestEvent, QuestNpc } from "../../core/data_formats/parsing/quest/entities";
import { clone_segment } from "../scripting/instructions";
import { clone_segment } from "../../core/data_formats/asm/instructions";
import { AreaStore } from "./AreaStore";
import { LogManager } from "../../core/Logger";
import { euler } from "../model/euler";

View File

@ -8,7 +8,7 @@ import { Euler, Vector3 } from "three";
import { QuestEventDagModel } from "../model/QuestEventDagModel";
import { AreaStore } from "./AreaStore";
import { assemble } from "../scripting/assembly";
import { Segment } from "../scripting/instructions";
import { Segment } from "../../core/data_formats/asm/instructions";
import { euler } from "../model/euler";
export function create_new_quest(area_store: AreaStore, episode: Episode): QuestModel {

View File

@ -1,5 +1,5 @@
import * as fs from "fs";
import { InstructionSegment, SegmentType } from "../../src/quest_editor/scripting/instructions";
import { InstructionSegment, SegmentType } from "../../src/core/data_formats/asm/instructions";
import { assemble } from "../../src/quest_editor/scripting/assembly";
export async function timeout(millis: number): Promise<void> {

View File

@ -1 +1 @@
37
39