Refactored BufferCursor into a hierarchy of cursors and a ResizableBuffer to improve code clarity.

This commit is contained in:
Daan Vanden Bosch 2019-07-08 23:56:05 +02:00
parent 4e540acf0c
commit f1b3df9754
46 changed files with 1876 additions and 942 deletions

View File

@ -1,242 +0,0 @@
import { BufferCursor } from "./BufferCursor";
test("simple properties and invariants", () => {
    const cursor = new BufferCursor(10, true);

    // Invariants on a freshly created, empty cursor.
    expect(cursor.size).toBe(cursor.position + cursor.bytes_left);
    expect(cursor.size).toBeLessThanOrEqual(cursor.capacity);
    expect(cursor.size).toBe(0);
    expect(cursor.capacity).toBe(10);
    expect(cursor.position).toBe(0);
    expect(cursor.bytes_left).toBe(0);
    expect(cursor.little_endian).toBe(true);

    // Write four bytes, then step one byte back.
    for (let i = 0; i < 4; ++i) {
        cursor.write_u8(99);
    }

    cursor.seek(-1);

    // The invariants must still hold after writing and seeking.
    expect(cursor.size).toBe(cursor.position + cursor.bytes_left);
    expect(cursor.size).toBeLessThanOrEqual(cursor.capacity);
    expect(cursor.size).toBe(4);
    expect(cursor.capacity).toBe(10);
    expect(cursor.position).toBe(3);
    expect(cursor.bytes_left).toBe(1);
    expect(cursor.little_endian).toBe(true);
});

test("correct byte order handling", () => {
    const data = new Uint8Array([1, 2, 3, 4]).buffer;

    expect(new BufferCursor(data, false).u32()).toBe(0x01020304);
    expect(new BufferCursor(data, true).u32()).toBe(0x04030201);
});

test("reallocation of internal buffer when necessary", () => {
    // Start with a capacity of 3, then write 4 bytes.
    const cursor = new BufferCursor(3, true);

    for (let i = 0; i < 4; ++i) {
        cursor.write_u8(99);
    }

    expect(cursor.size).toBe(4);
    expect(cursor.capacity).toBeGreaterThanOrEqual(4);
    expect(cursor.buffer.byteLength).toBeGreaterThanOrEqual(4);
});
/**
 * Defines a test which reads two consecutive integers of the size implied by
 * method_name (e.g. "u16" => 2 bytes) in both byte orders and checks the
 * decoded values and the resulting cursor positions.
 */
function test_integer_read(method_name: string): void {
    test(method_name, () => {
        const byte_count = parseInt(method_name.replace(/^[iu](\d+)$/, "$1"), 10) / 8;

        // Numbers of the form 0x0102... and the byte arrays that encode them.
        // Key "false" holds the big endian bytes, key "true" the little endian bytes.
        let expected_1 = 0;
        let expected_2 = 0;
        const byte_arrays: { [index: string]: number[] } = { false: [], true: [] };

        for (let i = 1; i <= byte_count; ++i) {
            expected_1 = (expected_1 << 8) | i;
            byte_arrays["false"].push(i);
            byte_arrays["true"].unshift(i);
        }

        for (let i = byte_count + 1; i <= 2 * byte_count; ++i) {
            expected_2 = (expected_2 << 8) | i;
            byte_arrays["false"].push(i);
            byte_arrays["true"].splice(byte_count, 0, i);
        }

        for (const little_endian of [false, true]) {
            const cursor = new BufferCursor(
                new Uint8Array(byte_arrays[String(little_endian)]).buffer,
                little_endian
            );

            expect((cursor as any)[method_name]()).toBe(expected_1);
            expect(cursor.position).toBe(byte_count);
            expect((cursor as any)[method_name]()).toBe(expected_2);
            expect(cursor.position).toBe(2 * byte_count);
        }
    });
}

test_integer_read("u8");
test_integer_read("u16");
test_integer_read("u32");
test_integer_read("i32");

test("u8_array", () => {
    const cursor = new BufferCursor(new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]).buffer, true);

    expect(cursor.u8_array(3)).toEqual([1, 2, 3]);
    expect(cursor.seek_start(2).u8_array(4)).toEqual([3, 4, 5, 6]);
    expect(cursor.seek_start(5).u8_array(3)).toEqual([6, 7, 8]);
});
/**
 * Defines a test which reads strings via the given method in both byte orders
 * and checks the decoded string, the null-terminator handling and the final
 * cursor position.
 */
function test_string_read(method_name: string, char_size: number): void {
    test(method_name, () => {
        // Byte value per character: bell, "A", "B", NUL, 0xFF, carriage return.
        const chars = [7, 65, 66, 0, 255, 13];

        for (const little_endian of [false, true]) {
            // Expand each character to char_size bytes in the right order.
            const bytes: number[] = [];

            for (const char of chars) {
                if (little_endian) bytes.push(char);

                for (let i = 1; i < char_size; ++i) {
                    bytes.push(0);
                }

                if (!little_endian) bytes.push(char);
            }

            const cursor = new BufferCursor(new Uint8Array(bytes).buffer, little_endian);

            // Sub-cases: [max_byte_length, null_terminated, drop_remaining,
            // expected string, expected position]. Reading always starts one
            // character in, i.e. right before "A".
            const cases: [number, boolean, boolean, string, number][] = [
                [4 * char_size, true, true, "AB", 5 * char_size],
                [2 * char_size, true, true, "AB", 3 * char_size],
                [4 * char_size, true, false, "AB", 4 * char_size],
                [2 * char_size, true, false, "AB", 3 * char_size],
                [4 * char_size, false, true, "AB\0ÿ", 5 * char_size],
                [4 * char_size, false, false, "AB\0ÿ", 5 * char_size],
            ];

            for (const [max_len, null_term, drop, expected_str, expected_pos] of cases) {
                cursor.seek_start(char_size);
                expect((cursor as any)[method_name](max_len, null_term, drop)).toBe(expected_str);
                expect(cursor.position).toBe(expected_pos);
            }
        }
    });
}

test_string_read("string_ascii", 1);
test_string_read("string_utf16", 2);
/**
 * Defines a test which writes two consecutive integers of the size implied by
 * method_name (e.g. "write_u16" => 2 bytes) in both byte orders and checks
 * the written bytes and the resulting cursor positions.
 */
function test_integer_write(method_name: string): void {
    test(method_name, () => {
        const byte_count = parseInt(method_name.replace(/^write_[iu](\d+)$/, "$1"), 10) / 8;

        // Numbers of the form 0x0102... and the byte arrays that encode them.
        // Key "false" holds the big endian bytes, key "true" the little endian bytes.
        let value_1 = 0;
        let value_2 = 0;
        const bytes_1: { [index: string]: number[] } = { false: [], true: [] };
        const bytes_2: { [index: string]: number[] } = { false: [], true: [] };

        for (let i = 1; i <= byte_count; ++i) {
            value_1 = (value_1 << 8) | i;
            value_2 = (value_2 << 8) | (i + byte_count);
            bytes_1["false"].push(i);
            bytes_1["true"].unshift(i);
            bytes_2["false"].push(i + byte_count);
            bytes_2["true"].unshift(i + byte_count);
        }

        for (const little_endian of [false, true]) {
            const key = String(little_endian);
            const cursor = new BufferCursor(0, little_endian);

            (cursor as any)[method_name](value_1);
            expect(cursor.position).toBe(byte_count);
            expect(cursor.seek_start(0).u8_array(byte_count)).toEqual(bytes_1[key]);
            expect(cursor.position).toBe(byte_count);

            (cursor as any)[method_name](value_2);
            expect(cursor.position).toBe(2 * byte_count);
            expect(cursor.seek_start(0).u8_array(2 * byte_count)).toEqual(
                bytes_1[key].concat(bytes_2[key])
            );
        }
    });
}

test_integer_write("write_u8");
test_integer_write("write_u16");
test_integer_write("write_u32");

test("write_f32", () => {
    for (const little_endian of [false, true]) {
        const cursor = new BufferCursor(0, little_endian);

        cursor.write_f32(1337.9001);
        expect(cursor.position).toBe(4);
        expect(cursor.seek(-4).f32()).toBeCloseTo(1337.9001, 4);
        expect(cursor.position).toBe(4);

        cursor.write_f32(103.502);
        expect(cursor.position).toBe(8);
        expect(cursor.seek(-4).f32()).toBeCloseTo(103.502, 3);
    }
});
test("write_u8_array", () => {
    for (const little_endian of [false, true]) {
        const byte_count = 10;
        const cursor = new BufferCursor(2 * byte_count, little_endian);
        // Write-through view of the cursor's backing buffer.
        const backing = new Uint8Array(cursor.buffer);

        const array_1: number[] = [];
        const array_2: number[] = [];

        for (let i = 1; i <= byte_count; ++i) {
            array_1.push(i);
            array_2.push(i + byte_count);
        }

        cursor.write_u8_array(array_1);
        expect(cursor.position).toBe(byte_count);

        for (let i = 0; i < byte_count; ++i) {
            expect(backing[i]).toBe(array_1[i]);
        }

        cursor.write_u8_array(array_2);
        expect(cursor.position).toBe(2 * byte_count);

        // The first write must not have been clobbered by the second.
        for (let i = 0; i < byte_count; ++i) {
            expect(backing[i]).toBe(array_1[i]);
        }

        for (let i = 0; i < byte_count; ++i) {
            expect(backing[i + byte_count]).toBe(array_2[i]);
        }
    }
});

View File

@ -1,475 +0,0 @@
// TODO: remove dependency on text-encoding because it is no longer maintained.
import { TextDecoder, TextEncoder } from "text-encoding";

// Shared decoder/encoder instances for the encodings the cursor supports.
// NOTE(review): passing an encoding label to the TextEncoder constructor is a
// feature of the text-encoding polyfill; the standard TextEncoder always
// encodes UTF-8 — verify before swapping this dependency out.
const ASCII_DECODER = new TextDecoder("ascii");
const UTF_16BE_DECODER = new TextDecoder("utf-16be");
const UTF_16LE_DECODER = new TextDecoder("utf-16le");
const ASCII_ENCODER = new TextEncoder("ascii");
const UTF_16BE_ENCODER = new TextEncoder("utf-16be");
const UTF_16LE_ENCODER = new TextEncoder("utf-16le");
/**
 * A cursor for reading and writing binary data.
 * Uses an ArrayBuffer internally. This buffer is reallocated if and only if a write beyond the current capacity happens.
 */
export class BufferCursor {
    private _size: number = 0;

    /**
     * The cursor's size. This value will always be non-negative and equal to or smaller than the cursor's capacity.
     */
    get size(): number {
        return this._size;
    }

    set size(size: number) {
        if (size < 0) {
            throw new Error("Size should be non-negative.");
        }

        // Growing the size may reallocate the underlying buffer.
        this.ensure_capacity(size);
        this._size = size;
    }

    private _position: number;

    /**
     * The position from where bytes will be read or written.
     */
    get position(): number {
        return this._position;
    }

    private _little_endian: boolean = false;

    /**
     * Byte order mode.
     */
    get little_endian(): boolean {
        return this._little_endian;
    }

    set little_endian(little_endian: boolean) {
        this._little_endian = little_endian;
        // Keep the UTF-16 decoder/encoder in sync with the byte order.
        this.utf16_decoder = little_endian ? UTF_16LE_DECODER : UTF_16BE_DECODER;
        this.utf16_encoder = little_endian ? UTF_16LE_ENCODER : UTF_16BE_ENCODER;
    }

    /**
     * The amount of bytes left to read from the current position onward.
     */
    get bytes_left(): number {
        return this.size - this.position;
    }

    /**
     * The size of the underlying buffer. This value will always be equal to or greater than the cursor's size.
     */
    get capacity(): number {
        return this.buffer.byteLength;
    }

    private _buffer: ArrayBuffer;

    get buffer(): ArrayBuffer {
        return this._buffer;
    }

    private dv: DataView;
    private utf16_decoder: TextDecoder = UTF_16BE_DECODER;
    private utf16_encoder: TextEncoder = UTF_16BE_ENCODER;

    /**
     * @param buffer_or_capacity - If an ArrayBuffer or Buffer is given, writes to the cursor will be reflected in this buffer and vice versa until a cursor write that requires allocating a new internal buffer happens.
     * @param little_endian - Decides in which byte order multi-byte integers and floats will be interpreted.
     */
    constructor(buffer_or_capacity: ArrayBuffer | Buffer | number, little_endian: boolean = false) {
        if (typeof buffer_or_capacity === "number") {
            this._buffer = new ArrayBuffer(buffer_or_capacity);
            this.size = 0;
        } else if (buffer_or_capacity instanceof ArrayBuffer) {
            this._buffer = buffer_or_capacity;
            this.size = buffer_or_capacity.byteLength;
        } else if (buffer_or_capacity instanceof Buffer) {
            // Use the backing ArrayBuffer.
            // NOTE(review): this ignores the Buffer's byteOffset. A Node Buffer
            // is frequently a view into a larger pooled ArrayBuffer, in which
            // case position 0 of this cursor does NOT correspond to the
            // Buffer's first byte — verify callers only pass non-pooled
            // Buffers.
            this._buffer = buffer_or_capacity.buffer;
            this.size = buffer_or_capacity.byteLength;
        } else {
            throw new Error("buffer_or_capacity should be an ArrayBuffer, a Buffer or a number.");
        }

        this.little_endian = little_endian;
        this._position = 0;
        this.dv = new DataView(this.buffer);
    }

    /**
     * Seek forward or backward by a number of bytes.
     *
     * @param offset - if positive, seeks forward by offset bytes, otherwise seeks backward by -offset bytes.
     */
    seek(offset: number): BufferCursor {
        return this.seek_start(this.position + offset);
    }

    /**
     * Seek forward from the start of the cursor by a number of bytes.
     *
     * @param offset - greater or equal to 0 and smaller than or equal to size
     */
    seek_start(offset: number): BufferCursor {
        if (offset < 0 || offset > this.size) {
            throw new Error(`Offset ${offset} is out of bounds.`);
        }

        this._position = offset;
        return this;
    }

    /**
     * Seek backward from the end of the cursor by a number of bytes.
     *
     * @param offset - greater or equal to 0 and smaller than or equal to size
     */
    seek_end(offset: number): BufferCursor {
        if (offset < 0 || offset > this.size) {
            throw new Error(`Offset ${offset} is out of bounds.`);
        }

        this._position = this.size - offset;
        return this;
    }

    /**
     * Reads an unsigned 8-bit integer and increments position by 1.
     */
    u8(): number {
        return this.dv.getUint8(this._position++);
    }

    /**
     * Reads an unsigned 16-bit integer and increments position by 2.
     */
    u16(): number {
        const r = this.dv.getUint16(this.position, this.little_endian);
        this._position += 2;
        return r;
    }

    /**
     * Reads an unsigned 32-bit integer and increments position by 4.
     */
    u32(): number {
        const r = this.dv.getUint32(this.position, this.little_endian);
        this._position += 4;
        return r;
    }

    /**
     * Reads a signed 8-bit integer and increments position by 1.
     */
    i8(): number {
        return this.dv.getInt8(this._position++);
    }

    /**
     * Reads a signed 16-bit integer and increments position by 2.
     */
    i16(): number {
        const r = this.dv.getInt16(this.position, this.little_endian);
        this._position += 2;
        return r;
    }

    /**
     * Reads a signed 32-bit integer and increments position by 4.
     */
    i32(): number {
        const r = this.dv.getInt32(this.position, this.little_endian);
        this._position += 4;
        return r;
    }

    /**
     * Reads a 32-bit floating point number and increments position by 4.
     */
    f32(): number {
        const r = this.dv.getFloat32(this.position, this.little_endian);
        this._position += 4;
        return r;
    }

    /**
     * Reads n unsigned 8-bit integers and increments position by n.
     */
    u8_array(n: number): number[] {
        const array = [];
        for (let i = 0; i < n; ++i) array.push(this.dv.getUint8(this._position++));
        return array;
    }

    /**
     * Reads n unsigned 16-bit integers and increments position by 2n.
     */
    u16_array(n: number): number[] {
        const array = [];

        for (let i = 0; i < n; ++i) {
            array.push(this.dv.getUint16(this.position, this.little_endian));
            this._position += 2;
        }

        return array;
    }

    /**
     * Reads n unsigned 32-bit integers and increments position by 4n.
     */
    u32_array(n: number): number[] {
        const array = [];

        for (let i = 0; i < n; ++i) {
            array.push(this.dv.getUint32(this.position, this.little_endian));
            this._position += 4;
        }

        return array;
    }

    /**
     * Consumes a variable number of bytes.
     *
     * @param size - the amount bytes to consume.
     * @returns a new cursor containing size bytes (copied out of this cursor's buffer).
     */
    take(size: number): BufferCursor {
        if (size < 0 || size > this.size - this.position) {
            throw new Error(`Size ${size} out of bounds.`);
        }

        // Advance first, then slice the bytes that were just skipped over.
        this._position += size;
        return new BufferCursor(
            this.buffer.slice(this.position - size, this.position),
            this.little_endian
        );
    }

    /**
     * Consumes up to maxByteLength bytes and decodes them as an ASCII string.
     *
     * @param max_byte_length - maximum amount of bytes to consume.
     * @param null_terminated - whether to stop at the first null byte.
     * @param drop_remaining - when a terminator is found early, whether to consume the remaining bytes up to max_byte_length anyway.
     */
    string_ascii(
        max_byte_length: number,
        null_terminated: boolean,
        drop_remaining: boolean
    ): string {
        const string_length = null_terminated
            ? this.index_of_u8(0, max_byte_length) - this.position
            : max_byte_length;

        const r = ASCII_DECODER.decode(new DataView(this.buffer, this.position, string_length));

        // Skip past the terminator (but never past max_byte_length) unless the
        // caller asked for the full max_byte_length to be consumed.
        this._position += drop_remaining
            ? max_byte_length
            : Math.min(string_length + 1, max_byte_length);

        return r;
    }

    /**
     * Consumes up to maxByteLength bytes and decodes them as a UTF-16 string.
     *
     * @param max_byte_length - maximum amount of bytes to consume.
     * @param null_terminated - whether to stop at the first null character (2 zero bytes).
     * @param drop_remaining - when a terminator is found early, whether to consume the remaining bytes up to max_byte_length anyway.
     */
    string_utf16(
        max_byte_length: number,
        null_terminated: boolean,
        drop_remaining: boolean
    ): string {
        // Round down to a whole number of 2-byte characters.
        const string_length = null_terminated
            ? this.index_of_u16(0, max_byte_length) - this.position
            : Math.floor(max_byte_length / 2) * 2;

        const r = this.utf16_decoder.decode(
            new DataView(this.buffer, this.position, string_length)
        );

        this._position += drop_remaining
            ? max_byte_length
            : Math.min(string_length + 2, max_byte_length);

        return r;
    }

    /**
     * Writes an unsigned 8-bit integer and increments position by 1. If necessary, grows the cursor and reallocates the underlying buffer.
     */
    write_u8(value: number): BufferCursor {
        this.ensure_capacity(this.position + 1);

        this.dv.setUint8(this._position++, value);

        if (this.position > this.size) {
            this.size = this.position;
        }

        return this;
    }

    /**
     * Writes an unsigned 16-bit integer and increments position by 2. If necessary, grows the cursor and reallocates the underlying buffer.
     */
    write_u16(value: number): BufferCursor {
        this.ensure_capacity(this.position + 2);

        this.dv.setUint16(this.position, value, this.little_endian);
        this._position += 2;

        if (this.position > this.size) {
            this.size = this.position;
        }

        return this;
    }

    /**
     * Writes an unsigned 32-bit integer and increments position by 4. If necessary, grows the cursor and reallocates the underlying buffer.
     */
    write_u32(value: number): BufferCursor {
        this.ensure_capacity(this.position + 4);

        this.dv.setUint32(this.position, value, this.little_endian);
        this._position += 4;

        if (this.position > this.size) {
            this.size = this.position;
        }

        return this;
    }

    /**
     * Writes a signed 32-bit integer and increments position by 4. If necessary, grows the cursor and reallocates the underlying buffer.
     */
    write_i32(value: number): BufferCursor {
        this.ensure_capacity(this.position + 4);

        this.dv.setInt32(this.position, value, this.little_endian);
        this._position += 4;

        if (this.position > this.size) {
            this.size = this.position;
        }

        return this;
    }

    /**
     * Writes a 32-bit floating point number and increments position by 4. If necessary, grows the cursor and reallocates the underlying buffer.
     */
    write_f32(value: number): BufferCursor {
        this.ensure_capacity(this.position + 4);

        this.dv.setFloat32(this.position, value, this.little_endian);
        this._position += 4;

        if (this.position > this.size) {
            this.size = this.position;
        }

        return this;
    }

    /**
     * Writes an array of unsigned 8-bit integers and increments position by the array's length. If necessary, grows the cursor and reallocates the underlying buffer.
     */
    write_u8_array(array: number[]): BufferCursor {
        this.ensure_capacity(this.position + array.length);

        new Uint8Array(this.buffer, this.position).set(new Uint8Array(array));
        this._position += array.length;

        if (this.position > this.size) {
            this.size = this.position;
        }

        return this;
    }

    /**
     * Writes the contents of other and increments position by the size of other. If necessary, grows the cursor and reallocates the underlying buffer.
     */
    write_cursor(other: BufferCursor): BufferCursor {
        this.ensure_capacity(this.position + other.size);

        // NOTE(review): new Uint8Array(other.buffer) spans other's entire
        // backing buffer (other.capacity bytes), not just other.size bytes.
        // When other.capacity > other.size this copies bytes past other's
        // logical end and can throw a RangeError if they do not fit — verify
        // callers only pass cursors whose size equals their capacity.
        new Uint8Array(this.buffer, this.position).set(new Uint8Array(other.buffer));
        this._position += other.size;

        if (this.position > this.size) {
            this.size = this.position;
        }

        return this;
    }

    /**
     * Writes str encoded as ASCII, truncated or zero-padded to exactly byte_length bytes.
     */
    write_string_ascii(str: string, byte_length: number): BufferCursor {
        let i = 0;

        // Write at most byte_length encoded bytes...
        for (const byte of ASCII_ENCODER.encode(str)) {
            if (i < byte_length) {
                this.write_u8(byte);
                ++i;
            }
        }

        // ...then pad with zero bytes up to byte_length.
        while (i < byte_length) {
            this.write_u8(0);
            ++i;
        }

        return this;
    }

    /**
     * @returns a Uint8Array that remains a write-through view of the underlying array buffer until the buffer is reallocated.
     */
    uint8_array_view(): Uint8Array {
        return new Uint8Array(this.buffer, 0, this.size);
    }

    /**
     * @returns the absolute position of the first occurrence of value at or after the current position, or position + max_byte_length when it is not found within the search window.
     */
    private index_of_u8(value: number, max_byte_length: number): number {
        const max_pos = Math.min(this.position + max_byte_length, this.size);

        for (let i = this.position; i < max_pos; ++i) {
            if (this.dv.getUint8(i) === value) {
                return i;
            }
        }

        return this.position + max_byte_length;
    }

    /**
     * Like index_of_u8, but scans 2-byte steps for a 16-bit value in the current byte order.
     */
    private index_of_u16(value: number, max_byte_length: number): number {
        const max_pos = Math.min(this.position + max_byte_length, this.size);

        for (let i = this.position; i < max_pos; i += 2) {
            if (this.dv.getUint16(i, this.little_endian) === value) {
                return i;
            }
        }

        return this.position + max_byte_length;
    }

    /**
     * Increases buffer size if necessary. The capacity is at least doubled on
     * each reallocation to amortize the copying cost; existing data (up to
     * size) is copied into the new buffer.
     */
    private ensure_capacity(min_new_size: number): void {
        if (min_new_size > this.capacity) {
            // Start from the current capacity, or from min_new_size when the
            // capacity is 0, and keep doubling until the request fits.
            let new_size = this.capacity || min_new_size;

            do {
                new_size *= 2;
            } while (new_size < min_new_size);

            const new_buffer = new ArrayBuffer(new_size);
            new Uint8Array(new_buffer).set(new Uint8Array(this.buffer, 0, this.size));
            this._buffer = new_buffer;
            this.dv = new DataView(this.buffer);
        }
    }
}

View File

@ -0,0 +1,25 @@
import { ResizableBuffer } from "./ResizableBuffer";
test("simple properties and invariants", () => {
    const capacity = 500;
    const buf = new ResizableBuffer(capacity);

    // A fresh buffer is empty but fully allocated up front.
    expect(buf.size).toBe(0);
    expect(buf.capacity).toBe(capacity);
    expect(buf.backing_buffer.byteLength).toBe(capacity);
    expect(buf.view.byteOffset).toBe(0);
    expect(buf.view.byteLength).toBe(capacity);
});

test("reallocation of internal buffer when necessary", () => {
    const buf = new ResizableBuffer(100);

    expect(buf.size).toBe(0);
    expect(buf.capacity).toBe(100);

    // Growing past the current capacity forces a reallocation.
    buf.size = 101;

    expect(buf.size).toBe(101);
    expect(buf.capacity).toBeGreaterThanOrEqual(101);
    expect(buf.view.byteLength).toBeGreaterThanOrEqual(101);
});

View File

@ -0,0 +1,62 @@
/**
 * A growable byte buffer. The backing ArrayBuffer is reallocated (and its
 * contents copied over) whenever the size is increased beyond the current
 * capacity.
 */
export class ResizableBuffer {
    private _size: number = 0;

    /** The buffer's logical size in bytes, always <= capacity. */
    get size(): number {
        return this._size;
    }

    /** Grows the backing buffer if necessary. Throws when size is negative. */
    set size(size: number) {
        if (size < 0) {
            throw new Error("Size should be non-negative.");
        }

        this.ensure_capacity(size);
        this._size = size;
    }

    /** The size of the backing buffer in bytes. */
    get capacity(): number {
        return this._buffer.byteLength;
    }

    private _buffer: ArrayBuffer;

    /** The raw backing buffer; replaced on reallocation. */
    get backing_buffer(): ArrayBuffer {
        return this._buffer;
    }

    private _data_view: DataView;

    /** A DataView over the entire backing buffer; replaced on reallocation. */
    get view(): DataView {
        return this._data_view;
    }

    constructor(initial_capacity: number = 8192) {
        this._buffer = new ArrayBuffer(initial_capacity);
        this._data_view = new DataView(this._buffer);
    }

    /** A DataView over part of the backing buffer. */
    sub_view(offset: number, size: number): DataView {
        return new DataView(this._buffer, offset, size);
    }

    /**
     * Reallocates the backing buffer when it is smaller than min_new_size.
     * The capacity is at least doubled on each reallocation to amortize the
     * copying cost.
     */
    private ensure_capacity(min_new_size: number): void {
        if (min_new_size <= this.capacity) return;

        // Start from the current capacity (or from min_new_size when the
        // capacity is 0) and keep doubling until the request fits.
        let new_capacity = this.capacity || min_new_size;

        do {
            new_capacity *= 2;
        } while (new_capacity < min_new_size);

        const new_buffer = new ArrayBuffer(new_capacity);
        // Carry the old contents (up to the logical size) over.
        new Uint8Array(new_buffer).set(new Uint8Array(this._buffer, 0, this.size));
        this._buffer = new_buffer;
        this._data_view = new DataView(this._buffer);
    }
}

View File

@ -1,6 +1,9 @@
import { BufferCursor } from "../../BufferCursor";
import { Cursor } from "../../cursor/Cursor";
import { WritableCursor } from "../../cursor/WritableCursor";
import { WritableResizableBufferCursor } from "../../cursor/WritableResizableBufferCursor";
import { ResizableBuffer } from "../../ResizableBuffer";
export function compress(src: BufferCursor): BufferCursor {
export function compress(src: Cursor): Cursor {
const ctx = new Context(src);
const hash_table = new HashTable();
@ -106,15 +109,18 @@ const WINDOW_MASK = MAX_WINDOW - 1;
const HASH_SIZE = 1 << 8;
class Context {
src: BufferCursor;
dst: BufferCursor;
src: Cursor;
dst: WritableCursor;
flags: number;
flag_bits_left: number;
flag_offset: number;
constructor(cursor: BufferCursor) {
constructor(cursor: Cursor) {
this.src = cursor;
this.dst = new BufferCursor(cursor.size, cursor.little_endian);
this.dst = new WritableResizableBufferCursor(
new ResizableBuffer(cursor.size),
cursor.endianness
);
this.flags = 0;
this.flag_bits_left = 0;
this.flag_offset = 0;
@ -148,7 +154,7 @@ class Context {
this.dst.write_u8(value);
}
writeFinalFlags(): void {
write_final_flags(): void {
this.flags >>>= this.flag_bits_left;
const pos = this.dst.position;
this.dst
@ -161,18 +167,18 @@ class Context {
this.set_bit(0);
this.set_bit(1);
this.writeFinalFlags();
this.write_final_flags();
this.write_literal(0);
this.write_literal(0);
}
match_length(s2: number): number {
const array = this.src.uint8_array_view();
let len = 0;
let s1 = this.src.position;
const size = this.src.size;
while (s1 < array.byteLength && array[s1] === array[s2]) {
while (s1 < size && this.src.u8_at(s1) === this.src.u8_at(s2)) {
++len;
++s1;
++s2;
@ -263,7 +269,7 @@ class HashTable {
hash_to_offset: (number | null)[] = new Array(HASH_SIZE).fill(null);
masked_offset_to_prev: (number | null)[] = new Array(MAX_WINDOW).fill(null);
hash(cursor: BufferCursor): number {
hash(cursor: Cursor): number {
let hash = cursor.u8();
if (cursor.bytes_left) {

View File

@ -1,9 +1,12 @@
import { BufferCursor } from "../../BufferCursor";
import Logger from "js-logger";
import { Cursor } from "../../cursor/Cursor";
import { WritableCursor } from "../../cursor/WritableCursor";
import { WritableResizableBufferCursor } from "../../cursor/WritableResizableBufferCursor";
import { ResizableBuffer } from "../../ResizableBuffer";
const logger = Logger.get("data_formats/compression/prs/decompress");
export function decompress(cursor: BufferCursor): BufferCursor {
export function decompress(cursor: Cursor): Cursor {
const ctx = new Context(cursor);
while (true) {
@ -53,14 +56,17 @@ export function decompress(cursor: BufferCursor): BufferCursor {
}
class Context {
src: BufferCursor;
dst: BufferCursor;
src: Cursor;
dst: WritableCursor;
flags: number;
flag_bits_left: number;
constructor(cursor: BufferCursor) {
constructor(cursor: Cursor) {
this.src = cursor;
this.dst = new BufferCursor(4 * cursor.size, cursor.little_endian);
this.dst = new WritableResizableBufferCursor(
new ResizableBuffer(Math.floor(1.5 * cursor.size)),
cursor.endianness
);
this.flags = 0;
this.flag_bits_left = 0;
}
@ -108,6 +114,7 @@ class Context {
for (let i = 0; i < Math.floor(length / buf_size); ++i) {
this.dst.write_cursor(buf);
buf.seek_start(0);
}
this.dst.write_cursor(buf.take(length % buf_size));

View File

@ -1,14 +1,9 @@
import { BufferCursor } from "../../BufferCursor";
import { Endianness } from "../..";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
import { compress, decompress } from "../prs";
function test_with_bytes(bytes: number[], expected_compressed_size: number): void {
const cursor = new BufferCursor(new Uint8Array(bytes).buffer, true);
for (const byte of bytes) {
cursor.write_u8(byte);
}
cursor.seek_start(0);
const cursor = new ArrayBufferCursor(new Uint8Array(bytes).buffer, Endianness.Little);
const compressed_cursor = compress(cursor);
expect(compressed_cursor.size).toBe(expected_compressed_size);

View File

@ -0,0 +1,269 @@
import {
ASCII_DECODER,
UTF_16BE_DECODER,
UTF_16BE_ENCODER,
UTF_16LE_DECODER,
UTF_16LE_ENCODER,
} from ".";
import { Endianness } from "..";
import { Cursor } from "./Cursor";
/**
 * A cursor for reading from an array buffer or part of an array buffer.
 */
export class ArrayBufferCursor implements Cursor {
    /**
     * The start offset of the part that is read from, relative to the start of the underlying buffer.
     */
    get offset(): number {
        return this.dv.byteOffset;
    }

    /**
     * The size of the part that is read from.
     */
    get size(): number {
        return this.dv.byteLength;
    }

    set size(size: number) {
        // Re-wrap the same buffer with the new length; the offset is kept.
        this.dv = new DataView(this.buffer, this.offset, size);
    }

    protected _position: number;

    /**
     * The position from where bytes will be read.
     */
    get position(): number {
        return this._position;
    }

    protected little_endian!: boolean;

    /**
     * Byte order mode.
     */
    get endianness(): Endianness {
        return this.little_endian ? Endianness.Little : Endianness.Big;
    }

    set endianness(endianness: Endianness) {
        this.little_endian = endianness === Endianness.Little;
        // Keep the UTF-16 decoder/encoder in sync with the byte order.
        this.utf16_decoder = this.little_endian ? UTF_16LE_DECODER : UTF_16BE_DECODER;
        this.utf16_encoder = this.little_endian ? UTF_16LE_ENCODER : UTF_16BE_ENCODER;
    }

    /**
     * The amount of bytes left to read from the current position onward.
     */
    get bytes_left(): number {
        return this.size - this.position;
    }

    protected buffer: ArrayBuffer;
    protected dv: DataView;

    private utf16_decoder: TextDecoder = UTF_16BE_DECODER;
    // NOTE(review): the encoder is not used by this read-only cursor itself;
    // presumably it exists for writable subclasses — verify.
    private utf16_encoder: TextEncoder = UTF_16BE_ENCODER;

    /**
     * @param buffer The buffer to read from.
     * @param endianness Decides in which byte order multi-byte integers and floats will be interpreted.
     * @param offset The start offset of the part that will be read from.
     * @param size The size of the part that will be read from.
     */
    constructor(
        buffer: ArrayBuffer,
        endianness: Endianness,
        offset: number = 0,
        size: number = buffer.byteLength
    ) {
        this.buffer = buffer;
        this.dv = new DataView(buffer, offset, size);
        this.endianness = endianness;
        this._position = 0;
    }

    /**
     * Seek forward or backward by a number of bytes.
     *
     * @param offset - if positive, seeks forward by offset bytes, otherwise seeks backward by -offset bytes.
     */
    seek(offset: number): this {
        return this.seek_start(this.position + offset);
    }

    /**
     * Seek forward from the start of the cursor by a number of bytes.
     *
     * @param offset - greater or equal to 0 and smaller than or equal to size
     */
    seek_start(offset: number): this {
        if (offset < 0 || offset > this.size) {
            throw new Error(`Offset ${offset} is out of bounds.`);
        }

        this._position = offset;
        return this;
    }

    /**
     * Seek backward from the end of the cursor by a number of bytes.
     *
     * @param offset - greater or equal to 0 and smaller than or equal to size
     */
    seek_end(offset: number): this {
        if (offset < 0 || offset > this.size) {
            throw new Error(`Offset ${offset} is out of bounds.`);
        }

        this._position = this.size - offset;
        return this;
    }

    /**
     * Reads an unsigned 8-bit integer and increments position by 1.
     */
    u8(): number {
        return this.u8_at(this._position++);
    }

    /**
     * Reads an unsigned 8-bit integer at the given offset without moving the position.
     */
    u8_at(offset: number): number {
        return this.dv.getUint8(offset);
    }

    /**
     * Reads an unsigned 16-bit integer and increments position by 2.
     */
    u16(): number {
        const r = this.u16_at(this.position);
        this._position += 2;
        return r;
    }

    /**
     * Reads an unsigned 16-bit integer at the given offset without moving the position.
     */
    u16_at(offset: number): number {
        return this.dv.getUint16(offset, this.little_endian);
    }

    /**
     * Reads an unsigned 32-bit integer and increments position by 4.
     */
    u32(): number {
        const r = this.u32_at(this.position);
        this._position += 4;
        return r;
    }

    /**
     * Reads an unsigned 32-bit integer at the given offset without moving the position.
     */
    u32_at(offset: number): number {
        return this.dv.getUint32(offset, this.little_endian);
    }

    /**
     * Reads a signed 8-bit integer and increments position by 1.
     */
    i8(): number {
        return this.i8_at(this._position++);
    }

    /**
     * Reads a signed 8-bit integer at the given offset without moving the position.
     */
    i8_at(offset: number): number {
        return this.dv.getInt8(offset);
    }

    /**
     * Reads a signed 16-bit integer and increments position by 2.
     */
    i16(): number {
        const r = this.i16_at(this.position);
        this._position += 2;
        return r;
    }

    /**
     * Reads a signed 16-bit integer at the given offset without moving the position.
     */
    i16_at(offset: number): number {
        return this.dv.getInt16(offset, this.little_endian);
    }

    /**
     * Reads a signed 32-bit integer and increments position by 4.
     */
    i32(): number {
        const r = this.i32_at(this.position);
        this._position += 4;
        return r;
    }

    /**
     * Reads a signed 32-bit integer at the given offset without moving the position.
     */
    i32_at(offset: number): number {
        return this.dv.getInt32(offset, this.little_endian);
    }

    /**
     * Reads a 32-bit floating point number and increments position by 4.
     */
    f32(): number {
        const r = this.f32_at(this.position);
        this._position += 4;
        return r;
    }

    /**
     * Reads a 32-bit floating point number at the given offset without moving the position.
     */
    f32_at(offset: number): number {
        return this.dv.getFloat32(offset, this.little_endian);
    }

    /**
     * Reads n unsigned 8-bit integers and increments position by n.
     */
    u8_array(n: number): number[] {
        const array = [];
        for (let i = 0; i < n; ++i) array.push(this.dv.getUint8(this._position++));
        return array;
    }

    /**
     * Reads n unsigned 16-bit integers and increments position by 2n.
     */
    u16_array(n: number): number[] {
        const array = [];

        for (let i = 0; i < n; ++i) {
            array.push(this.dv.getUint16(this.position, this.little_endian));
            this._position += 2;
        }

        return array;
    }

    /**
     * Reads n unsigned 32-bit integers and increments position by 4n.
     */
    u32_array(n: number): number[] {
        const array = [];

        for (let i = 0; i < n; ++i) {
            array.push(this.dv.getUint32(this.position, this.little_endian));
            this._position += 4;
        }

        return array;
    }

    /**
     * Consumes size bytes.
     *
     * @returns a new cursor wrapping the consumed part of the underlying buffer (no copy is made).
     */
    take(size: number): ArrayBufferCursor {
        // NOTE(review): there is no bounds check against this cursor's size;
        // a take past the end succeeds as long as the underlying buffer is
        // large enough — confirm this is intended.
        const offset = this.offset + this.position;
        const wrapper = new ArrayBufferCursor(this.buffer, this.endianness, offset, size);
        this._position += size;
        return wrapper;
    }

    /**
     * Consumes up to max_byte_length bytes and decodes them as an ASCII string.
     *
     * @param max_byte_length - maximum amount of bytes to consume.
     * @param null_terminated - whether to stop at the first null byte.
     * @param drop_remaining - when a terminator is found early, whether to consume the remaining bytes up to max_byte_length anyway.
     */
    string_ascii(
        max_byte_length: number,
        null_terminated: boolean,
        drop_remaining: boolean
    ): string {
        const string_length = null_terminated
            ? this.index_of_u8(0, max_byte_length) - this.position
            : max_byte_length;

        const view = new DataView(this.buffer, this.offset + this.position, string_length);
        const r = ASCII_DECODER.decode(view);

        // Skip past the terminator (but never past max_byte_length) unless the
        // caller asked for the full max_byte_length to be consumed.
        this._position += drop_remaining
            ? max_byte_length
            : Math.min(string_length + 1, max_byte_length);

        return r;
    }

    /**
     * Consumes up to max_byte_length bytes and decodes them as a UTF-16 string.
     *
     * @param max_byte_length - maximum amount of bytes to consume.
     * @param null_terminated - whether to stop at the first null character (2 zero bytes).
     * @param drop_remaining - when a terminator is found early, whether to consume the remaining bytes up to max_byte_length anyway.
     */
    string_utf16(
        max_byte_length: number,
        null_terminated: boolean,
        drop_remaining: boolean
    ): string {
        // Round down to a whole number of 2-byte characters.
        const string_length = null_terminated
            ? this.index_of_u16(0, max_byte_length) - this.position
            : Math.floor(max_byte_length / 2) * 2;

        const view = new DataView(this.buffer, this.offset + this.position, string_length);
        const r = this.utf16_decoder.decode(view);

        this._position += drop_remaining
            ? max_byte_length
            : Math.min(string_length + 2, max_byte_length);

        return r;
    }

    /**
     * Consumes size bytes and returns them as a newly allocated ArrayBuffer.
     */
    array_buffer(size: number = this.size - this.position): ArrayBuffer {
        const start = this.offset + this.position;
        // BUG FIX: ArrayBuffer.prototype.slice takes an exclusive END index as
        // its second argument, not a length, so the end must be start + size.
        const r = this.buffer.slice(start, start + size);
        this._position += size;
        return r;
    }

    /**
     * Consumes size bytes and copies them into the given Uint8Array.
     */
    copy_to_uint8_array(array: Uint8Array, size: number = this.size - this.position): this {
        array.set(new Uint8Array(this.buffer, this.offset + this.position, size));
        this._position += size;
        return this;
    }

    /**
     * @returns the position of the first occurrence of value at or after the current position, or position + max_byte_length when it is not found within the search window.
     */
    private index_of_u8(value: number, max_byte_length: number): number {
        const max_pos = Math.min(this.position + max_byte_length, this.size);

        for (let i = this.position; i < max_pos; ++i) {
            if (this.dv.getUint8(i) === value) {
                return i;
            }
        }

        return this.position + max_byte_length;
    }

    /**
     * Like index_of_u8, but scans in 2-byte steps for a 16-bit value in the current byte order.
     */
    private index_of_u16(value: number, max_byte_length: number): number {
        const max_pos = Math.min(this.position + max_byte_length, this.size);

        for (let i = this.position; i < max_pos; i += 2) {
            if (this.dv.getUint16(i, this.little_endian) === value) {
                return i;
            }
        }

        return this.position + max_byte_length;
    }
}

View File

@ -0,0 +1,27 @@
import { ArrayBufferCursor } from "./ArrayBufferCursor";
import { Endianness } from "..";
// A cursor for reading from (part of) a Node Buffer. Unlike reading the
// Buffer's backing ArrayBuffer directly, this correctly accounts for the
// Buffer's own byteOffset into that (possibly pooled) ArrayBuffer.
export class BufferCursor extends ArrayBufferCursor {
    /**
     * @param buffer The buffer to read from.
     * @param endianness Decides in which byte order multi-byte integers and floats will be interpreted.
     * @param offset The start offset of the part that will be read from.
     * @param size The size of the part that will be read from.
     */
    constructor(
        buffer: Buffer,
        endianness: Endianness,
        offset: number = 0,
        size: number = buffer.byteLength
    ) {
        if (offset < 0 || offset > buffer.byteLength) {
            throw new Error(`Offset ${offset} is out of bounds.`);
        }

        if (size < 0 || size > buffer.byteLength - offset) {
            throw new Error(`Size ${size} is out of bounds.`);
        }

        // Translate the Buffer-relative offset to an offset into the backing
        // ArrayBuffer by adding the Buffer's own byteOffset.
        super(buffer.buffer, endianness, buffer.byteOffset + offset, size);
    }
}

View File

@ -0,0 +1,214 @@
import { Endianness } from "..";
import { enum_values } from "../../enums";
import { ResizableBuffer } from "../ResizableBuffer";
import { ArrayBufferCursor } from "./ArrayBufferCursor";
import { BufferCursor } from "./BufferCursor";
import { Cursor } from "./Cursor";
import { ResizableBufferCursor } from "./ResizableBufferCursor";
import { WritableArrayBufferCursor } from "./WritableArrayBufferCursor";
import { WritableResizableBufferCursor } from "./WritableResizableBufferCursor";
/**
 * Run a test on every cursor implementation with every endianness.
 *
 * @param name name of the test, cursor name and endianness will be appended
 * @param bytes an array of bytes which will be used to initialize each cursor.
 * @param run_test the test case, will be called with every cursor-endianness combination.
 */
function test_all(
    name: string,
    bytes: (endianness: Endianness) => number[],
    run_test: (cursor: Cursor, endianness: Endianness) => void
): void {
    const endiannesses = enum_values<Endianness>(Endianness);

    // Builds a ResizableBuffer containing the test bytes for the given endianness.
    function rbuf(endianness: Endianness): ResizableBuffer {
        const byte_array = bytes(endianness);
        const buf = new ResizableBuffer(byte_array.length);
        buf.size = byte_array.length;

        for (let i = 0; i < byte_array.length; i++) {
            buf.view.setUint8(i, byte_array[i]);
        }

        return buf;
    }

    // One factory per cursor implementation. Building the tuples through
    // typed loops (instead of a flat literal cast with "as any") keeps the
    // element type checked by the compiler.
    const factories: [string, (endianness: Endianness) => Cursor][] = [
        [
            ArrayBufferCursor.name,
            endianness =>
                new ArrayBufferCursor(new Uint8Array(bytes(endianness)).buffer, endianness),
        ],
        [
            BufferCursor.name,
            endianness => new BufferCursor(Buffer.from(bytes(endianness)), endianness),
        ],
        [
            ResizableBufferCursor.name,
            endianness => new ResizableBufferCursor(rbuf(endianness), endianness),
        ],
        [
            WritableArrayBufferCursor.name,
            endianness =>
                new WritableArrayBufferCursor(
                    new Uint8Array(bytes(endianness)).buffer,
                    endianness
                ),
        ],
        [
            WritableResizableBufferCursor.name,
            endianness => new WritableResizableBufferCursor(rbuf(endianness), endianness),
        ],
    ];

    // Construct all cursors up front (same order as before: per implementation,
    // then per endianness), then register one test per combination.
    const cursors: [string, Endianness, Cursor][] = [];

    for (const [cursor_name, create] of factories) {
        for (const endianness of endiannesses) {
            cursors.push([cursor_name, endianness, create(endianness)]);
        }
    }

    for (const [cursor_name, endianness, cursor] of cursors) {
        test(`${name} (${cursor_name} ${Endianness[endianness].toLowerCase()} endian)`, () => {
            run_test(cursor, endianness);
        });
    }
}
// Checks that size, position, bytes_left and endianness stay consistent while seeking.
test_all(
    "simple properties and invariants",
    () => [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
    (cursor, endianness) => {
        // Each entry is [relative seek offset, expected resulting position].
        for (const [seek_to, expected_pos] of [[0, 0], [3, 3], [5, 8], [2, 10], [-10, 0]]) {
            cursor.seek(seek_to);
            expect(cursor.size).toBe(10);
            expect(cursor.position).toBe(expected_pos);
            expect(cursor.size).toBe(cursor.position + cursor.bytes_left);
            expect(cursor.endianness).toBe(endianness);
        }
    }
);
// Reads the bytes 01 02 03 04 as a u32 and checks the result per byte order.
test_all(
    "correct byte order handling",
    () => [1, 2, 3, 4],
    (cursor, endianness) => {
        if (endianness === Endianness.Little) {
            expect(cursor.u32()).toBe(0x04030201);
        } else {
            expect(cursor.u32()).toBe(0x01020304);
        }
    }
);
/**
 * Reads two integers.
 */
function test_integer_read(method_name: string): void {
    const byte_count = parseInt(method_name.replace(/^[iu](\d+)$/, "$1"), 10) / 8;

    // Big endian byte sequences for two consecutive numbers of the form 0x010203...
    const be_bytes_1 = Array.from({ length: byte_count }, (_, i) => i + 1);
    const be_bytes_2 = Array.from({ length: byte_count }, (_, i) => byte_count + i + 1);

    const to_number = (bytes: number[]): number => bytes.reduce((n, b) => (n << 8) | b, 0);
    const expected_number_1 = to_number(be_bytes_1);
    const expected_number_2 = to_number(be_bytes_2);

    // Array of bytes per endianness; little endian is simply each number's bytes reversed.
    const bytes: [number[], number[]] = [[], []];
    bytes[Endianness.Big] = [...be_bytes_1, ...be_bytes_2];
    bytes[Endianness.Little] = [...be_bytes_1].reverse().concat([...be_bytes_2].reverse());

    test_all(
        method_name,
        endianness => bytes[endianness],
        cursor => {
            expect((cursor as any)[method_name]()).toBe(expected_number_1);
            expect(cursor.position).toBe(byte_count);

            expect((cursor as any)[method_name]()).toBe(expected_number_2);
            expect(cursor.position).toBe(2 * byte_count);
        }
    );
}

test_integer_read("u8");
test_integer_read("u16");
test_integer_read("u32");
test_integer_read("i8");
test_integer_read("i16");
test_integer_read("i32");
// Reads u8 arrays from several starting positions, including up to the very end.
test_all(
    "u8_array",
    () => [1, 2, 3, 4, 5, 6, 7, 8],
    cursor => {
        expect(cursor.u8_array(3)).toEqual([1, 2, 3]);
        expect(cursor.seek_start(2).u8_array(4)).toEqual([3, 4, 5, 6]);
        expect(cursor.seek_start(5).u8_array(3)).toEqual([6, 7, 8]);
    }
);
// Same as above for u16; each value is a doubled byte so both byte orders read the same number.
test_all(
    "u16_array",
    () => [1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8],
    cursor => {
        expect(cursor.u16_array(3)).toEqual([0x0101, 0x0202, 0x0303]);
        expect(cursor.seek_start(4).u16_array(4)).toEqual([0x0303, 0x0404, 0x0505, 0x0606]);
        expect(cursor.seek_start(10).u16_array(3)).toEqual([0x0606, 0x0707, 0x0808]);
    }
);
/**
 * Registers tests for a string-reading method, covering every combination of
 * the null_terminated and drop_remaining flags.
 *
 * @param method_name name of the Cursor method under test
 * @param char_size bytes per character (1 for ASCII, 2 for UTF-16)
 */
function test_string_read(method_name: string, char_size: number): void {
    // Array of bytes per endianness.
    const bytes: [number[], number[]] = [[], []];
    for (const endianness of enum_values<Endianness>(Endianness)) {
        const char_array_copy = bytes[endianness];
        // Encodes the characters [0x07, 'A', 'B', NUL, 0xFF, CR] as char_size-byte
        // code units in the given byte order.
        for (const char of [7, 65, 66, 0, 255, 13]) {
            if (endianness === Endianness.Little) char_array_copy.push(char);
            for (let i = 0; i < char_size - 1; ++i) {
                char_array_copy.push(0);
            }
            if (endianness === Endianness.Big) char_array_copy.push(char);
        }
    }
    test_all(
        method_name,
        endianness => bytes[endianness],
        cursor => {
            // All reads start at the 'A' character (index 1).
            // null-terminated + drop_remaining: stops at NUL but consumes max_byte_length.
            cursor.seek_start(char_size);
            expect((cursor as any)[method_name](4 * char_size, true, true)).toBe("AB");
            expect(cursor.position).toBe(5 * char_size);
            cursor.seek_start(char_size);
            expect((cursor as any)[method_name](2 * char_size, true, true)).toBe("AB");
            expect(cursor.position).toBe(3 * char_size);
            // null-terminated, no drop: consumes only the string plus its terminator.
            cursor.seek_start(char_size);
            expect((cursor as any)[method_name](4 * char_size, true, false)).toBe("AB");
            expect(cursor.position).toBe(4 * char_size);
            cursor.seek_start(char_size);
            expect((cursor as any)[method_name](2 * char_size, true, false)).toBe("AB");
            expect(cursor.position).toBe(3 * char_size);
            // Not null-terminated: NUL and 0xFF become part of the string.
            cursor.seek_start(char_size);
            expect((cursor as any)[method_name](4 * char_size, false, true)).toBe("AB\0ÿ");
            expect(cursor.position).toBe(5 * char_size);
            cursor.seek_start(char_size);
            expect((cursor as any)[method_name](4 * char_size, false, false)).toBe("AB\0ÿ");
            expect(cursor.position).toBe(5 * char_size);
        }
    );
}
test_string_read("string_ascii", 1);
test_string_read("string_utf16", 2);

View File

@ -0,0 +1,159 @@
import { Endianness } from "..";
/**
 * A cursor for reading binary data.
 */
export interface Cursor {
    /**
     * The cursor's size. This value is always non-negative.
     */
    readonly size: number;
    /**
     * The position from where bytes will be read or written.
     */
    readonly position: number;
    /**
     * Byte order mode.
     */
    endianness: Endianness;
    /**
     * The amount of bytes left to read, i.e. size - position.
     */
    readonly bytes_left: number;
    /**
     * Seek forward or backward by a number of bytes.
     *
     * @param offset - if positive, seeks forward by offset bytes, otherwise seeks backward by -offset bytes.
     */
    seek(offset: number): this;
    /**
     * Seek forward from the start of the cursor by a number of bytes.
     *
     * @param offset - greater or equal to 0 and smaller than size
     */
    seek_start(offset: number): this;
    /**
     * Seek backward from the end of the cursor by a number of bytes.
     *
     * @param offset - greater or equal to 0 and smaller than size
     */
    seek_end(offset: number): this;
    /**
     * Reads an unsigned 8-bit integer and increments position by 1.
     */
    u8(): number;
    /**
     * Reads an unsigned 8-bit integer at the given absolute offset. Doesn't increment position.
     */
    u8_at(offset: number): number;
    /**
     * Reads an unsigned 16-bit integer and increments position by 2.
     */
    u16(): number;
    /**
     * Reads an unsigned 16-bit integer at the given absolute offset. Doesn't increment position.
     */
    u16_at(offset: number): number;
    /**
     * Reads an unsigned 32-bit integer and increments position by 4.
     */
    u32(): number;
    /**
     * Reads an unsigned 32-bit integer at the given absolute offset. Doesn't increment position.
     */
    u32_at(offset: number): number;
    /**
     * Reads a signed 8-bit integer and increments position by 1.
     */
    i8(): number;
    /**
     * Reads a signed 8-bit integer at the given absolute offset. Doesn't increment position.
     */
    i8_at(offset: number): number;
    /**
     * Reads a signed 16-bit integer and increments position by 2.
     */
    i16(): number;
    /**
     * Reads a signed 16-bit integer at the given absolute offset. Doesn't increment position.
     */
    i16_at(offset: number): number;
    /**
     * Reads a signed 32-bit integer and increments position by 4.
     */
    i32(): number;
    /**
     * Reads a signed 32-bit integer at the given absolute offset. Doesn't increment position.
     */
    i32_at(offset: number): number;
    /**
     * Reads a 32-bit floating point number and increments position by 4.
     */
    f32(): number;
    /**
     * Reads a 32-bit floating point number at the given absolute offset. Doesn't increment position.
     */
    f32_at(offset: number): number;
    /**
     * Reads n unsigned 8-bit integers and increments position by n.
     */
    u8_array(n: number): number[];
    /**
     * Reads n unsigned 16-bit integers and increments position by 2n.
     */
    u16_array(n: number): number[];
    /**
     * Reads n unsigned 32-bit integers and increments position by 4n.
     */
    u32_array(n: number): number[];
    /**
     * Consumes a variable number of bytes.
     *
     * @param size - the amount of bytes to consume.
     * @returns a write-through view containing size bytes.
     */
    take(size: number): Cursor;
    /**
     * Consumes up to max_byte_length bytes.
     *
     * @param max_byte_length - maximum amount of bytes to consume.
     * @param null_terminated - whether reading stops at the first nul byte.
     * @param drop_remaining - whether max_byte_length bytes are consumed even when the string is shorter.
     */
    string_ascii(
        max_byte_length: number,
        null_terminated: boolean,
        drop_remaining: boolean
    ): string;
    /**
     * Consumes up to max_byte_length bytes.
     *
     * @param max_byte_length - maximum amount of bytes to consume.
     * @param null_terminated - whether reading stops at the first nul character.
     * @param drop_remaining - whether max_byte_length bytes are consumed even when the string is shorter.
     */
    string_utf16(
        max_byte_length: number,
        null_terminated: boolean,
        drop_remaining: boolean
    ): string;
    /**
     * Consumes size bytes (everything left by default) and returns them as a new ArrayBuffer.
     */
    array_buffer(size?: number): ArrayBuffer;
    /**
     * Copies size bytes (everything left by default) into the given array and increments position accordingly.
     */
    copy_to_uint8_array(array: Uint8Array, size?: number): this;
}

View File

@ -0,0 +1,319 @@
import {
UTF_16BE_DECODER,
UTF_16BE_ENCODER,
UTF_16LE_DECODER,
UTF_16LE_ENCODER,
ASCII_DECODER,
} from ".";
import { Endianness } from "..";
import { ResizableBuffer } from "../ResizableBuffer";
import { Cursor } from "./Cursor";
/**
 * A cursor for reading from (part of) a ResizableBuffer.
 */
export class ResizableBufferCursor implements Cursor {
    private _offset: number;

    /**
     * The start offset of this cursor within the underlying ResizableBuffer.
     */
    get offset(): number {
        return this._offset;
    }

    protected _size: number;

    get size(): number {
        return this._size;
    }

    protected _position: number;

    get position(): number {
        return this._position;
    }

    protected little_endian!: boolean;

    get endianness(): Endianness {
        return this.little_endian ? Endianness.Little : Endianness.Big;
    }

    set endianness(endianness: Endianness) {
        this.little_endian = endianness === Endianness.Little;
        // Keep the cached UTF-16 codecs in sync with the byte order.
        this.utf16_decoder = this.little_endian ? UTF_16LE_DECODER : UTF_16BE_DECODER;
        this.utf16_encoder = this.little_endian ? UTF_16LE_ENCODER : UTF_16BE_ENCODER;
    }

    get bytes_left(): number {
        return this.size - this.position;
    }

    protected buffer: ResizableBuffer;

    /**
     * View over the ENTIRE backing buffer, so every access through it must add
     * this.offset to cursor-relative positions.
     */
    protected get dv(): DataView {
        return this.buffer.view;
    }

    private utf16_decoder: TextDecoder = UTF_16BE_DECODER;
    private utf16_encoder: TextEncoder = UTF_16BE_ENCODER;

    /**
     * @param buffer The buffer to read from.
     * @param endianness Decides in which byte order multi-byte integers and floats will be interpreted.
     * @param offset The start offset of the part that will be read from.
     * @param size The size of the part that will be read from.
     */
    constructor(
        buffer: ResizableBuffer,
        endianness: Endianness,
        offset: number = 0,
        size: number = buffer.size
    ) {
        if (offset < 0 || offset > buffer.size) {
            throw new Error(`Offset ${offset} is out of bounds.`);
        }

        if (size < 0 || offset + size > buffer.size) {
            throw new Error(`Size ${size} is out of bounds.`);
        }

        this.buffer = buffer;
        this.endianness = endianness;
        this._offset = offset;
        this._size = size;
        this._position = 0;
    }

    seek(offset: number): this {
        return this.seek_start(this.position + offset);
    }

    seek_start(offset: number): this {
        if (offset < 0 || offset > this.size) {
            throw new Error(`Offset ${offset} is out of bounds.`);
        }

        this._position = offset;
        return this;
    }

    seek_end(offset: number): this {
        if (offset < 0 || offset > this.size) {
            throw new Error(`Offset ${offset} is out of bounds.`);
        }

        this._position = this.size - offset;
        return this;
    }

    u8(): number {
        return this.u8_at(this._position++);
    }

    u8_at(offset: number): number {
        this.check_offset(offset, 1);
        return this.dv.getUint8(this.offset + offset);
    }

    u16(): number {
        const r = this.u16_at(this.position);
        this._position += 2;
        return r;
    }

    u16_at(offset: number): number {
        this.check_offset(offset, 2);
        return this.dv.getUint16(this.offset + offset, this.little_endian);
    }

    u32(): number {
        const r = this.u32_at(this.position);
        this._position += 4;
        return r;
    }

    u32_at(offset: number): number {
        this.check_offset(offset, 4);
        return this.dv.getUint32(this.offset + offset, this.little_endian);
    }

    i8(): number {
        return this.i8_at(this._position++);
    }

    i8_at(offset: number): number {
        this.check_offset(offset, 1);
        return this.dv.getInt8(this.offset + offset);
    }

    i16(): number {
        const r = this.i16_at(this.position);
        this._position += 2;
        return r;
    }

    i16_at(offset: number): number {
        this.check_offset(offset, 2);
        return this.dv.getInt16(this.offset + offset, this.little_endian);
    }

    i32(): number {
        const r = this.i32_at(this.position);
        this._position += 4;
        return r;
    }

    i32_at(offset: number): number {
        this.check_offset(offset, 4);
        return this.dv.getInt32(this.offset + offset, this.little_endian);
    }

    f32(): number {
        const r = this.f32_at(this.position);
        this._position += 4;
        return r;
    }

    f32_at(offset: number): number {
        this.check_offset(offset, 4);
        return this.dv.getFloat32(this.offset + offset, this.little_endian);
    }

    u8_array(n: number): number[] {
        this.check_size("n", n, n);

        const array = [];

        for (let i = 0; i < n; ++i) {
            array.push(this.dv.getUint8(this.offset + this._position++));
        }

        return array;
    }

    u16_array(n: number): number[] {
        this.check_size("n", n, 2 * n);

        const array = [];

        for (let i = 0; i < n; ++i) {
            array.push(this.dv.getUint16(this.offset + this.position, this.little_endian));
            this._position += 2;
        }

        return array;
    }

    u32_array(n: number): number[] {
        this.check_size("n", n, 4 * n);

        const array = [];

        for (let i = 0; i < n; ++i) {
            array.push(this.dv.getUint32(this.offset + this.position, this.little_endian));
            this._position += 4;
        }

        return array;
    }

    /**
     * Consumes size bytes and returns a write-through sub-cursor over them.
     */
    take(size: number): ResizableBufferCursor {
        this.check_size("size", size, size);

        const offset = this.offset + this.position;
        const wrapper = new ResizableBufferCursor(this.buffer, this.endianness, offset, size);
        this._position += size;
        return wrapper;
    }

    string_ascii(
        max_byte_length: number,
        null_terminated: boolean,
        drop_remaining: boolean
    ): string {
        this.check_size("max_byte_length", max_byte_length, max_byte_length);

        const string_length = null_terminated
            ? this.index_of_u8(0, max_byte_length) - this.position
            : max_byte_length;

        const view = this.buffer.sub_view(this.offset + this.position, string_length);
        const r = ASCII_DECODER.decode(view);

        // When not dropping the remainder, also consume the terminator (if any).
        this._position += drop_remaining
            ? max_byte_length
            : Math.min(string_length + 1, max_byte_length);

        return r;
    }

    string_utf16(
        max_byte_length: number,
        null_terminated: boolean,
        drop_remaining: boolean
    ): string {
        this.check_size("max_byte_length", max_byte_length, max_byte_length);

        const string_length = null_terminated
            ? this.index_of_u16(0, max_byte_length) - this.position
            : Math.floor(max_byte_length / 2) * 2;

        const view = this.buffer.sub_view(this.offset + this.position, string_length);
        const r = this.utf16_decoder.decode(view);

        // When not dropping the remainder, also consume the terminator (if any).
        this._position += drop_remaining
            ? max_byte_length
            : Math.min(string_length + 2, max_byte_length);

        return r;
    }

    array_buffer(size: number = this.size - this.position): ArrayBuffer {
        this.check_size("size", size, size);

        const start = this.offset + this.position;
        // ArrayBuffer.prototype.slice takes an (exclusive) END offset, not a
        // length, so the end must be start + size.
        const r = this.buffer.backing_buffer.slice(start, start + size);
        this._position += size;
        return r;
    }

    copy_to_uint8_array(array: Uint8Array, size: number = this.size - this.position): this {
        this.check_size("size", size, size);
        array.set(new Uint8Array(this.buffer.backing_buffer, this.offset + this.position, size));
        this._position += size;
        return this;
    }

    /**
     * Checks that byte_size bytes can still be consumed from the current position.
     */
    private check_size(name: string, value: number, byte_size: number): void {
        if (byte_size < 0 || byte_size > this.size - this.position) {
            throw new Error(`${name} ${value} is out of bounds.`);
        }
    }

    /**
     * Checks whether we can read size bytes at offset.
     */
    protected check_offset(offset: number, size: number): void {
        if (offset < 0 || offset + size > this.size) {
            throw new Error(`Offset ${offset} is out of bounds.`);
        }
    }

    /**
     * Returns the cursor-relative position of the first occurrence of the given
     * byte within the next max_byte_length bytes, or position + max_byte_length
     * if it isn't found.
     */
    private index_of_u8(value: number, max_byte_length: number): number {
        const max_pos = Math.min(this.position + max_byte_length, this.size);

        for (let i = this.position; i < max_pos; ++i) {
            // i is cursor-relative; dv covers the whole buffer, so add this.offset.
            if (this.dv.getUint8(this.offset + i) === value) {
                return i;
            }
        }

        return this.position + max_byte_length;
    }

    /**
     * Returns the cursor-relative position of the first 16-bit occurrence of the
     * given value within the next max_byte_length bytes, or
     * position + max_byte_length if it isn't found.
     */
    private index_of_u16(value: number, max_byte_length: number): number {
        const max_pos = Math.min(this.position + max_byte_length, this.size);

        for (let i = this.position; i < max_pos; i += 2) {
            // i is cursor-relative; dv covers the whole buffer, so add this.offset.
            if (this.dv.getUint16(this.offset + i, this.little_endian) === value) {
                return i;
            }
        }

        return this.position + max_byte_length;
    }
}

View File

@ -0,0 +1,72 @@
import { ArrayBufferCursor } from "./ArrayBufferCursor";
import { WritableCursor } from "./WritableCursor";
import { ASCII_ENCODER } from ".";
import { Cursor } from "./Cursor";
/**
 * A cursor for reading and writing from an array buffer or part of an array buffer.
 */
export class WritableArrayBufferCursor extends ArrayBufferCursor implements WritableCursor {
    /**
     * Writes an unsigned 8-bit integer and increments position by 1.
     */
    write_u8(value: number): this {
        this.dv.setUint8(this._position++, value);
        return this;
    }

    /**
     * Writes an unsigned 16-bit integer and increments position by 2.
     */
    write_u16(value: number): this {
        this.dv.setUint16(this.position, value, this.little_endian);
        this._position += 2;
        return this;
    }

    /**
     * Writes an unsigned 32-bit integer and increments position by 4.
     */
    write_u32(value: number): this {
        this.dv.setUint32(this.position, value, this.little_endian);
        this._position += 4;
        return this;
    }

    /**
     * Writes a signed 32-bit integer and increments position by 4.
     */
    write_i32(value: number): this {
        this.dv.setInt32(this.position, value, this.little_endian);
        this._position += 4;
        return this;
    }

    /**
     * Writes a 32-bit floating point number and increments position by 4.
     */
    write_f32(value: number): this {
        this.dv.setFloat32(this.position, value, this.little_endian);
        this._position += 4;
        return this;
    }

    /**
     * Writes an array of unsigned 8-bit integers and increments position by the array's length.
     *
     * NOTE(review): raw Uint8Array views over this.buffer add this.offset while
     * writes through this.dv do not — assumes dv is already constructed at the
     * cursor's offset; confirm in ArrayBufferCursor.
     */
    write_u8_array(array: number[]): this {
        new Uint8Array(this.buffer, this.offset + this.position).set(new Uint8Array(array));
        this._position += array.length;
        return this;
    }

    /**
     * Writes the remaining contents of the given cursor, advancing both cursors past them.
     */
    write_cursor(other: Cursor): this {
        const size = other.size - other.position;

        other.copy_to_uint8_array(
            new Uint8Array(this.buffer, this.offset + this.position, size),
            size
        );

        this._position += size;
        return this;
    }

    /**
     * Writes byte_length bytes of str's ASCII encoding, padding with nul bytes
     * when the encoding is shorter than byte_length.
     */
    write_string_ascii(str: string, byte_length: number): this {
        let i = 0;

        for (const byte of ASCII_ENCODER.encode(str)) {
            // Stop once byte_length bytes are written instead of idly iterating
            // the rest of the encoded string.
            if (i >= byte_length) break;

            this.write_u8(byte);
            ++i;
        }

        while (i < byte_length) {
            this.write_u8(0);
            ++i;
        }

        return this;
    }
}

View File

@ -0,0 +1,191 @@
import { Endianness } from "..";
import { enum_values } from "../../enums";
import { ResizableBuffer } from "../ResizableBuffer";
import { WritableArrayBufferCursor } from "./WritableArrayBufferCursor";
import { WritableCursor } from "./WritableCursor";
import { WritableResizableBufferCursor } from "./WritableResizableBufferCursor";
/**
 * Run a test on every writable cursor implementation with every endianness.
 *
 * @param name name of the test, cursor name and endianness will be appended
 * @param bytes an array of bytes which will be used to initialize each cursor.
 * @param run_test the test case, will be called with every cursor-endianness combination.
 */
function test_all(
    name: string,
    bytes: (endianness: Endianness) => number[],
    run_test: (cursor: WritableCursor, endianness: Endianness) => void
): void {
    const endiannesses = enum_values<Endianness>(Endianness);

    // Creates a ResizableBuffer initialized with the test bytes.
    function rbuf(endianness: Endianness): ResizableBuffer {
        const byte_array = bytes(endianness);
        const rbuf = new ResizableBuffer(byte_array.length);
        rbuf.size = byte_array.length;

        for (let i = 0; i < byte_array.length; i++) {
            rbuf.view.setUint8(i, byte_array[i]);
        }

        return rbuf;
    }

    // Explicit tuple return types on the map callbacks let this list type-check
    // without resorting to an `as any` assertion.
    const cursors: [string, Endianness, WritableCursor][] = [
        ...endiannesses.map((endianness): [string, Endianness, WritableCursor] => [
            WritableArrayBufferCursor.name,
            endianness,
            new WritableArrayBufferCursor(new Uint8Array(bytes(endianness)).buffer, endianness),
        ]),
        ...endiannesses.map((endianness): [string, Endianness, WritableCursor] => [
            WritableResizableBufferCursor.name,
            endianness,
            new WritableResizableBufferCursor(rbuf(endianness), endianness),
        ]),
    ];

    for (const [cursor_name, endianness, cursor] of cursors) {
        test(`${name} (${cursor_name} ${Endianness[endianness].toLowerCase()} endian)`, () => {
            run_test(cursor, endianness);
        });
    }
}
// Writes four bytes, seeks back one, and checks the size/position bookkeeping.
test_all(
    "simple properties and invariants",
    () => [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
    (cursor, endianness) => {
        expect(cursor.position).toBe(0);
        cursor
            .write_u8(99)
            .write_u8(99)
            .write_u8(99)
            .write_u8(99);
        cursor.seek(-1);
        expect(cursor.size).toBe(cursor.position + cursor.bytes_left);
        expect(cursor.size).toBe(10);
        expect(cursor.position).toBe(3);
        expect(cursor.bytes_left).toBe(7);
        expect(cursor.endianness).toBe(endianness);
    }
);
/**
 * Writes and reads two integers.
 */
function test_integer_write(method_name: string): void {
    const byte_count = parseInt(method_name.replace(/^write_[iu](\d+)$/, "$1"), 10) / 8;
    const read_method_name = method_name.replace("write_", "");

    // Two consecutive numbers of the form 0x010203...
    const to_number = (bytes: number[]): number => bytes.reduce((n, b) => (n << 8) | b, 0);
    const expected_number_1 = to_number(Array.from({ length: byte_count }, (_, i) => i + 1));
    const expected_number_2 = to_number(
        Array.from({ length: byte_count }, (_, i) => byte_count + i + 1)
    );

    test_all(
        method_name,
        () => [0, 0, 0, 0, 0, 0, 0, 0],
        cursor => {
            (cursor as any)[method_name](expected_number_1);
            (cursor as any)[method_name](expected_number_2);

            expect(cursor.position).toBe(2 * byte_count);

            cursor.seek_start(0);

            expect((cursor as any)[read_method_name]()).toBe(expected_number_1);
            expect((cursor as any)[read_method_name]()).toBe(expected_number_2);
        }
    );
}

test_integer_write("write_u8");
test_integer_write("write_u16");
test_integer_write("write_u32");
test_integer_write("write_i32");
/**
 * Writes and reads two floats.
 */
test_all(
    "write_f32",
    () => [0, 0, 0, 0, 0, 0, 0, 0],
    cursor => {
        cursor.write_f32(1337.9001);
        cursor.write_f32(103.502);
        expect(cursor.position).toBe(8);
        cursor.seek_start(0);
        // 32-bit floats can't represent these values exactly, hence toBeCloseTo.
        expect(cursor.f32()).toBeCloseTo(1337.9001, 4);
        expect(cursor.f32()).toBeCloseTo(103.502, 3);
        expect(cursor.position).toBe(8);
    }
);
// Writes two 10-byte arrays back to back and reads them byte by byte.
test_all(
    "write_u8_array",
    () => new Array<number>(20).fill(0),
    cursor => {
        const test_array_1 = [];
        const test_array_2 = [];
        for (let i = 1; i <= 10; ++i) {
            test_array_1.push(i);
            test_array_2.push(i + 10);
        }
        cursor.write_u8_array(test_array_1);
        expect(cursor.position).toBe(10);
        cursor.write_u8_array(test_array_2);
        expect(cursor.position).toBe(20);
        cursor.seek_start(0);
        for (let i = 0; i < 10; ++i) {
            expect(cursor.u8()).toBe(test_array_1[i]);
        }
        for (let i = 0; i < 10; ++i) {
            expect(cursor.u8()).toBe(test_array_2[i]);
        }
        expect(cursor.position).toBe(20);
    }
);
// take should produce a view over the bytes starting at the CURRENT position.
test_all(
    "write, seek backwards then take",
    () => new Array<number>(16).fill(0),
    cursor => {
        cursor
            .write_u32(1)
            .write_u32(2)
            .write_u32(3)
            .write_u32(4);
        cursor.seek(-8);
        const new_cursor = cursor.take(8);
        expect(new_cursor.size).toBe(8);
        expect(new_cursor.position).toBe(0);
        expect(new_cursor.u32()).toBe(3);
        expect(new_cursor.u32()).toBe(4);
    }
);

View File

@ -0,0 +1,48 @@
import { Cursor } from "./Cursor";
/**
 * A cursor for reading and writing binary data.
 */
export interface WritableCursor extends Cursor {
    /**
     * The cursor's size. Unlike Cursor's size, this property is writable.
     */
    size: number;
    /**
     * Writes an unsigned 8-bit integer and increments position by 1.
     */
    write_u8(value: number): this;
    /**
     * Writes an unsigned 16-bit integer and increments position by 2.
     */
    write_u16(value: number): this;
    /**
     * Writes an unsigned 32-bit integer and increments position by 4.
     */
    write_u32(value: number): this;
    /**
     * Writes a signed 32-bit integer and increments position by 4.
     */
    write_i32(value: number): this;
    /**
     * Writes a 32-bit floating point number and increments position by 4.
     */
    write_f32(value: number): this;
    /**
     * Writes an array of unsigned 8-bit integers and increments position by the array's length.
     */
    write_u8_array(array: number[]): this;
    /**
     * Writes the contents of the given cursor from its position to its end. Increments this cursor's and the given cursor's position by the size of the given cursor.
     */
    write_cursor(other: Cursor): this;
    /**
     * Writes byte_length characters of str. If str is shorter than byte_length, nul bytes will be inserted until byte_length bytes have been written.
     */
    write_string_ascii(str: string, byte_length: number): this;
}

View File

@ -0,0 +1,50 @@
import { WritableResizableBufferCursor } from "./WritableResizableBufferCursor";
import { ResizableBuffer } from "../ResizableBuffer";
import { Endianness } from "..";
/**
 * Writes two integers to a cursor backed with a buffer of size 0.
 * Tests that size is automatically increased.
 */
function test_integer_write(method_name: string): void {
    test(`${method_name} increases buffer and cursor size`, () => {
        const byte_count = parseInt(method_name.replace(/^write_[iu](\d+)$/, "$1"), 10) / 8;

        const buf = new ResizableBuffer(8);
        const cursor = new WritableResizableBufferCursor(buf, Endianness.Little);

        expect(buf.size).toBe(0);
        expect(cursor.size).toBe(0);

        // The written values themselves are irrelevant here; only the size
        // bookkeeping is under test.
        for (const value of [98749, 7348942]) {
            (cursor as any)[method_name](value);
        }

        expect(buf.size).toBe(2 * byte_count);
        expect(cursor.position).toBe(2 * byte_count);
        expect(cursor.size).toBe(2 * byte_count);
    });
}

test_integer_write("write_u8");
test_integer_write("write_u16");
test_integer_write("write_u32");
test_integer_write("write_i32");
// take should produce a view over the bytes starting at the CURRENT position,
// even on a cursor that grew from size 0.
test("write, seek backwards then take", () => {
    const cursor = new WritableResizableBufferCursor(new ResizableBuffer(0), Endianness.Little);
    cursor
        .write_u32(1)
        .write_u32(2)
        .write_u32(3)
        .write_u32(4);
    cursor.seek(-8);
    const new_cursor = cursor.take(8);
    expect(new_cursor.size).toBe(8);
    expect(new_cursor.position).toBe(0);
    expect(new_cursor.u32()).toBe(3);
    expect(new_cursor.u32()).toBe(4);
});

View File

@ -0,0 +1,106 @@
import { WritableCursor } from "./WritableCursor";
import { ResizableBufferCursor } from "./ResizableBufferCursor";
import { Cursor } from "./Cursor";
import { ASCII_ENCODER } from ".";
/**
 * A cursor for reading and writing from (part of) a ResizableBuffer, growing
 * the cursor and the underlying buffer as needed when writing past the end.
 */
export class WritableResizableBufferCursor extends ResizableBufferCursor implements WritableCursor {
    /**
     * The cursor's size. Enlarging it also grows the underlying buffer when necessary.
     */
    get size(): number {
        return this._size;
    }

    set size(size: number) {
        if (size > this._size) {
            // Grow relative to the current END of the cursor. ensure_size can't
            // be used here: it computes the needed growth relative to position,
            // which may be anywhere within the cursor, and would under-grow.
            this._size = size;

            if (this.buffer.size < this.offset + this._size) {
                this.buffer.size = this.offset + this._size;
            }
        } else {
            this._size = size;
        }
    }

    /**
     * Writes an unsigned 8-bit integer and increments position by 1.
     */
    write_u8(value: number): this {
        this.ensure_size(1);
        // dv covers the whole backing buffer, so writes must add this.offset,
        // just like the read methods in ResizableBufferCursor do.
        this.dv.setUint8(this.offset + this._position++, value);
        return this;
    }

    /**
     * Writes an unsigned 16-bit integer and increments position by 2.
     */
    write_u16(value: number): this {
        this.ensure_size(2);
        this.dv.setUint16(this.offset + this.position, value, this.little_endian);
        this._position += 2;
        return this;
    }

    /**
     * Writes an unsigned 32-bit integer and increments position by 4.
     */
    write_u32(value: number): this {
        this.ensure_size(4);
        this.dv.setUint32(this.offset + this.position, value, this.little_endian);
        this._position += 4;
        return this;
    }

    /**
     * Writes a signed 32-bit integer and increments position by 4.
     */
    write_i32(value: number): this {
        this.ensure_size(4);
        this.dv.setInt32(this.offset + this.position, value, this.little_endian);
        this._position += 4;
        return this;
    }

    /**
     * Writes a 32-bit floating point number and increments position by 4.
     */
    write_f32(value: number): this {
        this.ensure_size(4);
        this.dv.setFloat32(this.offset + this.position, value, this.little_endian);
        this._position += 4;
        return this;
    }

    /**
     * Writes an array of unsigned 8-bit integers and increments position by the array's length.
     */
    write_u8_array(array: number[]): this {
        this.ensure_size(array.length);

        new Uint8Array(this.buffer.backing_buffer, this.offset + this.position).set(
            new Uint8Array(array)
        );

        this._position += array.length;
        return this;
    }

    /**
     * Writes the remaining contents of the given cursor, advancing both cursors past them.
     */
    write_cursor(other: Cursor): this {
        const size = other.size - other.position;
        this.ensure_size(size);

        other.copy_to_uint8_array(
            new Uint8Array(this.buffer.backing_buffer, this.offset + this.position, size),
            size
        );

        this._position += size;
        return this;
    }

    /**
     * Writes byte_length bytes of str's ASCII encoding, padding with nul bytes
     * when the encoding is shorter than byte_length.
     */
    write_string_ascii(str: string, byte_length: number): this {
        this.ensure_size(byte_length);

        let i = 0;

        for (const byte of ASCII_ENCODER.encode(str)) {
            // Stop once byte_length bytes are written instead of idly iterating
            // the rest of the encoded string.
            if (i >= byte_length) break;

            this.write_u8(byte);
            ++i;
        }

        while (i < byte_length) {
            this.write_u8(0);
            ++i;
        }

        return this;
    }

    /**
     * Grows the cursor (and the underlying buffer) so that size bytes can be
     * written at the current position.
     */
    private ensure_size(size: number): void {
        const needed = this.position + size - this._size;

        if (needed > 0) {
            this._size += needed;

            if (this.buffer.size < this.offset + this._size) {
                this.buffer.size = this.offset + this._size;
            }
        }
    }
}

View File

@ -0,0 +1,10 @@
// TODO: remove dependency on text-encoding because it is no longer maintained.
import { TextDecoder, TextEncoder } from "text-encoding";

// Shared codec instances used by the cursor implementations.
// NOTE(review): passing an encoding to the TextEncoder constructor is specific
// to the text-encoding package — the WHATWG TextEncoder only encodes UTF-8.
// Keep this in mind when replacing the dependency.
export const ASCII_DECODER = new TextDecoder("ascii");
export const UTF_16BE_DECODER = new TextDecoder("utf-16be");
export const UTF_16LE_DECODER = new TextDecoder("utf-16le");
export const ASCII_ENCODER = new TextEncoder("ascii");
export const UTF_16BE_ENCODER = new TextEncoder("utf-16be");
export const UTF_16LE_ENCODER = new TextEncoder("utf-16le");

View File

@ -1,9 +1,11 @@
import { BufferCursor } from "../BufferCursor";
import { Cursor } from "../cursor/Cursor";
import { WritableArrayBufferCursor } from "../cursor/WritableArrayBufferCursor";
import { Endianness } from "..";
/**
* Decrypts the bytes left in cursor.
*/
export function decrypt(key: number, cursor: BufferCursor): BufferCursor {
export function decrypt(key: number, cursor: Cursor): Cursor {
return new PrcDecryptor(key).decrypt(cursor);
}
@ -15,36 +17,48 @@ class PrcDecryptor {
this.construct_keys(key);
}
decrypt(cursor: BufferCursor): BufferCursor {
decrypt(cursor: Cursor): Cursor {
// Size should be divisible by 4.
const actual_size = cursor.bytes_left;
const size = Math.ceil(actual_size / 4) * 4;
const out_cursor = new BufferCursor(size, cursor.little_endian);
const out_cursor = new WritableArrayBufferCursor(
new ArrayBuffer(actual_size),
cursor.endianness
);
for (let pos = 0; pos < size; pos += 4) {
let u32;
if (cursor.bytes_left >= 4) {
u32 = cursor.u32();
out_cursor.write_u32(this.decrypt_u32(u32));
} else {
// If the actual size of the cursor is not divisible by 4, "append" nul bytes until it is.
const left_over = cursor.bytes_left;
u32 = 0;
// Pack left over bytes into a u32.
for (let i = 0; i < left_over; i++) {
if (cursor.little_endian) {
if (cursor.endianness === Endianness.Little) {
u32 |= cursor.u8() << (8 * i);
} else {
u32 |= cursor.u8() << (8 * (3 - i));
}
}
}
out_cursor.write_u32(this.decrypt_u32(u32));
const u32_decrypted = this.decrypt_u32(u32);
// Unpack the decrypted u32 into bytes again.
for (let i = 0; i < left_over; i++) {
if (cursor.endianness === Endianness.Little) {
out_cursor.write_u8((u32_decrypted >>> (8 * i)) & 0xff);
} else {
out_cursor.write_u8((u32_decrypted >>> (8 * (3 - i))) & 0xff);
}
}
}
}
out_cursor.seek_start(0);
out_cursor.size = actual_size;
return out_cursor;
}

View File

@ -0,0 +1,4 @@
/**
 * Byte order in which multi-byte values are stored.
 * The numeric values (Little = 0, Big = 1) are used as array indices in the tests.
 */
export enum Endianness {
    Little,
    Big,
}

View File

@ -1,4 +1,4 @@
import { BufferCursor } from "../BufferCursor";
import { Cursor } from "../cursor/Cursor";
import { Vec3 } from "../Vec3";
export type CollisionObject = {
@ -16,7 +16,7 @@ export type CollisionTriangle = {
normal: Vec3;
};
export function parse_area_collision_geometry(cursor: BufferCursor): CollisionObject {
export function parse_area_collision_geometry(cursor: Cursor): CollisionObject {
cursor.seek_end(16);
const main_block_offset = cursor.u32();
cursor.seek_start(main_block_offset);

View File

@ -1,4 +1,4 @@
import { BufferCursor } from "../BufferCursor";
import { Cursor } from "../cursor/Cursor";
export type ItemPmt = {
stat_boosts: PmtStatBoost[];
@ -94,7 +94,7 @@ export type PmtTool = {
reserved: number[];
};
export function parse_item_pmt(cursor: BufferCursor): ItemPmt {
export function parse_item_pmt(cursor: Cursor): ItemPmt {
cursor.seek_end(32);
const main_table_offset = cursor.u32();
const main_table_size = cursor.u32();
@ -137,7 +137,7 @@ export function parse_item_pmt(cursor: BufferCursor): ItemPmt {
return item_pmt;
}
function parse_stat_boosts(cursor: BufferCursor, offset: number, size: number): PmtStatBoost[] {
function parse_stat_boosts(cursor: Cursor, offset: number, size: number): PmtStatBoost[] {
cursor.seek_start(offset);
const stat_boosts: PmtStatBoost[] = [];
@ -153,7 +153,7 @@ function parse_stat_boosts(cursor: BufferCursor, offset: number, size: number):
return stat_boosts;
}
function parse_weapons(cursor: BufferCursor, offset: number, size: number): PmtWeapon[] {
function parse_weapons(cursor: Cursor, offset: number, size: number): PmtWeapon[] {
cursor.seek_start(offset);
const weapons: PmtWeapon[] = [];
@ -191,7 +191,7 @@ function parse_weapons(cursor: BufferCursor, offset: number, size: number): PmtW
return weapons;
}
function parse_armors(cursor: BufferCursor, offset: number, size: number): PmtArmor[] {
function parse_armors(cursor: Cursor, offset: number, size: number): PmtArmor[] {
cursor.seek_start(offset);
const armors: PmtArmor[] = [];
@ -224,11 +224,11 @@ function parse_armors(cursor: BufferCursor, offset: number, size: number): PmtAr
return armors;
}
function parse_shields(cursor: BufferCursor, offset: number, size: number): PmtShield[] {
function parse_shields(cursor: Cursor, offset: number, size: number): PmtShield[] {
return parse_armors(cursor, offset, size);
}
function parse_units(cursor: BufferCursor, offset: number, size: number): PmtUnit[] {
function parse_units(cursor: Cursor, offset: number, size: number): PmtUnit[] {
cursor.seek_start(offset);
const units: PmtUnit[] = [];
@ -248,7 +248,7 @@ function parse_units(cursor: BufferCursor, offset: number, size: number): PmtUni
return units;
}
function parse_tools(cursor: BufferCursor, offset: number, size: number): PmtTool[] {
function parse_tools(cursor: Cursor, offset: number, size: number): PmtTool[] {
cursor.seek_start(offset);
const tools: PmtTool[] = [];

View File

@ -1,7 +1,7 @@
import { BufferCursor } from "../../BufferCursor";
import { Vec3 } from "../../Vec3";
import { NjcmModel, parse_njcm_model } from "./njcm";
import { parse_xj_model, XjModel } from "./xj";
import { Cursor } from "../../cursor/Cursor";
// TODO:
// - deal with multiple NJCM chunks
@ -110,17 +110,17 @@ export type NjEvaluationFlags = {
shape_skip: boolean;
};
export function parse_nj(cursor: BufferCursor): NjObject<NjcmModel>[] {
export function parse_nj(cursor: Cursor): NjObject<NjcmModel>[] {
return parse_ninja(cursor, parse_njcm_model, []);
}
export function parse_xj(cursor: BufferCursor): NjObject<XjModel>[] {
export function parse_xj(cursor: Cursor): NjObject<XjModel>[] {
return parse_ninja(cursor, parse_xj_model, undefined);
}
function parse_ninja<M extends NjModel>(
cursor: BufferCursor,
parse_model: (cursor: BufferCursor, context: any) => M,
cursor: Cursor,
parse_model: (cursor: Cursor, context: any) => M,
context: any
): NjObject<M>[] {
while (cursor.bytes_left) {
@ -146,8 +146,8 @@ function parse_ninja<M extends NjModel>(
// TODO: cache model and object offsets so we don't reparse the same data.
function parse_sibling_objects<M extends NjModel>(
cursor: BufferCursor,
parse_model: (cursor: BufferCursor, context: any) => M,
cursor: Cursor,
parse_model: (cursor: Cursor, context: any) => M,
context: any
): NjObject<M>[] {
const eval_flags = cursor.u32();

View File

@ -1,5 +1,5 @@
import { BufferCursor } from "../../BufferCursor";
import { Vec3 } from "../../Vec3";
import { Cursor } from "../../cursor/Cursor";
const ANGLE_TO_RAD = (2 * Math.PI) / 0xffff;
@ -65,7 +65,7 @@ export type NjKeyframeA = {
value: Vec3; // Euler angles in radians.
};
export function parse_njm(cursor: BufferCursor, bone_count: number): NjMotion {
export function parse_njm(cursor: Cursor, bone_count: number): NjMotion {
if (cursor.string_ascii(4, false, true) === "NMDM") {
return parse_njm_v2(cursor, bone_count);
} else {
@ -77,7 +77,7 @@ export function parse_njm(cursor: BufferCursor, bone_count: number): NjMotion {
/**
* Format used by PSO v2 and for the enemies in PSO:BB.
*/
function parse_njm_v2(cursor: BufferCursor, bone_count: number): NjMotion {
function parse_njm_v2(cursor: Cursor, bone_count: number): NjMotion {
const chunk_size = cursor.u32();
return parse_motion(cursor.take(chunk_size), bone_count);
}
@ -85,7 +85,7 @@ function parse_njm_v2(cursor: BufferCursor, bone_count: number): NjMotion {
/**
* Format used by PSO:BB plymotiondata.rlc.
*/
function parse_njm_bb(cursor: BufferCursor, bone_count: number): NjMotion {
function parse_njm_bb(cursor: Cursor, bone_count: number): NjMotion {
cursor.seek_end(16);
const offset1 = cursor.u32();
cursor.seek_start(offset1);
@ -94,14 +94,14 @@ function parse_njm_bb(cursor: BufferCursor, bone_count: number): NjMotion {
return parse_action(cursor, bone_count);
}
function parse_action(cursor: BufferCursor, bone_count: number): NjMotion {
function parse_action(cursor: Cursor, bone_count: number): NjMotion {
cursor.seek(4); // Object pointer placeholder.
const motion_offset = cursor.u32();
cursor.seek_start(motion_offset);
return parse_motion(cursor, bone_count);
}
function parse_motion(cursor: BufferCursor, bone_count: number): NjMotion {
function parse_motion(cursor: Cursor, bone_count: number): NjMotion {
// Points to an array the size of bone_count.
let mdata_offset = cursor.u32();
const frame_count = cursor.u32();
@ -184,7 +184,7 @@ function parse_motion(cursor: BufferCursor, bone_count: number): NjMotion {
};
}
function parse_motion_data_f(cursor: BufferCursor, count: number): NjKeyframeF[] {
function parse_motion_data_f(cursor: Cursor, count: number): NjKeyframeF[] {
const frames: NjKeyframeF[] = [];
for (let i = 0; i < count; ++i) {
@ -198,7 +198,7 @@ function parse_motion_data_f(cursor: BufferCursor, count: number): NjKeyframeF[]
}
function parse_motion_data_a(
cursor: BufferCursor,
cursor: Cursor,
keyframe_count: number,
frame_count: number
): NjKeyframeA[] {
@ -230,7 +230,7 @@ function parse_motion_data_a(
return frames;
}
function parse_motion_data_a_wide(cursor: BufferCursor, keyframe_count: number): NjKeyframeA[] {
function parse_motion_data_a_wide(cursor: Cursor, keyframe_count: number): NjKeyframeA[] {
const frames: NjKeyframeA[] = [];
for (let i = 0; i < keyframe_count; ++i) {

View File

@ -1,7 +1,7 @@
import Logger from "js-logger";
import { BufferCursor } from "../../BufferCursor";
import { Vec3 } from "../../Vec3";
import { NjVertex } from ".";
import { Cursor } from "../../cursor/Cursor";
import { Vec3 } from "../../Vec3";
const logger = Logger.get("data_formats/parsing/ninja/njcm");
@ -129,7 +129,7 @@ type NjcmMeshVertex = {
normal?: Vec3;
};
export function parse_njcm_model(cursor: BufferCursor, cached_chunk_offsets: number[]): NjcmModel {
export function parse_njcm_model(cursor: Cursor, cached_chunk_offsets: number[]): NjcmModel {
const vlist_offset = cursor.u32(); // Vertex list
const plist_offset = cursor.u32(); // Triangle strip index list
const bounding_sphere_center = new Vec3(cursor.f32(), cursor.f32(), cursor.f32());
@ -176,7 +176,7 @@ export function parse_njcm_model(cursor: BufferCursor, cached_chunk_offsets: num
// TODO: don't reparse when DrawPolygonList chunk is encountered.
function parse_chunks(
cursor: BufferCursor,
cursor: Cursor,
cached_chunk_offsets: number[],
wide_end_chunks: boolean
): NjcmChunk[] {
@ -278,11 +278,7 @@ function parse_chunks(
return chunks;
}
function parse_vertex_chunk(
cursor: BufferCursor,
chunk_type_id: number,
flags: number
): NjcmVertex[] {
function parse_vertex_chunk(cursor: Cursor, chunk_type_id: number, flags: number): NjcmVertex[] {
if (chunk_type_id < 32 || chunk_type_id > 50) {
logger.warn(`Unknown vertex chunk type ${chunk_type_id}.`);
return [];
@ -371,7 +367,7 @@ function parse_vertex_chunk(
}
function parse_triangle_strip_chunk(
cursor: BufferCursor,
cursor: Cursor,
chunk_type_id: number,
flags: number
): NjcmTriangleStrip[] {

View File

@ -1,4 +1,4 @@
import { BufferCursor } from "../../BufferCursor";
import { Cursor } from "../../cursor/Cursor";
import { Vec3 } from "../../Vec3";
import { NjVertex } from "../ninja";
@ -18,7 +18,7 @@ export type XjTriangleStrip = {
indices: number[];
};
export function parse_xj_model(cursor: BufferCursor): XjModel {
export function parse_xj_model(cursor: Cursor): XjModel {
cursor.seek(4); // Flags according to QEdit, seemingly always 0.
const vertex_info_list_offset = cursor.u32();
cursor.seek(4); // Seems to be the vertexInfoCount, always 1.
@ -84,7 +84,7 @@ export function parse_xj_model(cursor: BufferCursor): XjModel {
}
function parse_triangle_strip_list(
cursor: BufferCursor,
cursor: Cursor,
triangle_strip_list_offset: number,
triangle_strip_count: number
): XjTriangleStrip[] {

View File

@ -1,14 +1,14 @@
import { BufferCursor } from "../BufferCursor";
import { decrypt } from "../encryption/prc";
import { decompress } from "../compression/prs";
import Logger from "js-logger";
import { decompress } from "../compression/prs";
import { Cursor } from "../cursor/Cursor";
import { decrypt } from "../encryption/prc";
const logger = Logger.get("data_formats/parsing/prc");
/**
* Decrypts and decompresses a .prc file.
*/
export function parse_prc(cursor: BufferCursor): BufferCursor {
export function parse_prc(cursor: Cursor): Cursor {
// Unencrypted, decompressed size.
const size = cursor.u32();
let key = cursor.u32();

View File

@ -1,15 +1,17 @@
import * as fs from "fs";
import { BufferCursor } from "../../BufferCursor";
import * as prs from "../../compression/prs";
import { parse_bin, write_bin } from "./bin";
import { Endianness } from "../..";
import { BufferCursor } from "../../cursor/BufferCursor";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
/**
* Parse a file, convert the resulting structure to BIN again and check whether the end result is equal to the original.
*/
test("parse_bin and write_bin", () => {
const orig_buffer = fs.readFileSync("test/resources/quest118_e.bin").buffer;
const orig_bin = prs.decompress(new BufferCursor(orig_buffer, true));
const test_bin = write_bin(parse_bin(orig_bin));
const orig_buffer = fs.readFileSync("test/resources/quest118_e.bin");
const orig_bin = prs.decompress(new BufferCursor(orig_buffer, Endianness.Little));
const test_bin = new ArrayBufferCursor(write_bin(parse_bin(orig_bin)), Endianness.Little);
orig_bin.seek_start(0);
expect(test_bin.size).toBe(orig_bin.size);

View File

@ -1,5 +1,5 @@
import { BufferCursor } from "../../BufferCursor";
import Logger from "js-logger";
import { Cursor } from "../../cursor/Cursor";
const logger = Logger.get("data_formats/parsing/quest/bin");
@ -11,10 +11,10 @@ export interface BinFile {
long_description: string;
function_offsets: number[];
instructions: Instruction[];
data: BufferCursor;
data: ArrayBuffer;
}
export function parse_bin(cursor: BufferCursor, lenient: boolean = false): BinFile {
export function parse_bin(cursor: Cursor, lenient: boolean = false): BinFile {
const object_code_offset = cursor.u32();
const function_offset_table_offset = cursor.u32(); // Relative offsets
const size = cursor.u32();
@ -53,12 +53,12 @@ export function parse_bin(cursor: BufferCursor, lenient: boolean = false): BinFi
long_description,
function_offsets,
instructions,
data: cursor.seek_start(0).take(cursor.size),
data: cursor.seek_start(0).array_buffer(),
};
}
export function write_bin({ data }: { data: BufferCursor }): BufferCursor {
return data.seek_start(0);
export function write_bin({ data }: { data: ArrayBuffer }): ArrayBuffer {
return data;
}
export interface Instruction {
@ -68,7 +68,7 @@ export interface Instruction {
size: number;
}
function parse_object_code(cursor: BufferCursor, lenient: boolean): Instruction[] {
function parse_object_code(cursor: Cursor, lenient: boolean): Instruction[] {
const instructions = [];
try {
@ -148,10 +148,7 @@ function parse_object_code(cursor: BufferCursor, lenient: boolean): Instruction[
return instructions;
}
function parse_instruction_arguments(
cursor: BufferCursor,
mask: string
): { args: any[]; size: number } {
function parse_instruction_arguments(cursor: Cursor, mask: string): { args: any[]; size: number } {
const old_pos = cursor.position;
const args = [];
let args_size: number;

View File

@ -1,15 +1,17 @@
import * as fs from "fs";
import { BufferCursor } from "../../BufferCursor";
import { Endianness } from "../..";
import * as prs from "../../compression/prs";
import { BufferCursor } from "../../cursor/BufferCursor";
import { ResizableBufferCursor } from "../../cursor/ResizableBufferCursor";
import { parse_dat, write_dat } from "./dat";
/**
* Parse a file, convert the resulting structure to DAT again and check whether the end result is equal to the original.
*/
test("parse_dat and write_dat", () => {
const orig_buffer = fs.readFileSync("test/resources/quest118_e.dat").buffer;
const orig_dat = prs.decompress(new BufferCursor(orig_buffer, true));
const test_dat = write_dat(parse_dat(orig_dat));
const orig_buffer = fs.readFileSync("test/resources/quest118_e.dat");
const orig_dat = prs.decompress(new BufferCursor(orig_buffer, Endianness.Little));
const test_dat = new ResizableBufferCursor(write_dat(parse_dat(orig_dat)), Endianness.Little);
orig_dat.seek_start(0);
expect(test_dat.size).toBe(orig_dat.size);
@ -30,8 +32,8 @@ test("parse_dat and write_dat", () => {
* Parse a file, modify the resulting structure, convert it to DAT again and check whether the end result is equal to the original except for the bytes that should be changed.
*/
test("parse, modify and write DAT", () => {
const orig_buffer = fs.readFileSync("./test/resources/quest118_e.dat").buffer;
const orig_dat = prs.decompress(new BufferCursor(orig_buffer, true));
const orig_buffer = fs.readFileSync("./test/resources/quest118_e.dat");
const orig_dat = prs.decompress(new BufferCursor(orig_buffer, Endianness.Little));
const test_parsed = parse_dat(orig_dat);
orig_dat.seek_start(0);
@ -39,7 +41,7 @@ test("parse, modify and write DAT", () => {
test_parsed.objs[9].position.y = 17;
test_parsed.objs[9].position.z = 19;
const test_dat = write_dat(test_parsed);
const test_dat = new ResizableBufferCursor(write_dat(test_parsed), Endianness.Little);
expect(test_dat.size).toBe(orig_dat.size);

View File

@ -1,6 +1,9 @@
import { groupBy } from "lodash";
import { BufferCursor } from "../../BufferCursor";
import Logger from "js-logger";
import { groupBy } from "lodash";
import { Endianness } from "../..";
import { Cursor } from "../../cursor/Cursor";
import { WritableResizableBufferCursor } from "../../cursor/WritableResizableBufferCursor";
import { ResizableBuffer } from "../../ResizableBuffer";
import { Vec3 } from "../../Vec3";
const logger = Logger.get("data_formats/parsing/quest/dat");
@ -38,7 +41,7 @@ export type DatUnknown = {
data: number[];
};
export function parse_dat(cursor: BufferCursor): DatFile {
export function parse_dat(cursor: Cursor): DatFile {
const objs: DatObject[] = [];
const npcs: DatNpc[] = [];
const unknowns: DatUnknown[] = [];
@ -154,11 +157,13 @@ export function parse_dat(cursor: BufferCursor): DatFile {
return { objs, npcs, unknowns };
}
export function write_dat({ objs, npcs, unknowns }: DatFile): BufferCursor {
const cursor = new BufferCursor(
objs.length * OBJECT_SIZE + npcs.length * NPC_SIZE + unknowns.length * 1000,
true
export function write_dat({ objs, npcs, unknowns }: DatFile): ResizableBuffer {
const buffer = new ResizableBuffer(
objs.length * (16 + OBJECT_SIZE) +
npcs.length * (16 + NPC_SIZE) +
unknowns.reduce((a, b) => a + b.total_size, 0)
);
const cursor = new WritableResizableBufferCursor(buffer, Endianness.Little);
const grouped_objs = groupBy(objs, obj => obj.area_id);
const obj_area_ids = Object.keys(grouped_objs)
@ -234,7 +239,5 @@ export function write_dat({ objs, npcs, unknowns }: DatFile): BufferCursor {
cursor.write_u32(0);
cursor.write_u32(0);
cursor.seek_start(0);
return cursor;
return buffer;
}

View File

@ -1,11 +1,13 @@
import * as fs from "fs";
import { BufferCursor } from "../../BufferCursor";
import { parse_quest, write_quest_qst } from "../quest";
import { ObjectType, Quest } from "../../../domain";
import { parse_quest, write_quest_qst } from "../quest";
import { Endianness } from "../..";
import { BufferCursor } from "../../cursor/BufferCursor";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
test("parse Towards the Future", () => {
const buffer = fs.readFileSync("test/resources/quest118_e.qst").buffer;
const cursor = new BufferCursor(buffer, true);
const buffer = fs.readFileSync("test/resources/quest118_e.qst");
const cursor = new BufferCursor(buffer, Endianness.Little);
const quest = parse_quest(cursor)!;
expect(quest.name).toBe("Towards the Future");
@ -33,14 +35,17 @@ test("parse Towards the Future", () => {
});
/**
* Roundtrip test.
* Parse a QST file, write the resulting Quest object to QST again, then parse that again.
* Then check whether the two Quest objects are equal.
*/
test("parse_quest and write_quest_qst", () => {
const buffer = fs.readFileSync("test/resources/tethealla_v0.143_quests/solo/ep1/02.qst").buffer;
const cursor = new BufferCursor(buffer, true);
const buffer = fs.readFileSync("test/resources/tethealla_v0.143_quests/solo/ep1/02.qst");
const cursor = new BufferCursor(buffer, Endianness.Little);
const orig_quest = parse_quest(cursor)!;
const test_quest = parse_quest(write_quest_qst(orig_quest, "02.qst"))!;
const test_quest = parse_quest(
new ArrayBufferCursor(write_quest_qst(orig_quest, "02.qst"), Endianness.Little)
)!;
expect(test_quest.name).toBe(orig_quest.name);
expect(test_quest.short_description).toBe(orig_quest.short_description);

View File

@ -1,8 +1,11 @@
import Logger from "js-logger";
import { Endianness } from "../..";
import { AreaVariant, NpcType, ObjectType, Quest, QuestNpc, QuestObject } from "../../../domain";
import { area_store } from "../../../stores/AreaStore";
import { BufferCursor } from "../../BufferCursor";
import * as prs from "../../compression/prs";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
import { Cursor } from "../../cursor/Cursor";
import { ResizableBufferCursor } from "../../cursor/ResizableBufferCursor";
import { Vec3 } from "../../Vec3";
import { Instruction, parse_bin, write_bin } from "./bin";
import { DatFile, DatNpc, DatObject, parse_dat, write_dat } from "./dat";
@ -15,7 +18,7 @@ const logger = Logger.get("data_formats/parsing/quest");
*
* Always delegates to parseQst at the moment.
*/
export function parse_quest(cursor: BufferCursor, lenient: boolean = false): Quest | undefined {
export function parse_quest(cursor: Cursor, lenient: boolean = false): Quest | undefined {
const qst = parse_qst(cursor);
if (!qst) {
@ -47,8 +50,11 @@ export function parse_quest(cursor: BufferCursor, lenient: boolean = false): Que
return;
}
const dat = parse_dat(prs.decompress(dat_file.data));
const bin = parse_bin(prs.decompress(bin_file.data), lenient);
const dat = parse_dat(prs.decompress(new ArrayBufferCursor(dat_file.data, Endianness.Little)));
const bin = parse_bin(
prs.decompress(new ArrayBufferCursor(bin_file.data, Endianness.Little)),
lenient
);
let episode = 1;
let area_variants: AreaVariant[] = [];
@ -79,7 +85,7 @@ export function parse_quest(cursor: BufferCursor, lenient: boolean = false): Que
);
}
export function write_quest_qst(quest: Quest, file_name: string): BufferCursor {
export function write_quest_qst(quest: Quest, file_name: string): ArrayBuffer {
const dat = write_dat({
objs: objects_to_dat_data(quest.objects),
npcs: npcsToDatData(quest.npcs),
@ -94,12 +100,14 @@ export function write_quest_qst(quest: Quest, file_name: string): BufferCursor {
{
name: base_file_name + ".dat",
id: quest.id,
data: prs.compress(dat),
data: prs
.compress(new ResizableBufferCursor(dat, Endianness.Little))
.array_buffer(),
},
{
name: base_file_name + ".bin",
id: quest.id,
data: prs.compress(bin),
data: prs.compress(new ArrayBufferCursor(bin, Endianness.Little)).array_buffer(),
},
],
});

View File

@ -1,17 +1,19 @@
import { BufferCursor } from "../../BufferCursor";
import { parse_qst, write_qst } from "./qst";
import { walk_qst_files } from "../../../../test/src/utils";
import { parse_qst, write_qst } from "./qst";
import { Endianness } from "../..";
import { BufferCursor } from "../../cursor/BufferCursor";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
/**
* Parse a file, convert the resulting structure to QST again and check whether the end result is equal to the original.
*/
test("parse_qst and write_qst", () => {
walk_qst_files((_file_path, _file_name, file_content) => {
const orig_qst = new BufferCursor(file_content.buffer, true);
const orig_qst = new BufferCursor(file_content, Endianness.Little);
const orig_quest = parse_qst(orig_qst);
if (orig_quest) {
const test_qst = write_qst(orig_quest);
const test_qst = new ArrayBufferCursor(write_qst(orig_quest), Endianness.Little);
orig_qst.seek_start(0);
expect(test_qst.size).toBe(orig_qst.size);

View File

@ -1,5 +1,11 @@
import { BufferCursor } from "../../BufferCursor";
import Logger from "js-logger";
import { Cursor } from "../../cursor/Cursor";
import { WritableArrayBufferCursor } from "../../cursor/WritableArrayBufferCursor";
import { Endianness } from "../..";
import { WritableCursor } from "../../cursor/WritableCursor";
import { WritableResizableBufferCursor } from "../../cursor/WritableResizableBufferCursor";
import { ResizableBuffer } from "../../ResizableBuffer";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
const logger = Logger.get("data_formats/parsing/quest/qst");
@ -7,9 +13,7 @@ export type QstContainedFile = {
id?: number;
name: string;
name_2?: string; // Unsure what this is
expected_size?: number;
data: BufferCursor;
chunk_nos: Set<number>;
data: ArrayBuffer;
};
export type ParseQstResult = {
@ -21,7 +25,7 @@ export type ParseQstResult = {
* Low level parsing function for .qst files.
* Can only read the Blue Burst format.
*/
export function parse_qst(cursor: BufferCursor): ParseQstResult | undefined {
export function parse_qst(cursor: Cursor): ParseQstResult | undefined {
// A .qst file contains two 88-byte headers that describe the embedded .dat and .bin files.
let version = "PC";
@ -67,27 +71,28 @@ export function parse_qst(cursor: BufferCursor): ParseQstResult | undefined {
}
}
export type SimpleQstContainedFile = {
export type QstContainedFileParam = {
id?: number;
name: string;
name_2?: string;
data: BufferCursor;
data: ArrayBuffer;
};
export type WriteQstParams = {
version?: string;
files: SimpleQstContainedFile[];
files: QstContainedFileParam[];
};
/**
* Always uses Blue Burst format.
*/
export function write_qst(params: WriteQstParams): BufferCursor {
export function write_qst(params: WriteQstParams): ArrayBuffer {
const files = params.files;
const total_size = files
.map(f => 88 + Math.ceil(f.data.size / 1024) * 1056)
.map(f => 88 + Math.ceil(f.data.byteLength / 1024) * 1056)
.reduce((a, b) => a + b);
const cursor = new BufferCursor(total_size, true);
const buffer = new ArrayBuffer(total_size);
const cursor = new WritableArrayBufferCursor(buffer, Endianness.Little);
write_file_headers(cursor, files);
write_file_chunks(cursor, files);
@ -96,7 +101,7 @@ export function write_qst(params: WriteQstParams): BufferCursor {
throw new Error(`Expected a final file size of ${total_size}, but got ${cursor.size}.`);
}
return cursor.seek_start(0);
return buffer;
}
type QstHeader = {
@ -109,7 +114,7 @@ type QstHeader = {
/**
* TODO: Read all headers instead of just the first 2.
*/
function parse_headers(cursor: BufferCursor): QstHeader[] {
function parse_headers(cursor: Cursor): QstHeader[] {
const headers: QstHeader[] = [];
for (let i = 0; i < 2; ++i) {
@ -132,13 +137,18 @@ function parse_headers(cursor: BufferCursor): QstHeader[] {
return headers;
}
function parse_files(
cursor: BufferCursor,
expected_sizes: Map<string, number>
): QstContainedFile[] {
function parse_files(cursor: Cursor, expected_sizes: Map<string, number>): QstContainedFile[] {
// Files are interleaved in 1056 byte chunks.
// Each chunk has a 24 byte header, 1024 byte data segment and an 8 byte trailer.
const files = new Map<string, QstContainedFile>();
const files = new Map<
string,
{
name: string;
expected_size?: number;
cursor: WritableCursor;
chunk_nos: Set<number>;
}
>();
while (cursor.bytes_left >= 1056) {
const start_position = cursor.position;
@ -156,7 +166,10 @@ function parse_files(
(file = {
name: file_name,
expected_size,
data: new BufferCursor(expected_size || 10 * 1024, true),
cursor: new WritableResizableBufferCursor(
new ResizableBuffer(expected_size || 10 * 1024),
Endianness.Little
),
chunk_nos: new Set(),
})
);
@ -183,8 +196,8 @@ function parse_files(
const data = cursor.take(size);
const chunk_position = chunk_no * 1024;
file.data.size = Math.max(chunk_position + size, file.data.size);
file.data.seek_start(chunk_position).write_cursor(data);
file.cursor.size = Math.max(chunk_position + size, file.cursor.size);
file.cursor.seek_start(chunk_position).write_cursor(data);
// Skip the padding and the trailer.
cursor.seek(1032 - data.size);
@ -203,18 +216,18 @@ function parse_files(
for (const file of files.values()) {
// Clean up file properties.
file.data.seek_start(0);
file.cursor.seek_start(0);
file.chunk_nos = new Set(Array.from(file.chunk_nos.values()).sort((a, b) => a - b));
// Check whether the expected size was correct.
if (file.expected_size != null && file.data.size !== file.expected_size) {
if (file.expected_size != null && file.cursor.size !== file.expected_size) {
logger.warn(
`File ${file.name} has an actual size of ${file.data.size} instead of the expected size ${file.expected_size}.`
`File ${file.name} has an actual size of ${file.cursor.size} instead of the expected size ${file.expected_size}.`
);
}
// Detect missing file chunks.
const actual_size = Math.max(file.data.size, file.expected_size || 0);
const actual_size = Math.max(file.cursor.size, file.expected_size || 0);
for (let chunk_no = 0; chunk_no < Math.ceil(actual_size / 1024); ++chunk_no) {
if (!file.chunk_nos.has(chunk_no)) {
@ -223,10 +236,19 @@ function parse_files(
}
}
return Array.from(files.values());
const contained_files: QstContainedFile[] = [];
for (const file of files.values()) {
contained_files.push({
name: file.name,
data: file.cursor.seek_start(0).array_buffer(),
});
}
return contained_files;
}
function write_file_headers(cursor: BufferCursor, files: SimpleQstContainedFile[]): void {
function write_file_headers(cursor: WritableCursor, files: QstContainedFileParam[]): void {
for (const file of files) {
if (file.name.length > 16) {
throw Error(`File ${file.name} has a name longer than 16 characters.`);
@ -241,7 +263,7 @@ function write_file_headers(cursor: BufferCursor, files: SimpleQstContainedFile[
}
cursor.write_string_ascii(file.name, 16);
cursor.write_u32(file.data.size);
cursor.write_u32(file.data.byteLength);
let file_name_2: string;
@ -266,20 +288,22 @@ function write_file_headers(cursor: BufferCursor, files: SimpleQstContainedFile[
}
}
function write_file_chunks(cursor: BufferCursor, files: SimpleQstContainedFile[]): void {
function write_file_chunks(cursor: WritableCursor, files: QstContainedFileParam[]): void {
// Files are interleaved in 1056 byte chunks.
// Each chunk has a 24 byte header, 1024 byte data segment and an 8 byte trailer.
files = files.slice();
const chunk_nos = new Array(files.length).fill(0);
const chunks = files.map(file => ({
no: 0,
data: new ArrayBufferCursor(file.data, Endianness.Little),
name: file.name,
}));
while (files.length) {
while (chunks.length) {
let i = 0;
while (i < files.length) {
if (!write_file_chunk(cursor, files[i].data, chunk_nos[i]++, files[i].name)) {
while (i < chunks.length) {
if (!write_file_chunk(cursor, chunks[i].data, chunks[i].no++, chunks[i].name)) {
// Remove if there are no more chunks to write.
files.splice(i, 1);
chunk_nos.splice(i, 1);
chunks.splice(i, 1);
} else {
++i;
}
@ -291,8 +315,8 @@ function write_file_chunks(cursor: BufferCursor, files: SimpleQstContainedFile[]
* @returns true if there are bytes left to write in data, false otherwise.
*/
function write_file_chunk(
cursor: BufferCursor,
data: BufferCursor,
cursor: WritableCursor,
data: Cursor,
chunk_no: number,
name: string
): boolean {

View File

@ -1,5 +1,6 @@
import { BufferCursor } from "../BufferCursor";
import Logger from "js-logger";
import { Endianness } from "..";
import { Cursor } from "../cursor/Cursor";
import { parse_prc } from "./prc";
const logger = Logger.get("data_formats/parsing/rlc");
@ -10,7 +11,7 @@ const MARKER = "RelChunkVer0.20";
*
* @returns the contained files, decrypted and decompressed.
*/
export function parse_rlc(cursor: BufferCursor): BufferCursor[] {
export function parse_rlc(cursor: Cursor): Cursor[] {
const marker = cursor.string_ascii(16, true, true);
if (marker !== MARKER) {
@ -20,7 +21,7 @@ export function parse_rlc(cursor: BufferCursor): BufferCursor[] {
const table_size = cursor.u32();
cursor.seek(12);
const files: BufferCursor[] = [];
const files: Cursor[] = [];
for (let i = 0; i < table_size; ++i) {
const offset = cursor.u32();
@ -30,7 +31,7 @@ export function parse_rlc(cursor: BufferCursor): BufferCursor[] {
cursor.seek_start(offset);
const file = cursor.take(size);
file.little_endian = true;
file.endianness = Endianness.Little;
files.push(parse_prc(file));
cursor.seek_start(prev_pos);

View File

@ -1,9 +1,9 @@
import { BufferCursor } from "../BufferCursor";
import { decompress } from "../compression/prs";
import { Cursor } from "../cursor/Cursor";
export type Unitxt = string[][];
export function parse_unitxt(buf: BufferCursor, compressed: boolean = true): Unitxt {
export function parse_unitxt(buf: Cursor, compressed: boolean = true): Unitxt {
if (compressed) {
buf = decompress(buf);
}

View File

@ -1,12 +1,11 @@
import { computed, observable } from "mobx";
import { Object3D } from "three";
import { BufferCursor } from "../data_formats/BufferCursor";
import { DatNpc, DatObject, DatUnknown } from "../data_formats/parsing/quest/dat";
import { NpcType } from "./NpcType";
import { ObjectType } from "./ObjectType";
import { Vec3 } from "../data_formats/Vec3";
import { enum_values } from "../enums";
import { ItemType } from "./items";
import { Vec3 } from "../data_formats/Vec3";
import { NpcType } from "./NpcType";
import { ObjectType } from "./ObjectType";
export * from "./items";
export * from "./NpcType";
@ -100,7 +99,7 @@ export class Quest {
/**
* (Partial) raw BIN data that can't be parsed yet by Phantasmal.
*/
bin_data: BufferCursor;
bin_data: ArrayBuffer;
constructor(
id: number | undefined,
@ -112,7 +111,7 @@ export class Quest {
objects: QuestObject[],
npcs: QuestNpc[],
dat_unknowns: DatUnknown[],
bin_data: BufferCursor
bin_data: ArrayBuffer
) {
if (id != null && (!Number.isInteger(id) || id < 0))
throw new Error("id should be undefined or a non-negative integer.");

31
src/enums.test.ts Normal file
View File

@ -0,0 +1,31 @@
import { enum_values } from "./enums";
enum Test {
TestA,
TestB,
TestC,
}
enum TestString {
TestA,
TestB,
TestC,
}
test("enum_values of integer enum", () => {
const values = enum_values(Test);
expect(values.length).toBe(3);
expect(values[0]).toBe(Test.TestA);
expect(values[1]).toBe(Test.TestB);
expect(values[2]).toBe(Test.TestC);
});
test("enum_values of string enum", () => {
const values = enum_values(TestString);
expect(values.length).toBe(3);
expect(values[0]).toBe(TestString.TestA);
expect(values[1]).toBe(TestString.TestB);
expect(values[2]).toBe(TestString.TestC);
});

View File

@ -9,10 +9,6 @@ export function enum_values<E>(e: any): E[] {
}
}
export function enum_names(e: any): string[] {
return Object.keys(e).filter(k => typeof (e as any)[k] === "string");
}
/**
* Map with a guaranteed value per enum key.
*/

View File

@ -1,10 +1,11 @@
import { Object3D } from "three";
import { BufferCursor } from "../data_formats/BufferCursor";
import { parse_area_collision_geometry } from "../data_formats/parsing/area_collision_geometry";
import { parse_area_geometry } from "../data_formats/parsing/area_geometry";
import { Area, AreaVariant, Section } from "../domain";
import { area_collision_geometry_to_object_3d } from "../rendering/areas";
import { get_area_collision_data, get_area_render_data } from "./binary_assets";
import { Endianness } from "../data_formats";
import { ArrayBufferCursor } from "../data_formats/cursor/ArrayBufferCursor";
function area(id: number, name: string, order: number, variants: number): Area {
const area = new Area(id, name, order, []);
@ -142,7 +143,7 @@ class AreaStore {
} else {
const object_3d = get_area_collision_data(episode, area_id, area_variant).then(buffer =>
area_collision_geometry_to_object_3d(
parse_area_collision_geometry(new BufferCursor(buffer, true))
parse_area_collision_geometry(new ArrayBufferCursor(buffer, Endianness.Little))
)
);
collision_geometry_cache.set(`${area_id}-${area_variant}`, object_3d);

View File

@ -1,9 +1,10 @@
import { BufferGeometry, CylinderBufferGeometry } from "three";
import { NpcType, ObjectType } from "../domain";
import { BufferCursor } from "../data_formats/BufferCursor";
import { get_npc_data, get_object_data } from "./binary_assets";
import { ninja_object_to_buffer_geometry } from "../rendering/models";
import { parse_nj, parse_xj } from "../data_formats/parsing/ninja";
import { NpcType, ObjectType } from "../domain";
import { ninja_object_to_buffer_geometry } from "../rendering/models";
import { get_npc_data, get_object_data } from "./binary_assets";
import { Endianness } from "../data_formats";
import { ArrayBufferCursor } from "../data_formats/cursor/ArrayBufferCursor";
const DEFAULT_ENTITY = new CylinderBufferGeometry(3, 3, 20);
DEFAULT_ENTITY.translate(0, 10, 0);
@ -26,7 +27,7 @@ class EntityStore {
return mesh;
} else {
mesh = get_npc_data(npc_type).then(({ url, data }) => {
const cursor = new BufferCursor(data, true);
const cursor = new ArrayBufferCursor(data, Endianness.Little);
const nj_objects = url.endsWith(".nj") ? parse_nj(cursor) : parse_xj(cursor);
if (nj_objects.length) {
@ -48,7 +49,7 @@ class EntityStore {
return geometry;
} else {
geometry = get_object_data(object_type).then(({ url, data }) => {
const cursor = new BufferCursor(data, true);
const cursor = new ArrayBufferCursor(data, Endianness.Little);
const nj_objects = url.endsWith(".nj") ? parse_nj(cursor) : parse_xj(cursor);
if (nj_objects.length) {

View File

@ -1,13 +1,14 @@
import Logger from "js-logger";
import { action, observable } from "mobx";
import { AnimationAction, AnimationClip, AnimationMixer, SkinnedMesh } from "three";
import { BufferCursor } from "../data_formats/BufferCursor";
import { NjModel, NjObject, parse_nj, parse_xj } from "../data_formats/parsing/ninja";
import { parse_njm, NjMotion } from "../data_formats/parsing/ninja/motion";
import { PlayerModel, PlayerAnimation } from "../domain";
import { NjMotion, parse_njm } from "../data_formats/parsing/ninja/motion";
import { PlayerAnimation, PlayerModel } from "../domain";
import { create_animation_clip, PSO_FRAME_RATE } from "../rendering/animation";
import { ninja_object_to_skinned_mesh } from "../rendering/models";
import { get_player_data, get_player_animation_data } from "./binary_assets";
import { get_player_animation_data, get_player_data } from "./binary_assets";
import { ArrayBufferCursor } from "../data_formats/cursor/ArrayBufferCursor";
import { Endianness } from "../data_formats";
const logger = Logger.get("stores/ModelViewerStore");
const nj_object_cache: Map<string, Promise<NjObject<NjModel>>> = new Map();
@ -154,18 +155,17 @@ class ModelViewerStore {
return;
}
const cursor = new ArrayBufferCursor(reader.result, Endianness.Little);
if (file.name.endsWith(".nj")) {
const model = parse_nj(new BufferCursor(reader.result, true))[0];
const model = parse_nj(cursor)[0];
this.set_model(model);
} else if (file.name.endsWith(".xj")) {
const model = parse_xj(new BufferCursor(reader.result, true))[0];
const model = parse_xj(cursor)[0];
this.set_model(model);
} else if (file.name.endsWith(".njm")) {
if (this.current_model) {
const njm = parse_njm(
new BufferCursor(reader.result, true),
this.current_bone_count
);
const njm = parse_njm(cursor, this.current_bone_count);
this.set_animation(create_animation_clip(this.current_model, njm));
}
} else {
@ -201,14 +201,14 @@ class ModelViewerStore {
private async get_all_assets(model: PlayerModel): Promise<NjObject<NjModel>> {
const body_data = await get_player_data(model.name, "Body");
const body = parse_nj(new BufferCursor(body_data, true))[0];
const body = parse_nj(new ArrayBufferCursor(body_data, Endianness.Little))[0];
if (!body) {
throw new Error(`Couldn't parse body for player class ${model.name}.`);
}
const head_data = await get_player_data(model.name, "Head", 0);
const head = parse_nj(new BufferCursor(head_data, true))[0];
const head = parse_nj(new ArrayBufferCursor(head_data, Endianness.Little))[0];
if (head) {
this.add_to_bone(body, head, 59);
@ -216,7 +216,7 @@ class ModelViewerStore {
if (model.hair_styles_count > 0) {
const hair_data = await get_player_data(model.name, "Hair", 0);
const hair = parse_nj(new BufferCursor(hair_data, true))[0];
const hair = parse_nj(new ArrayBufferCursor(hair_data, Endianness.Little))[0];
if (hair) {
this.add_to_bone(body, hair, 59);
@ -224,7 +224,9 @@ class ModelViewerStore {
if (model.hair_styles_with_accessory.has(0)) {
const accessory_data = await get_player_data(model.name, "Accessory", 0);
const accessory = parse_nj(new BufferCursor(accessory_data, true))[0];
const accessory = parse_nj(
new ArrayBufferCursor(accessory_data, Endianness.Little)
)[0];
if (accessory) {
this.add_to_bone(body, accessory, 59);
@ -242,7 +244,10 @@ class ModelViewerStore {
return nj_motion;
} else {
nj_motion = get_player_animation_data(animation.id).then(motion_data =>
parse_njm(new BufferCursor(motion_data, true), this.current_bone_count)
parse_njm(
new ArrayBufferCursor(motion_data, Endianness.Little),
this.current_bone_count
)
);
nj_motion_cache.set(animation.id, nj_motion);

View File

@ -1,15 +1,13 @@
import Logger from "js-logger";
import { action, observable } from "mobx";
import { BufferCursor } from "../data_formats/BufferCursor";
import { parse_quest, write_quest_qst } from "../data_formats/parsing/quest";
import { Area, Quest, QuestEntity, Section } from "../domain";
import { Vec3 } from "../data_formats/Vec3";
import {
create_npc_mesh as create_npc_object_3d,
create_object_mesh as create_object_object_3d,
} from "../rendering/entities";
import { Area, Quest, QuestEntity, Section } from "../domain";
import { create_npc_mesh, create_object_mesh } from "../rendering/entities";
import { area_store } from "./AreaStore";
import { entity_store } from "./EntityStore";
import { ArrayBufferCursor } from "../data_formats/cursor/ArrayBufferCursor";
import { Endianness } from "../data_formats";
const logger = Logger.get("stores/QuestEditorStore");
@ -53,19 +51,19 @@ class QuestEditorStore {
load_file = (file: File) => {
const reader = new FileReader();
reader.addEventListener("loadend", () => {
this.loadend(file, reader);
this.loadend(reader);
});
reader.readAsArrayBuffer(file);
};
// TODO: notify user of problems.
private loadend = async (file: File, reader: FileReader) => {
private loadend = async (reader: FileReader) => {
if (!(reader.result instanceof ArrayBuffer)) {
logger.error("Couldn't read file.");
return;
}
const quest = parse_quest(new BufferCursor(reader.result, true));
const quest = parse_quest(new ArrayBufferCursor(reader.result, Endianness.Little));
this.set_quest(quest);
if (quest) {
@ -83,7 +81,7 @@ class QuestEditorStore {
try {
const object_geom = await entity_store.get_object_geometry(object.type);
this.set_section_on_visible_quest_entity(object, sections);
object.object_3d = create_object_object_3d(object, object_geom);
object.object_3d = create_object_mesh(object, object_geom);
} catch (e) {
logger.error(e);
}
@ -94,7 +92,7 @@ class QuestEditorStore {
try {
const npc_geom = await entity_store.get_npc_geometry(npc.type);
this.set_section_on_visible_quest_entity(npc, sections);
npc.object_3d = create_npc_object_3d(npc, npc_geom);
npc.object_3d = create_npc_mesh(npc, npc_geom);
} catch (e) {
logger.error(e);
}
@ -130,14 +128,14 @@ class QuestEditorStore {
save_current_quest_to_file = (file_name: string) => {
if (this.current_quest) {
const cursor = write_quest_qst(this.current_quest, file_name);
const buffer = write_quest_qst(this.current_quest, file_name);
if (!file_name.endsWith(".qst")) {
file_name += ".qst";
}
const a = document.createElement("a");
a.href = URL.createObjectURL(new Blob([cursor.buffer]));
a.href = URL.createObjectURL(new Blob([buffer], { type: "application/octet-stream" }));
a.download = file_name;
document.body.appendChild(a);
a.click();

View File

@ -1,5 +1,4 @@
import fs from "fs";
import { BufferCursor } from "../src/data_formats/BufferCursor";
import { parse_item_pmt, ItemPmt } from "../src/data_formats/parsing/itempmt";
import { parse_unitxt, Unitxt } from "../src/data_formats/parsing/unitxt";
import {
@ -15,7 +14,9 @@ import { NpcTypes } from "../src/domain/NpcType";
import { BoxDropDto, EnemyDropDto, ItemTypeDto, QuestDto } from "../src/dto";
import { update_drops_from_website } from "./update_drops_ephinea";
import { parse_quest } from "../src/data_formats/parsing/quest";
import { BufferCursor } from "../src/data_formats/cursor/BufferCursor";
import Logger from "js-logger";
import { Endianness } from "../src/data_formats";
const logger = Logger.get("static/update_ephinea_data");
@ -119,7 +120,7 @@ function process_quest_dir(path: string, quests: QuestDto[]): void {
function process_quest(path: string, quests: QuestDto[]): void {
try {
const buf = fs.readFileSync(path);
const q = parse_quest(new BufferCursor(buf.buffer, true), true);
const q = parse_quest(new BufferCursor(buf, Endianness.Little), true);
if (q) {
logger.trace(`Processing quest "${q.name}".`);
@ -155,7 +156,7 @@ function load_unitxt(): Unitxt {
const buf = fs.readFileSync(`${RESOURCE_DIR}/client/data/unitxt_j.prs`);
const unitxt = parse_unitxt(new BufferCursor(buf.buffer, true));
const unitxt = parse_unitxt(new BufferCursor(buf, Endianness.Little));
// Strip custom Ephinea items until we have the Ephinea ItemPMT.bin.
unitxt[1].splice(177, 50);
unitxt[1].splice(639, 59);
@ -169,7 +170,7 @@ function update_items(item_names: string[]): ItemTypeDto[] {
const buf = fs.readFileSync(`${RESOURCE_DIR}/ship-config/param/ItemPMT.bin`);
const item_pmt = parse_item_pmt(new BufferCursor(buf.buffer, true));
const item_pmt = parse_item_pmt(new BufferCursor(buf, Endianness.Little));
const item_types = new Array<ItemTypeDto>();
const ids = new Set<number>();
@ -309,7 +310,7 @@ async function load_item_pt(): Promise<ItemPt> {
const table: ItemPt = [];
const buf = await fs.promises.readFile(`${RESOURCE_DIR}/ship-config/param/ItemPT.gsl`);
const cursor = new BufferCursor(buf.buffer, false);
const cursor = new BufferCursor(buf, Endianness.Big);
cursor.seek(0x3000);

View File

@ -1,6 +1,7 @@
import fs from "fs";
import Logger from "js-logger";
import { BufferCursor } from "../src/data_formats/BufferCursor";
import { Endianness } from "../src/data_formats";
import { BufferCursor } from "../src/data_formats/cursor/BufferCursor";
import { parse_rlc } from "../src/data_formats/parsing/rlc";
const logger = Logger.get("static/update_generic_data");
@ -26,10 +27,10 @@ function update(): void {
const buf = fs.readFileSync(`${RESOURCE_DIR}/plymotiondata.rlc`);
let i = 0;
for (const file of parse_rlc(new BufferCursor(buf, false))) {
for (const file of parse_rlc(new BufferCursor(buf, Endianness.Big))) {
fs.writeFileSync(
`${PUBLIC_DIR}/player/animation/animation_${(i++).toString().padStart(3, "0")}.njm`,
file.uint8_array_view()
new Uint8Array(file.array_buffer())
);
}