Added support for several formats and PRC decryption.

This commit is contained in:
Daan Vanden Bosch 2019-06-26 17:21:05 +02:00
parent ab9a6afd54
commit 056eb50b04
28 changed files with 1055 additions and 671 deletions

View File

@ -1,231 +0,0 @@
import { ArrayBufferCursor } from './ArrayBufferCursor';
// Checks size/position/capacity invariants on a freshly created cursor,
// then again after writing four bytes and seeking one byte back.
test('simple properties and invariants', () => {
const cursor = new ArrayBufferCursor(10, true);
expect(cursor.size).toBe(cursor.position + cursor.bytesLeft);
expect(cursor.size).toBeLessThanOrEqual(cursor.capacity);
expect(cursor.size).toBe(0);
expect(cursor.capacity).toBe(10);
expect(cursor.position).toBe(0);
expect(cursor.bytesLeft).toBe(0);
expect(cursor.littleEndian).toBe(true);
// Write 4 bytes, then step one byte back.
cursor.writeU8(99).writeU8(99).writeU8(99).writeU8(99);
cursor.seek(-1);
expect(cursor.size).toBe(cursor.position + cursor.bytesLeft);
expect(cursor.size).toBeLessThanOrEqual(cursor.capacity);
expect(cursor.size).toBe(4);
expect(cursor.capacity).toBe(10);
expect(cursor.position).toBe(3);
expect(cursor.bytesLeft).toBe(1);
expect(cursor.littleEndian).toBe(true);
});
// A big endian cursor decodes the most significant byte first; little endian the reverse.
test('correct byte order handling', () => {
const buffer = new Uint8Array([1, 2, 3, 4]).buffer;
expect(new ArrayBufferCursor(buffer, false).u32()).toBe(0x01020304);
expect(new ArrayBufferCursor(buffer, true).u32()).toBe(0x04030201);
});
// Writing past the initial capacity must transparently grow the backing buffer.
test('reallocation of internal buffer when necessary', () => {
const cursor = new ArrayBufferCursor(3, true);
cursor.writeU8(99).writeU8(99).writeU8(99).writeU8(99);
expect(cursor.size).toBe(4);
expect(cursor.capacity).toBeGreaterThanOrEqual(4);
expect(cursor.buffer.byteLength).toBeGreaterThanOrEqual(4);
});
// Generates a read test for the integer method with the given name.
// The method name encodes signedness and bit width (e.g. "u16" reads 2 bytes).
function testIntegerRead(methodName: string) {
test(methodName, () => {
// Bytes consumed per read, derived from the bit width in the method name.
const bytes = parseInt(methodName.replace(/^[iu](\d+)$/, '$1'), 10) / 8;
let testNumber1 = 0;
let testNumber2 = 0;
// The "false" arrays are for big endian tests and the "true" arrays for little endian tests.
const testArrays: { [index: string]: number[] } = { false: [], true: [] };
for (let i = 1; i <= bytes; ++i) {
// Generates numbers of the form 0x010203...
testNumber1 <<= 8;
testNumber1 |= i;
testArrays['false'].push(i);
testArrays['true'].unshift(i);
}
// The second number's bytes follow directly after the first in the buffer.
for (let i = bytes + 1; i <= 2 * bytes; ++i) {
testNumber2 <<= 8;
testNumber2 |= i;
testArrays['false'].push(i);
// Insert after the first number's bytes, building little endian order for the second.
testArrays['true'].splice(bytes, 0, i);
}
for (const littleEndian of [false, true]) {
const cursor = new ArrayBufferCursor(
new Uint8Array(testArrays[String(littleEndian)]).buffer, littleEndian);
// Two consecutive reads should decode both numbers and advance the position.
expect((cursor as any)[methodName]()).toBe(testNumber1);
expect(cursor.position).toBe(bytes);
expect((cursor as any)[methodName]()).toBe(testNumber2);
expect(cursor.position).toBe(2 * bytes);
}
});
}
testIntegerRead('u8');
testIntegerRead('u16');
testIntegerRead('u32');
testIntegerRead('i32');
// Reading byte arrays from arbitrary seek positions.
test('u8Array', () => {
const cursor = new ArrayBufferCursor(new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]).buffer, true);
expect(cursor.u8Array(3)).toEqual([1, 2, 3]);
expect(cursor.seekStart(2).u8Array(4)).toEqual([3, 4, 5, 6]);
expect(cursor.seekStart(5).u8Array(3)).toEqual([6, 7, 8]);
});
// Generates a test for a string read method; charSize is the bytes per character.
// Exercises combinations of the nullTerminated and dropRemaining flags.
function testStringRead(methodName: string, charSize: number) {
test(methodName, () => {
// Code points: 7, 'A', 'B', NUL, 0xFF, CR.
const charArray = [7, 65, 66, 0, 255, 13];
for (const littleEndian of [false, true]) {
// Widen each code point to charSize bytes in the given byte order.
const charArrayCopy = [];
for (const char of charArray) {
if (littleEndian) charArrayCopy.push(char);
for (let i = 0; i < charSize - 1; ++i) {
charArrayCopy.push(0);
}
if (!littleEndian) charArrayCopy.push(char);
}
const cursor = new ArrayBufferCursor(
new Uint8Array(charArrayCopy).buffer, littleEndian);
// Null-terminated, drop remaining: reads up to the NUL, position skips the full max length.
cursor.seekStart(charSize);
expect((cursor as any)[methodName](4 * charSize, true, true)).toBe('AB');
expect(cursor.position).toBe(5 * charSize);
cursor.seekStart(charSize);
expect((cursor as any)[methodName](2 * charSize, true, true)).toBe('AB');
expect(cursor.position).toBe(3 * charSize);
// Null-terminated, keep remaining: position stops just past the terminator (capped by max length).
cursor.seekStart(charSize);
expect((cursor as any)[methodName](4 * charSize, true, false)).toBe('AB');
expect(cursor.position).toBe(4 * charSize);
cursor.seekStart(charSize);
expect((cursor as any)[methodName](2 * charSize, true, false)).toBe('AB');
expect(cursor.position).toBe(3 * charSize);
// Not null-terminated: the NUL and 0xFF characters are part of the result.
cursor.seekStart(charSize);
expect((cursor as any)[methodName](4 * charSize, false, true)).toBe('AB\0ÿ');
expect(cursor.position).toBe(5 * charSize);
cursor.seekStart(charSize);
expect((cursor as any)[methodName](4 * charSize, false, false)).toBe('AB\0ÿ');
expect(cursor.position).toBe(5 * charSize);
}
});
}
testStringRead('stringAscii', 1);
testStringRead('stringUtf16', 2);
// Generates a write test for the unsigned integer method with the given name.
// The method name encodes the bit width (e.g. "writeU16" writes 2 bytes).
function testIntegerWrite(methodName: string) {
test(methodName, () => {
const bytes = parseInt(methodName.replace(/^write[IU](\d+)$/, '$1'), 10) / 8;
let testNumber1 = 0;
let testNumber2 = 0;
// The "false" arrays are for big endian tests and the "true" arrays for little endian tests.
const testArrays1: { [index: string]: number[] } = { false: [], true: [] };
const testArrays2: { [index: string]: number[] } = { false: [], true: [] };
for (let i = 1; i <= bytes; ++i) {
// Generates numbers of the form 0x010203...
testNumber1 <<= 8;
testNumber1 |= i;
testNumber2 <<= 8;
testNumber2 |= i + bytes;
testArrays1['false'].push(i);
testArrays1['true'].unshift(i);
testArrays2['false'].push(i + bytes);
testArrays2['true'].unshift(i + bytes);
}
for (const littleEndian of [false, true]) {
const cursor = new ArrayBufferCursor(0, littleEndian);
// Write the first number, then read the raw bytes back and compare.
(cursor as any)[methodName](testNumber1);
expect(cursor.position).toBe(bytes);
expect(cursor.seekStart(0).u8Array(bytes))
.toEqual(testArrays1[String(littleEndian)]);
expect(cursor.position).toBe(bytes);
// Write the second number; the buffer should now contain both encodings back to back.
(cursor as any)[methodName](testNumber2);
expect(cursor.position).toBe(2 * bytes);
expect(cursor.seekStart(0).u8Array(2 * bytes))
.toEqual(testArrays1[String(littleEndian)].concat(testArrays2[String(littleEndian)]));
}
});
}
testIntegerWrite('writeU8');
testIntegerWrite('writeU16');
testIntegerWrite('writeU32');
// Round-trips 32-bit floats through writeF32/f32 in both byte orders.
test('writeF32', () => {
for (const littleEndian of [false, true]) {
const cursor = new ArrayBufferCursor(0, littleEndian);
cursor.writeF32(1337.9001);
expect(cursor.position).toBe(4);
// seek(-4) rewinds to the value just written; reading restores the position.
expect(cursor.seek(-4).f32()).toBeCloseTo(1337.9001, 4);
expect(cursor.position).toBe(4);
cursor.writeF32(103.502);
expect(cursor.position).toBe(8);
expect(cursor.seek(-4).f32()).toBeCloseTo(103.502, 3);
}
});
// Writes via writeU8Array must be visible through a view of the backing buffer.
test('writeU8Array', () => {
for (const littleEndian of [false, true]) {
const bytes = 10;
const cursor = new ArrayBufferCursor(2 * bytes, littleEndian);
// Write-through view of the cursor's buffer (valid until reallocation).
const uint8Array = new Uint8Array(cursor.buffer);
const testArray1 = [];
const testArray2 = [];
for (let i = 1; i <= bytes; ++i) {
testArray1.push(i);
testArray2.push(i + bytes);
}
cursor.writeU8Array(testArray1);
expect(cursor.position).toBe(bytes);
for (let i = 0; i < bytes; ++i) {
expect(uint8Array[i]).toBe(testArray1[i]);
}
cursor.writeU8Array(testArray2);
expect(cursor.position).toBe(2 * bytes);
// The first write must remain intact after the second.
for (let i = 0; i < bytes; ++i) {
expect(uint8Array[i]).toBe(testArray1[i]);
}
for (let i = 0; i < bytes; ++i) {
expect(uint8Array[i + bytes]).toBe(testArray2[i]);
}
}
});

View File

@ -0,0 +1,231 @@
import { BufferCursor } from './BufferCursor';
// Sanity checks on a fresh cursor, and again after a few writes plus a backward seek.
test('simple properties and invariants', () => {
    const c = new BufferCursor(10, true);

    // Freshly constructed: empty, but with the requested capacity.
    expect(c.size).toBe(c.position + c.bytes_left);
    expect(c.size).toBeLessThanOrEqual(c.capacity);
    expect(c.size).toBe(0);
    expect(c.capacity).toBe(10);
    expect(c.position).toBe(0);
    expect(c.bytes_left).toBe(0);
    expect(c.little_endian).toBe(true);

    // Write 4 bytes, then step one byte back.
    c.write_u8(99).write_u8(99).write_u8(99).write_u8(99);
    c.seek(-1);

    expect(c.size).toBe(c.position + c.bytes_left);
    expect(c.size).toBeLessThanOrEqual(c.capacity);
    expect(c.size).toBe(4);
    expect(c.capacity).toBe(10);
    expect(c.position).toBe(3);
    expect(c.bytes_left).toBe(1);
    expect(c.little_endian).toBe(true);
});

// A big-endian cursor decodes the most significant byte first; little-endian the reverse.
test('correct byte order handling', () => {
    const data = new Uint8Array([1, 2, 3, 4]).buffer;
    expect(new BufferCursor(data, false).u32()).toBe(0x01020304);
    expect(new BufferCursor(data, true).u32()).toBe(0x04030201);
});

// Writing past the initial capacity must transparently grow the backing buffer.
test('reallocation of internal buffer when necessary', () => {
    const c = new BufferCursor(3, true);
    c.write_u8(99).write_u8(99).write_u8(99).write_u8(99);
    expect(c.size).toBe(4);
    expect(c.capacity).toBeGreaterThanOrEqual(4);
    expect(c.buffer.byteLength).toBeGreaterThanOrEqual(4);
});
/**
 * Generates a read test for the integer method with the given name.
 * The method name encodes signedness and bit width (e.g. "u16" reads 2 bytes).
 */
function test_integer_read(method_name: string) {
    test(method_name, () => {
        const bytes = parseInt(method_name.replace(/^[iu](\d+)$/, '$1'), 10) / 8;

        // Big-endian byte sequences 0x0102... and the values they decode to.
        const be_bytes_1 = Array.from({ length: bytes }, (_, i) => i + 1);
        const be_bytes_2 = Array.from({ length: bytes }, (_, i) => i + 1 + bytes);
        const number_1 = be_bytes_1.reduce((acc, b) => (acc << 8) | b, 0);
        const number_2 = be_bytes_2.reduce((acc, b) => (acc << 8) | b, 0);

        // Big endian stores the bytes as generated; little endian reverses each number's bytes.
        const test_arrays: { [index: string]: number[] } = {
            false: [...be_bytes_1, ...be_bytes_2],
            true: [...be_bytes_1].reverse().concat([...be_bytes_2].reverse()),
        };

        for (const little_endian of [false, true]) {
            const cursor = new BufferCursor(
                new Uint8Array(test_arrays[String(little_endian)]).buffer, little_endian);

            // Two consecutive reads should decode both numbers and advance the position.
            expect((cursor as any)[method_name]()).toBe(number_1);
            expect(cursor.position).toBe(bytes);
            expect((cursor as any)[method_name]()).toBe(number_2);
            expect(cursor.position).toBe(2 * bytes);
        }
    });
}

test_integer_read('u8');
test_integer_read('u16');
test_integer_read('u32');
test_integer_read('i32');

// Reading byte arrays from arbitrary seek positions.
test('u8_array', () => {
    const cursor = new BufferCursor(new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]).buffer, true);
    expect(cursor.u8_array(3)).toEqual([1, 2, 3]);
    expect(cursor.seek_start(2).u8_array(4)).toEqual([3, 4, 5, 6]);
    expect(cursor.seek_start(5).u8_array(3)).toEqual([6, 7, 8]);
});
function test_string_read(method_name: string, char_size: number) {
test(method_name, () => {
const char_array = [7, 65, 66, 0, 255, 13];
for (const little_endian of [false, true]) {
const char_array_copy = [];
for (const char of char_array) {
if (little_endian) char_array_copy.push(char);
for (let i = 0; i < char_size - 1; ++i) {
char_array_copy.push(0);
}
if (!little_endian) char_array_copy.push(char);
}
const cursor = new BufferCursor(
new Uint8Array(char_array_copy).buffer, little_endian);
cursor.seek_start(char_size);
expect((cursor as any)[method_name](4 * char_size, true, true)).toBe('AB');
expect(cursor.position).toBe(5 * char_size);
cursor.seek_start(char_size);
expect((cursor as any)[method_name](2 * char_size, true, true)).toBe('AB');
expect(cursor.position).toBe(3 * char_size);
cursor.seek_start(char_size);
expect((cursor as any)[method_name](4 * char_size, true, false)).toBe('AB');
expect(cursor.position).toBe(4 * char_size);
cursor.seek_start(char_size);
expect((cursor as any)[method_name](2 * char_size, true, false)).toBe('AB');
expect(cursor.position).toBe(3 * char_size);
cursor.seek_start(char_size);
expect((cursor as any)[method_name](4 * char_size, false, true)).toBe('AB\0ÿ');
expect(cursor.position).toBe(5 * char_size);
cursor.seek_start(char_size);
expect((cursor as any)[method_name](4 * char_size, false, false)).toBe('AB\0ÿ');
expect(cursor.position).toBe(5 * char_size);
}
});
}
test_string_read('string_ascii', 1);
test_string_read('string_utf16', 2);
/**
 * Generates a write test for the unsigned integer method with the given name.
 * The method name encodes the bit width (e.g. "write_u16" writes 2 bytes).
 */
function test_integer_write(method_name: string) {
    test(method_name, () => {
        const bytes = parseInt(method_name.replace(/^write_[iu](\d+)$/, '$1'), 10) / 8;

        // Big-endian byte sequences 0x0102... and the values they encode.
        const be_bytes_1 = Array.from({ length: bytes }, (_, i) => i + 1);
        const be_bytes_2 = Array.from({ length: bytes }, (_, i) => i + 1 + bytes);
        const number_1 = be_bytes_1.reduce((acc, b) => (acc << 8) | b, 0);
        const number_2 = be_bytes_2.reduce((acc, b) => (acc << 8) | b, 0);

        // Expected buffer contents per byte order ("false" = big endian, "true" = little endian).
        const expected_1: { [index: string]: number[] } = {
            false: be_bytes_1,
            true: [...be_bytes_1].reverse(),
        };
        const expected_2: { [index: string]: number[] } = {
            false: be_bytes_2,
            true: [...be_bytes_2].reverse(),
        };

        for (const little_endian of [false, true]) {
            const key = String(little_endian);
            const cursor = new BufferCursor(0, little_endian);

            // Write the first number, then read the raw bytes back and compare.
            (cursor as any)[method_name](number_1);
            expect(cursor.position).toBe(bytes);
            expect(cursor.seek_start(0).u8_array(bytes)).toEqual(expected_1[key]);
            expect(cursor.position).toBe(bytes);

            // Write the second number; the buffer should now hold both encodings back to back.
            (cursor as any)[method_name](number_2);
            expect(cursor.position).toBe(2 * bytes);
            expect(cursor.seek_start(0).u8_array(2 * bytes))
                .toEqual(expected_1[key].concat(expected_2[key]));
        }
    });
}

test_integer_write('write_u8');
test_integer_write('write_u16');
test_integer_write('write_u32');
// Round-trips 32-bit floats through write_f32/f32 in both byte orders.
test('write_f32', () => {
    for (const little_endian of [false, true]) {
        const cursor = new BufferCursor(0, little_endian);

        cursor.write_f32(1337.9001);
        expect(cursor.position).toBe(4);
        // seek(-4) rewinds to the value just written; reading restores the position.
        expect(cursor.seek(-4).f32()).toBeCloseTo(1337.9001, 4);
        expect(cursor.position).toBe(4);

        cursor.write_f32(103.502);
        expect(cursor.position).toBe(8);
        expect(cursor.seek(-4).f32()).toBeCloseTo(103.502, 3);
    }
});

// Writes via write_u8_array must be visible through a view of the backing buffer.
test('write_u8_array', () => {
    const count = 10;

    for (const little_endian of [false, true]) {
        const cursor = new BufferCursor(2 * count, little_endian);
        // Write-through view of the cursor's buffer (valid until reallocation).
        const view = new Uint8Array(cursor.buffer);
        const first = Array.from({ length: count }, (_, i) => i + 1);
        const second = Array.from({ length: count }, (_, i) => i + 1 + count);

        cursor.write_u8_array(first);
        expect(cursor.position).toBe(count);

        for (let i = 0; i < count; ++i) {
            expect(view[i]).toBe(first[i]);
        }

        cursor.write_u8_array(second);
        expect(cursor.position).toBe(2 * count);

        // The first write must remain intact after the second.
        for (let i = 0; i < count; ++i) {
            expect(view[i]).toBe(first[i]);
            expect(view[i + count]).toBe(second[i]);
        }
    }
});

View File

@ -13,7 +13,7 @@ const UTF_16LE_ENCODER = new TextEncoder('utf-16le');
* A cursor for reading and writing binary data.
* Uses an ArrayBuffer internally. This buffer is reallocated if and only if a write beyond the current capacity happens.
*/
export class ArrayBufferCursor {
export class BufferCursor {
private _size: number = 0;
/**
@ -28,7 +28,7 @@ export class ArrayBufferCursor {
throw new Error('Size should be non-negative.')
}
this.ensureCapacity(size);
this.ensure_capacity(size);
this._size = size;
}
@ -37,15 +37,25 @@ export class ArrayBufferCursor {
*/
position: number;
private _little_endian: boolean = false;
/**
* Byte order mode.
*/
littleEndian: boolean;
get little_endian(): boolean {
return this._little_endian;
}
set little_endian(little_endian: boolean) {
this._little_endian = little_endian;
this.utf16_decoder = little_endian ? UTF_16LE_DECODER : UTF_16BE_DECODER;
this.utf16_encoder = little_endian ? UTF_16LE_ENCODER : UTF_16BE_ENCODER;
}
/**
* The amount of bytes left to read from the current position onward.
*/
get bytesLeft(): number {
get bytes_left(): number {
return this.size - this.position;
}
@ -59,31 +69,31 @@ export class ArrayBufferCursor {
buffer: ArrayBuffer;
private dv: DataView;
private uint8Array: Uint8Array;
private utf16Decoder: TextDecoder;
private utf16Encoder: TextEncoder;
private utf16_decoder: TextDecoder = UTF_16BE_DECODER;
private utf16_encoder: TextEncoder = UTF_16BE_ENCODER;
/**
* @param bufferOrCapacity - If an ArrayBuffer is given, writes to the cursor will be reflected in this array buffer and vice versa until a cursor write that requires allocating a new internal buffer happens
* @param littleEndian - Decides in which byte order multi-byte integers and floats will be interpreted
* @param buffer_or_capacity - If an ArrayBuffer or Buffer is given, writes to the cursor will be reflected in this buffer and vice versa until a cursor write that requires allocating a new internal buffer happens
* @param little_endian - Decides in which byte order multi-byte integers and floats will be interpreted
*/
constructor(bufferOrCapacity: ArrayBuffer | number, littleEndian: boolean = false) {
if (typeof bufferOrCapacity === 'number') {
this.buffer = new ArrayBuffer(bufferOrCapacity);
constructor(buffer_or_capacity: ArrayBuffer | Buffer | number, little_endian: boolean = false) {
if (typeof buffer_or_capacity === 'number') {
this.buffer = new ArrayBuffer(buffer_or_capacity);
this.size = 0;
} else if (bufferOrCapacity instanceof ArrayBuffer) {
this.buffer = bufferOrCapacity;
this.size = this.buffer.byteLength;
} else if (buffer_or_capacity instanceof ArrayBuffer) {
this.buffer = buffer_or_capacity;
this.size = buffer_or_capacity.byteLength;
} else if (buffer_or_capacity instanceof Buffer) {
// Use the backing ArrayBuffer.
this.buffer = buffer_or_capacity.buffer;
this.size = buffer_or_capacity.byteLength;
} else {
throw new Error('buffer_or_capacity should be an ArrayBuffer or a number.');
throw new Error('buffer_or_capacity should be an ArrayBuffer, a Buffer or a number.');
}
this.littleEndian = littleEndian;
this.little_endian = little_endian;
this.position = 0;
this.dv = new DataView(this.buffer);
this.uint8Array = new Uint8Array(this.buffer, 0, this.size);
this.utf16Decoder = littleEndian ? UTF_16LE_DECODER : UTF_16BE_DECODER;
this.utf16Encoder = littleEndian ? UTF_16LE_ENCODER : UTF_16BE_ENCODER;
}
/**
@ -92,7 +102,7 @@ export class ArrayBufferCursor {
* @param offset - if positive, seeks forward by offset bytes, otherwise seeks backward by -offset bytes.
*/
seek(offset: number) {
return this.seekStart(this.position + offset);
return this.seek_start(this.position + offset);
}
/**
@ -100,7 +110,7 @@ export class ArrayBufferCursor {
*
* @param offset - greater or equal to 0 and smaller than size
*/
seekStart(offset: number) {
seek_start(offset: number) {
if (offset < 0 || offset > this.size) {
throw new Error(`Offset ${offset} is out of bounds.`);
}
@ -114,7 +124,7 @@ export class ArrayBufferCursor {
*
* @param offset - greater or equal to 0 and smaller than size
*/
seekEnd(offset: number) {
seek_end(offset: number) {
if (offset < 0 || offset > this.size) {
throw new Error(`Offset ${offset} is out of bounds.`);
}
@ -134,7 +144,7 @@ export class ArrayBufferCursor {
* Reads an unsigned 16-bit integer and increments position by 2.
*/
u16() {
const r = this.dv.getUint16(this.position, this.littleEndian);
const r = this.dv.getUint16(this.position, this.little_endian);
this.position += 2;
return r;
}
@ -143,7 +153,7 @@ export class ArrayBufferCursor {
* Reads an unsigned 32-bit integer and increments position by 4.
*/
u32() {
const r = this.dv.getUint32(this.position, this.littleEndian);
const r = this.dv.getUint32(this.position, this.little_endian);
this.position += 4;
return r;
}
@ -159,7 +169,7 @@ export class ArrayBufferCursor {
* Reads a signed 16-bit integer and increments position by 2.
*/
i16() {
const r = this.dv.getInt16(this.position, this.littleEndian);
const r = this.dv.getInt16(this.position, this.little_endian);
this.position += 2;
return r;
}
@ -168,7 +178,7 @@ export class ArrayBufferCursor {
* Reads a signed 32-bit integer and increments position by 4.
*/
i32() {
const r = this.dv.getInt32(this.position, this.littleEndian);
const r = this.dv.getInt32(this.position, this.little_endian);
this.position += 4;
return r;
}
@ -177,7 +187,7 @@ export class ArrayBufferCursor {
* Reads a 32-bit floating point number and increments position by 4.
*/
f32() {
const r = this.dv.getFloat32(this.position, this.littleEndian);
const r = this.dv.getFloat32(this.position, this.little_endian);
this.position += 4;
return r;
}
@ -185,7 +195,7 @@ export class ArrayBufferCursor {
/**
* Reads n unsigned 8-bit integers and increments position by n.
*/
u8Array(n: number): number[] {
u8_array(n: number): number[] {
const array = [];
for (let i = 0; i < n; ++i) array.push(this.dv.getUint8(this.position++));
return array;
@ -194,11 +204,11 @@ export class ArrayBufferCursor {
/**
* Reads n unsigned 16-bit integers and increments position by 2n.
*/
u16Array(n: number): number[] {
u16_array(n: number): number[] {
const array = [];
for (let i = 0; i < n; ++i) {
array.push(this.dv.getUint16(this.position, this.littleEndian));
array.push(this.dv.getUint16(this.position, this.little_endian));
this.position += 2;
}
@ -208,11 +218,11 @@ export class ArrayBufferCursor {
/**
* Reads n unsigned 32-bit integers and increments position by 4n.
*/
u32Array(n: number): number[] {
u32_array(n: number): number[] {
const array = [];
for (let i = 0; i < n; ++i) {
array.push(this.dv.getUint32(this.position, this.littleEndian));
array.push(this.dv.getUint32(this.position, this.little_endian));
this.position += 4;
}
@ -225,53 +235,53 @@ export class ArrayBufferCursor {
* @param size - the amount bytes to consume.
* @returns a new cursor containing size bytes.
*/
take(size: number): ArrayBufferCursor {
take(size: number): BufferCursor {
if (size < 0 || size > this.size - this.position) {
throw new Error(`Size ${size} out of bounds.`);
}
this.position += size;
return new ArrayBufferCursor(
this.buffer.slice(this.position - size, this.position), this.littleEndian);
return new BufferCursor(
this.buffer.slice(this.position - size, this.position), this.little_endian);
}
/**
* Consumes up to maxByteLength bytes.
*/
stringAscii(maxByteLength: number, nullTerminated: boolean, dropRemaining: boolean) {
const string_length = nullTerminated
? this.indexOfU8(0, maxByteLength) - this.position
: maxByteLength;
string_ascii(max_byte_length: number, null_terminated: boolean, drop_remaining: boolean) {
const string_length = null_terminated
? this.index_of_u8(0, max_byte_length) - this.position
: max_byte_length;
const r = ASCII_DECODER.decode(
new DataView(this.buffer, this.position, string_length));
this.position += dropRemaining
? maxByteLength
: Math.min(string_length + 1, maxByteLength);
this.position += drop_remaining
? max_byte_length
: Math.min(string_length + 1, max_byte_length);
return r;
}
/**
* Consumes up to maxByteLength bytes.
*/
stringUtf16(maxByteLength: number, nullTerminated: boolean, dropRemaining: boolean) {
const stringLength = nullTerminated
? this.indexOfU16(0, maxByteLength) - this.position
: Math.floor(maxByteLength / 2) * 2;
string_utf16(max_byte_length: number, null_terminated: boolean, drop_remaining: boolean) {
const string_length = null_terminated
? this.index_of_u16(0, max_byte_length) - this.position
: Math.floor(max_byte_length / 2) * 2;
const r = this.utf16Decoder.decode(
new DataView(this.buffer, this.position, stringLength));
this.position += dropRemaining
? maxByteLength
: Math.min(stringLength + 2, maxByteLength);
const r = this.utf16_decoder.decode(
new DataView(this.buffer, this.position, string_length));
this.position += drop_remaining
? max_byte_length
: Math.min(string_length + 2, max_byte_length);
return r;
}
/**
* Writes an unsigned 8-bit integer and increments position by 1. If necessary, grows the cursor and reallocates the underlying buffer.
*/
writeU8(value: number) {
this.ensureCapacity(this.position + 1);
write_u8(value: number) {
this.ensure_capacity(this.position + 1);
this.dv.setUint8(this.position++, value);
@ -285,10 +295,10 @@ export class ArrayBufferCursor {
/**
* Writes an unsigned 16-bit integer and increments position by 2. If necessary, grows the cursor and reallocates the underlying buffer.
*/
writeU16(value: number) {
this.ensureCapacity(this.position + 2);
write_u16(value: number) {
this.ensure_capacity(this.position + 2);
this.dv.setUint16(this.position, value, this.littleEndian);
this.dv.setUint16(this.position, value, this.little_endian);
this.position += 2;
if (this.position > this.size) {
@ -301,10 +311,10 @@ export class ArrayBufferCursor {
/**
* Writes an unsigned 32-bit integer and increments position by 4. If necessary, grows the cursor and reallocates the underlying buffer.
*/
writeU32(value: number) {
this.ensureCapacity(this.position + 4);
write_u32(value: number) {
this.ensure_capacity(this.position + 4);
this.dv.setUint32(this.position, value, this.littleEndian);
this.dv.setUint32(this.position, value, this.little_endian);
this.position += 4;
if (this.position > this.size) {
@ -317,10 +327,10 @@ export class ArrayBufferCursor {
/**
* Writes a signed 32-bit integer and increments position by 4. If necessary, grows the cursor and reallocates the underlying buffer.
*/
writeI32(value: number) {
this.ensureCapacity(this.position + 4);
write_i32(value: number) {
this.ensure_capacity(this.position + 4);
this.dv.setInt32(this.position, value, this.littleEndian);
this.dv.setInt32(this.position, value, this.little_endian);
this.position += 4;
if (this.position > this.size) {
@ -333,10 +343,10 @@ export class ArrayBufferCursor {
/**
* Writes a 32-bit floating point number and increments position by 4. If necessary, grows the cursor and reallocates the underlying buffer.
*/
writeF32(value: number) {
this.ensureCapacity(this.position + 4);
write_f32(value: number) {
this.ensure_capacity(this.position + 4);
this.dv.setFloat32(this.position, value, this.littleEndian);
this.dv.setFloat32(this.position, value, this.little_endian);
this.position += 4;
if (this.position > this.size) {
@ -349,8 +359,8 @@ export class ArrayBufferCursor {
/**
* Writes an array of unsigned 8-bit integers and increments position by the array's length. If necessary, grows the cursor and reallocates the underlying buffer.
*/
writeU8Array(array: number[]) {
this.ensureCapacity(this.position + array.length);
write_u8_array(array: number[]) {
this.ensure_capacity(this.position + array.length);
new Uint8Array(this.buffer, this.position).set(new Uint8Array(array));
this.position += array.length;
@ -365,8 +375,8 @@ export class ArrayBufferCursor {
/**
* Writes the contents of other and increments position by the size of other. If necessary, grows the cursor and reallocates the underlying buffer.
*/
writeCursor(other: ArrayBufferCursor) {
this.ensureCapacity(this.position + other.size);
write_cursor(other: BufferCursor) {
this.ensure_capacity(this.position + other.size);
new Uint8Array(this.buffer, this.position).set(new Uint8Array(other.buffer));
this.position += other.size;
@ -378,18 +388,18 @@ export class ArrayBufferCursor {
return this;
}
writeStringAscii(str: string, byteLength: number) {
write_string_ascii(str: string, byte_length: number) {
let i = 0;
for (const byte of ASCII_ENCODER.encode(str)) {
if (i < byteLength) {
this.writeU8(byte);
if (i < byte_length) {
this.write_u8(byte);
++i;
}
}
while (i < byteLength) {
this.writeU8(0);
while (i < byte_length) {
this.write_u8(0);
++i;
}
}
@ -397,50 +407,49 @@ export class ArrayBufferCursor {
/**
* @returns a Uint8Array that remains a write-through view of the underlying array buffer until the buffer is reallocated.
*/
uint8ArrayView(): Uint8Array {
return this.uint8Array;
uint8_array_view(): Uint8Array {
return new Uint8Array(this.buffer, 0, this.size);
}
private indexOfU8(value: number, maxByteLength: number) {
const maxPos = Math.min(this.position + maxByteLength, this.size);
private index_of_u8(value: number, max_byte_length: number) {
const max_pos = Math.min(this.position + max_byte_length, this.size);
for (let i = this.position; i < maxPos; ++i) {
for (let i = this.position; i < max_pos; ++i) {
if (this.dv.getUint8(i) === value) {
return i;
}
}
return this.position + maxByteLength;
return this.position + max_byte_length;
}
private indexOfU16(value: number, maxByteLength: number) {
const maxPos = Math.min(this.position + maxByteLength, this.size);
private index_of_u16(value: number, max_byte_length: number) {
const max_pos = Math.min(this.position + max_byte_length, this.size);
for (let i = this.position; i < maxPos; i += 2) {
if (this.dv.getUint16(i, this.littleEndian) === value) {
for (let i = this.position; i < max_pos; i += 2) {
if (this.dv.getUint16(i, this.little_endian) === value) {
return i;
}
}
return this.position + maxByteLength;
return this.position + max_byte_length;
}
/**
* Increases buffer size if necessary.
*/
private ensureCapacity(minNewSize: number) {
if (minNewSize > this.capacity) {
let newSize = this.capacity || minNewSize;
private ensure_capacity(min_new_size: number) {
if (min_new_size > this.capacity) {
let new_size = this.capacity || min_new_size;
do {
newSize *= 2;
} while (newSize < minNewSize);
new_size *= 2;
} while (new_size < min_new_size);
const newBuffer = new ArrayBuffer(newSize);
new Uint8Array(newBuffer).set(new Uint8Array(this.buffer, 0, this.size));
this.buffer = newBuffer;
const new_buffer = new ArrayBuffer(new_size);
new Uint8Array(new_buffer).set(new Uint8Array(this.buffer, 0, this.size));
this.buffer = new_buffer;
this.dv = new DataView(this.buffer);
this.uint8Array = new Uint8Array(this.buffer, 0, minNewSize);
}
}
}

View File

@ -2,54 +2,54 @@
* This code is based on the Sylverant PRS compression code written by Lawrence Sebald.
*/
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
export function compress(src: ArrayBufferCursor): ArrayBufferCursor {
export function compress(src: BufferCursor): BufferCursor {
const ctx = new Context(src);
const hashTable = new HashTable();
const hash_table = new HashTable();
if (ctx.src.size <= 3) {
// Make a literal copy of the input.
while (ctx.src.bytesLeft) {
ctx.setBit(1);
ctx.copyLiteral();
while (ctx.src.bytes_left) {
ctx.set_bit(1);
ctx.copy_literal();
}
} else {
// Add the first two "strings" to the hash table.
hashTable.put(hashTable.hash(ctx.src), 0);
hash_table.put(hash_table.hash(ctx.src), 0);
ctx.src.seek(1);
hashTable.put(hashTable.hash(ctx.src), 1);
hash_table.put(hash_table.hash(ctx.src), 1);
ctx.src.seek(-1);
// Copy the first two bytes as literals.
ctx.setBit(1);
ctx.copyLiteral();
ctx.setBit(1);
ctx.copyLiteral();
ctx.set_bit(1);
ctx.copy_literal();
ctx.set_bit(1);
ctx.copy_literal();
while (ctx.src.bytesLeft > 1) {
let [offset, mlen] = ctx.findLongestMatch(hashTable, false);
while (ctx.src.bytes_left > 1) {
let [offset, mlen] = ctx.find_longest_match(hash_table, false);
if (mlen > 0) {
ctx.src.seek(1);
const [offset2, mlen2] = ctx.findLongestMatch(hashTable, true);
const [offset2, mlen2] = ctx.find_longest_match(hash_table, true);
ctx.src.seek(-1);
// Did the "lazy match" produce something more compressed?
if (mlen2 > mlen) {
let copyLiteral = true;
let copy_literal = true;
// Check if it is a good idea to switch from a short match to a long one.
if (mlen >= 2 && mlen <= 5 && offset2 < offset) {
if (offset >= -256 && offset2 < -256) {
if (mlen2 - mlen < 3) {
copyLiteral = false;
copy_literal = false;
}
}
}
if (copyLiteral) {
ctx.setBit(1);
ctx.copyLiteral();
if (copy_literal) {
ctx.set_bit(1);
ctx.copy_literal();
continue;
}
}
@ -57,20 +57,20 @@ export function compress(src: ArrayBufferCursor): ArrayBufferCursor {
// What kind of match did we find?
if (mlen >= 2 && mlen <= 5 && offset >= -256) {
// Short match.
ctx.setBit(0);
ctx.setBit(0);
ctx.setBit((mlen - 2) & 0x02);
ctx.setBit((mlen - 2) & 0x01);
ctx.writeLiteral(offset & 0xFF);
ctx.addIntermediates(hashTable, mlen);
ctx.set_bit(0);
ctx.set_bit(0);
ctx.set_bit((mlen - 2) & 0x02);
ctx.set_bit((mlen - 2) & 0x01);
ctx.write_literal(offset & 0xFF);
ctx.add_intermediates(hash_table, mlen);
continue;
} else if (mlen >= 3 && mlen <= 9) {
// Long match, short length.
ctx.setBit(0);
ctx.setBit(1);
ctx.writeLiteral(((offset & 0x1F) << 3) | ((mlen - 2) & 0x07));
ctx.writeLiteral(offset >> 5);
ctx.addIntermediates(hashTable, mlen);
ctx.set_bit(0);
ctx.set_bit(1);
ctx.write_literal(((offset & 0x1F) << 3) | ((mlen - 2) & 0x07));
ctx.write_literal(offset >> 5);
ctx.add_intermediates(hash_table, mlen);
continue;
} else if (mlen > 9) {
// Long match, long length.
@ -78,31 +78,31 @@ export function compress(src: ArrayBufferCursor): ArrayBufferCursor {
mlen = 256;
}
ctx.setBit(0);
ctx.setBit(1);
ctx.writeLiteral((offset & 0x1F) << 3);
ctx.writeLiteral(offset >> 5);
ctx.writeLiteral(mlen - 1);
ctx.addIntermediates(hashTable, mlen);
ctx.set_bit(0);
ctx.set_bit(1);
ctx.write_literal((offset & 0x1F) << 3);
ctx.write_literal(offset >> 5);
ctx.write_literal(mlen - 1);
ctx.add_intermediates(hash_table, mlen);
continue;
}
}
// If we get here, we didn't find a suitable match, so just we just make a literal copy.
ctx.setBit(1);
ctx.copyLiteral();
ctx.set_bit(1);
ctx.copy_literal();
}
// If there's a left over byte at the end, make a literal copy.
if (ctx.src.bytesLeft) {
ctx.setBit(1);
ctx.copyLiteral();
if (ctx.src.bytes_left) {
ctx.set_bit(1);
ctx.copy_literal();
}
}
ctx.writeEof();
ctx.write_eof();
return ctx.dst.seekStart(0);
return ctx.dst.seek_start(0);
}
const MAX_WINDOW = 0x2000;
@ -110,31 +110,31 @@ const WINDOW_MASK = MAX_WINDOW - 1;
const HASH_SIZE = 1 << 8;
class Context {
src: ArrayBufferCursor;
dst: ArrayBufferCursor;
src: BufferCursor;
dst: BufferCursor;
flags: number;
flagBitsLeft: number;
flagOffset: number;
flag_bits_left: number;
flag_offset: number;
constructor(cursor: ArrayBufferCursor) {
constructor(cursor: BufferCursor) {
this.src = cursor;
this.dst = new ArrayBufferCursor(cursor.size, cursor.littleEndian);
this.dst = new BufferCursor(cursor.size, cursor.little_endian);
this.flags = 0;
this.flagBitsLeft = 0;
this.flagOffset = 0;
this.flag_bits_left = 0;
this.flag_offset = 0;
}
setBit(bit: number): void {
if (!this.flagBitsLeft--) {
set_bit(bit: number): void {
if (!this.flag_bits_left--) {
// Write out the flags to their position in the file, and store the next flags byte position.
const pos = this.dst.position;
this.dst
.seekStart(this.flagOffset)
.writeU8(this.flags)
.seekStart(pos)
.writeU8(0); // Placeholder for the next flags byte.
this.flagOffset = pos;
this.flagBitsLeft = 7;
.seek_start(this.flag_offset)
.write_u8(this.flags)
.seek_start(pos)
.write_u8(0); // Placeholder for the next flags byte.
this.flag_offset = pos;
this.flag_bits_left = 7;
}
this.flags >>>= 1;
@ -144,35 +144,35 @@ class Context {
}
}
copyLiteral(): void {
this.dst.writeU8(this.src.u8());
copy_literal(): void {
this.dst.write_u8(this.src.u8());
}
writeLiteral(value: number): void {
this.dst.writeU8(value);
write_literal(value: number): void {
this.dst.write_u8(value);
}
writeFinalFlags(): void {
this.flags >>>= this.flagBitsLeft;
this.flags >>>= this.flag_bits_left;
const pos = this.dst.position;
this.dst
.seekStart(this.flagOffset)
.writeU8(this.flags)
.seekStart(pos);
.seek_start(this.flag_offset)
.write_u8(this.flags)
.seek_start(pos);
}
writeEof(): void {
this.setBit(0);
this.setBit(1);
write_eof(): void {
this.set_bit(0);
this.set_bit(1);
this.writeFinalFlags();
this.writeLiteral(0);
this.writeLiteral(0);
this.write_literal(0);
this.write_literal(0);
}
matchLength(s2: number): number {
const array = this.src.uint8ArrayView();
match_length(s2: number): number {
const array = this.src.uint8_array_view();
let len = 0;
let s1 = this.src.position;
@ -185,20 +185,20 @@ class Context {
return len;
}
findLongestMatch(hashTable: HashTable, lazy: boolean): [number, number] {
if (!this.src.bytesLeft) {
find_longest_match(hash_table: HashTable, lazy: boolean): [number, number] {
if (!this.src.bytes_left) {
return [0, 0];
}
// Figure out where we're looking.
const hash = hashTable.hash(this.src);
const hash = hash_table.hash(this.src);
// If there is nothing in the table at that point, bail out now.
let entry = hashTable.get(hash);
let entry = hash_table.get(hash);
if (entry === null) {
if (!lazy) {
hashTable.put(hash, this.src.position);
hash_table.put(hash, this.src.position);
}
return [0, 0];
@ -206,10 +206,10 @@ class Context {
// If we'd go outside the window, truncate the hash chain now.
if (this.src.position - entry > MAX_WINDOW) {
hashTable.hashToOffset[hash] = null;
hash_table.hash_to_offset[hash] = null;
if (!lazy) {
hashTable.put(hash, this.src.position);
hash_table.put(hash, this.src.position);
}
return [0, 0];
@ -217,60 +217,60 @@ class Context {
// Ok, we have something in the hash table that matches the hash value.
// Follow the chain to see if we have an actual string match, and find the longest match.
let longestLength = 0;
let longestMatch = 0;
let longest_length = 0;
let longest_match = 0;
while (entry != null) {
const mlen = this.matchLength(entry);
const mlen = this.match_length(entry);
if (mlen > longestLength || mlen >= 256) {
longestLength = mlen;
longestMatch = entry;
if (mlen > longest_length || mlen >= 256) {
longest_length = mlen;
longest_match = entry;
}
// Follow the chain, making sure not to exceed a difference of MAX_WINDOW.
let entry2 = hashTable.prev(entry);
let entry_2 = hash_table.prev(entry);
if (entry2 !== null) {
if (entry_2 !== null) {
// If we'd go outside the window, truncate the hash chain now.
if (this.src.position - entry2 > MAX_WINDOW) {
hashTable.setPrev(entry, null);
entry2 = null;
if (this.src.position - entry_2 > MAX_WINDOW) {
hash_table.set_prev(entry, null);
entry_2 = null;
}
}
entry = entry2;
entry = entry_2;
}
// Add our current string to the hash.
if (!lazy) {
hashTable.put(hash, this.src.position);
hash_table.put(hash, this.src.position);
}
// Did we find a match?
const offset = longestLength > 0 ? longestMatch - this.src.position : 0;
return [offset, longestLength];
const offset = longest_length > 0 ? longest_match - this.src.position : 0;
return [offset, longest_length];
}
addIntermediates(hashTable: HashTable, len: number): void {
add_intermediates(hash_table: HashTable, len: number): void {
this.src.seek(1);
for (let i = 1; i < len; ++i) {
const hash = hashTable.hash(this.src);
hashTable.put(hash, this.src.position);
const hash = hash_table.hash(this.src);
hash_table.put(hash, this.src.position);
this.src.seek(1);
}
}
}
class HashTable {
hashToOffset: Array<number | null> = new Array(HASH_SIZE).fill(null);
maskedOffsetToPrev: Array<number | null> = new Array(MAX_WINDOW).fill(null);
hash_to_offset: Array<number | null> = new Array(HASH_SIZE).fill(null);
masked_offset_to_prev: Array<number | null> = new Array(MAX_WINDOW).fill(null);
hash(cursor: ArrayBufferCursor): number {
hash(cursor: BufferCursor): number {
let hash = cursor.u8();
if (cursor.bytesLeft) {
if (cursor.bytes_left) {
hash ^= cursor.u8();
cursor.seek(-1);
}
@ -280,19 +280,19 @@ class HashTable {
}
get(hash: number): number | null {
return this.hashToOffset[hash];
return this.hash_to_offset[hash];
}
put(hash: number, offset: number): void {
this.setPrev(offset, this.hashToOffset[hash]);
this.hashToOffset[hash] = offset;
this.set_prev(offset, this.hash_to_offset[hash]);
this.hash_to_offset[hash] = offset;
}
prev(offset: number): number | null {
return this.maskedOffsetToPrev[offset & WINDOW_MASK];
return this.masked_offset_to_prev[offset & WINDOW_MASK];
}
setPrev(offset: number, prevOffset: number | null): void {
this.maskedOffsetToPrev[offset & WINDOW_MASK] = prevOffset;
set_prev(offset: number, prevOffset: number | null): void {
this.masked_offset_to_prev[offset & WINDOW_MASK] = prevOffset;
}
}

View File

@ -1,33 +1,33 @@
/**
* This code is based on the Sylverant PRS decompression code written by Lawrence Sebald.
*/
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
import Logger from 'js-logger';
const logger = Logger.get('bin-data/compression/prs/decompress');
export function decompress(cursor: ArrayBufferCursor) {
export function decompress(cursor: BufferCursor) {
const ctx = new Context(cursor);
while (true) {
if (ctx.readFlagBit() === 1) {
if (ctx.read_flag_bit() === 1) {
// Single byte copy.
ctx.copyU8();
ctx.copy_u8();
} else {
// Multi byte copy.
let length;
let offset;
if (ctx.readFlagBit() === 0) {
if (ctx.read_flag_bit() === 0) {
// Short copy.
length = ctx.readFlagBit() << 1;
length |= ctx.readFlagBit();
length = ctx.read_flag_bit() << 1;
length |= ctx.read_flag_bit();
length += 2;
offset = ctx.readU8() - 256;
offset = ctx.read_u8() - 256;
} else {
// Long copy or end of file.
offset = ctx.readU16();
offset = ctx.read_u16();
// Two zero bytes implies that this is the end of the file.
if (offset === 0) {
@ -39,7 +39,7 @@ export function decompress(cursor: ArrayBufferCursor) {
offset >>>= 3;
if (length === 0) {
length = ctx.readU8();
length = ctx.read_u8();
length += 1;
} else {
length += 2;
@ -48,52 +48,52 @@ export function decompress(cursor: ArrayBufferCursor) {
offset -= 8192;
}
ctx.offsetCopy(offset, length);
ctx.offset_copy(offset, length);
}
}
return ctx.dst.seekStart(0);
return ctx.dst.seek_start(0);
}
class Context {
src: ArrayBufferCursor;
dst: ArrayBufferCursor;
src: BufferCursor;
dst: BufferCursor;
flags: number;
flagBitsLeft: number;
flag_bits_left: number;
constructor(cursor: ArrayBufferCursor) {
constructor(cursor: BufferCursor) {
this.src = cursor;
this.dst = new ArrayBufferCursor(4 * cursor.size, cursor.littleEndian);
this.dst = new BufferCursor(4 * cursor.size, cursor.little_endian);
this.flags = 0;
this.flagBitsLeft = 0;
this.flag_bits_left = 0;
}
readFlagBit() {
read_flag_bit() {
// Fetch a new flag byte when the previous byte has been processed.
if (this.flagBitsLeft === 0) {
this.flags = this.readU8();
this.flagBitsLeft = 8;
if (this.flag_bits_left === 0) {
this.flags = this.read_u8();
this.flag_bits_left = 8;
}
let bit = this.flags & 1;
this.flags >>>= 1;
this.flagBitsLeft -= 1;
this.flag_bits_left -= 1;
return bit;
}
copyU8() {
this.dst.writeU8(this.readU8());
copy_u8() {
this.dst.write_u8(this.read_u8());
}
readU8() {
read_u8() {
return this.src.u8();
}
readU16() {
read_u16() {
return this.src.u16();
}
offsetCopy(offset: number, length: number) {
offset_copy(offset: number, length: number) {
if (offset < -8192 || offset > 0) {
logger.error(`offset was ${offset}, should be between -8192 and 0.`);
}
@ -110,9 +110,9 @@ class Context {
this.dst.seek(-offset - bufSize);
for (let i = 0; i < Math.floor(length / bufSize); ++i) {
this.dst.writeCursor(buf);
this.dst.write_cursor(buf);
}
this.dst.writeCursor(buf.take(length % bufSize));
this.dst.write_cursor(buf.take(length % bufSize));
}
}

View File

@ -1,24 +1,24 @@
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
import { compress, decompress } from '../prs';
function testWithBytes(bytes: number[], expectedCompressedSize: number) {
const cursor = new ArrayBufferCursor(new Uint8Array(bytes).buffer, true);
const cursor = new BufferCursor(new Uint8Array(bytes).buffer, true);
for (const byte of bytes) {
cursor.writeU8(byte);
cursor.write_u8(byte);
}
cursor.seekStart(0);
cursor.seek_start(0);
const compressedCursor = compress(cursor);
expect(compressedCursor.size).toBe(expectedCompressedSize);
const testCursor = decompress(compressedCursor);
cursor.seekStart(0);
cursor.seek_start(0);
expect(testCursor.size).toBe(cursor.size);
while (cursor.bytesLeft) {
while (cursor.bytes_left) {
if (cursor.u8() !== testCursor.u8()) {
cursor.seek(-1);
testCursor.seek(-1);

View File

@ -0,0 +1,93 @@
import { BufferCursor } from "../BufferCursor";
/**
* Decrypts the bytes left in cursor.
*/
export function decrypt(key: number, cursor: BufferCursor): BufferCursor {
return new PrcDecryptor(key).decrypt(cursor);
}
/**
 * Stateful PRC decryptor.
 *
 * Builds a 56-entry key schedule from a 32-bit seed and XORs the input
 * stream against it, 32 bits at a time.
 */
class PrcDecryptor {
    private keys = new Uint32Array(56);
    // Index of the next key to use; starting at 56 forces a mix_keys() on first use.
    private key_pos = 56;

    constructor(key: number) {
        this.construct_keys(key);
    }

    /**
     * Decrypts the bytes left in cursor.
     *
     * @returns A new cursor whose size equals the number of input bytes decrypted.
     */
    decrypt(cursor: BufferCursor): BufferCursor {
        // Decryption operates on whole 32-bit words, so round the size up.
        const actual_size = cursor.bytes_left;
        const size = Math.ceil(actual_size / 4) * 4;
        const out_cursor = new BufferCursor(size, cursor.little_endian);

        for (let pos = 0; pos < size; pos += 4) {
            let u32: number;

            if (cursor.bytes_left >= 4) {
                u32 = cursor.u32();
            } else {
                // If the actual size of the cursor is not divisible by 4, "append" nul bytes until it is.
                const left_over = cursor.bytes_left;
                u32 = 0;

                for (let i = 0; i < left_over; i++) {
                    if (cursor.little_endian) {
                        u32 |= cursor.u8() << (8 * i);
                    } else {
                        u32 |= cursor.u8() << (8 * (3 - i));
                    }
                }
            }

            out_cursor.write_u32(this.decrypt_u32(u32));
        }

        // Truncate the output back to the real input size (drops the padding bytes).
        out_cursor.position = 0;
        out_cursor.size = actual_size;
        return out_cursor;
    }

    // Seeds the 56-entry key schedule from the 32-bit key, then mixes it 4 times.
    private construct_keys(key: number) {
        this.keys[55] = key;

        let idx;
        let tmp = 1;

        for (let i = 0x15; i <= 0x46E; i += 0x15) {
            idx = i % 55;
            key -= tmp;
            this.keys[idx] = tmp;
            // Read back through the Uint32Array so the value is truncated to 32 bits.
            tmp = key;
            key = this.keys[idx];
        }

        this.mix_keys();
        this.mix_keys();
        this.mix_keys();
        this.mix_keys();
    }

    // One mixing round: subtract entries at fixed offsets across the table.
    private mix_keys() {
        let ptr = 1;

        for (let i = 24; i; --i, ++ptr) {
            this.keys[ptr] -= this.keys[ptr + 31];
        }

        ptr = 25;

        for (let i = 31; i; --i, ++ptr) {
            this.keys[ptr] -= this.keys[ptr - 24];
        }
    }

    // XORs one 32-bit word with the next key, re-mixing when the table is exhausted.
    private decrypt_u32(data: number) {
        if (this.key_pos === 56) {
            this.mix_keys();
            this.key_pos = 1;
        }

        return data ^ this.keys[this.key_pos++];
    }
}

View File

@ -1,7 +1,7 @@
import { BufferGeometry } from 'three';
import { NpcType, ObjectType } from '../../domain';
import { getNpcData, getObjectData } from './binaryAssets';
import { ArrayBufferCursor } from '../ArrayBufferCursor';
import { BufferCursor } from '../BufferCursor';
import { parseNj, parseXj } from '../parsing/ninja';
const npcCache: Map<string, Promise<BufferGeometry>> = new Map();
@ -14,7 +14,7 @@ export function getNpcGeometry(npcType: NpcType): Promise<BufferGeometry> {
return geometry;
} else {
geometry = getNpcData(npcType).then(({ url, data }) => {
const cursor = new ArrayBufferCursor(data, true);
const cursor = new BufferCursor(data, true);
const object3d = url.endsWith('.nj') ? parseNj(cursor) : parseXj(cursor);
if (object3d) {
@ -36,7 +36,7 @@ export function getObjectGeometry(objectType: ObjectType): Promise<BufferGeometr
return geometry;
} else {
geometry = getObjectData(objectType).then(({ url, data }) => {
const cursor = new ArrayBufferCursor(data, true);
const cursor = new BufferCursor(data, true);
const object3d = url.endsWith('.nj') ? parseNj(cursor) : parseXj(cursor);
if (object3d) {

View File

@ -1,4 +1,4 @@
import { ArrayBufferCursor } from "../ArrayBufferCursor";
import { BufferCursor } from "../BufferCursor";
export type ItemPmt = {
statBoosts: PmtStatBoost[],
@ -94,21 +94,21 @@ export type PmtTool = {
reserved: number[],
}
export function parseItemPmt(cursor: ArrayBufferCursor): ItemPmt {
cursor.seekEnd(32);
export function parseItemPmt(cursor: BufferCursor): ItemPmt {
cursor.seek_end(32);
const mainTableOffset = cursor.u32();
const mainTableSize = cursor.u32();
// const mainTableCount = cursor.u32(); // Should be 1.
cursor.seekStart(mainTableOffset);
cursor.seek_start(mainTableOffset);
const compactTableOffsets = cursor.u16Array(mainTableSize);
const compactTableOffsets = cursor.u16_array(mainTableSize);
const tableOffsets: { offset: number, size: number }[] = [];
let expandedOffset: number = 0;
for (const compactOffset of compactTableOffsets) {
expandedOffset = expandedOffset + 4 * compactOffset;
cursor.seekStart(expandedOffset - 4);
cursor.seek_start(expandedOffset - 4);
const size = cursor.u32();
const offset = cursor.u32();
tableOffsets.push({ offset, size });
@ -137,8 +137,8 @@ export function parseItemPmt(cursor: ArrayBufferCursor): ItemPmt {
return itemPmt;
}
function parseStatBoosts(cursor: ArrayBufferCursor, offset: number, size: number): PmtStatBoost[] {
cursor.seekStart(offset);
function parseStatBoosts(cursor: BufferCursor, offset: number, size: number): PmtStatBoost[] {
cursor.seek_start(offset);
const statBoosts: PmtStatBoost[] = [];
for (let i = 0; i < size; i++) {
@ -153,8 +153,8 @@ function parseStatBoosts(cursor: ArrayBufferCursor, offset: number, size: number
return statBoosts;
}
function parseWeapons(cursor: ArrayBufferCursor, offset: number, size: number): PmtWeapon[] {
cursor.seekStart(offset);
function parseWeapons(cursor: BufferCursor, offset: number, size: number): PmtWeapon[] {
cursor.seek_start(offset);
const weapons: PmtWeapon[] = [];
for (let i = 0; i < size; i++) {
@ -182,7 +182,7 @@ function parseWeapons(cursor: ArrayBufferCursor, offset: number, size: number):
photonTrail2X: cursor.i8(),
photonTrail2Y: cursor.i8(),
photonType: cursor.i8(),
unknown1: cursor.u8Array(5),
unknown1: cursor.u8_array(5),
techBoost: cursor.u8(),
comboType: cursor.u8(),
});
@ -191,8 +191,8 @@ function parseWeapons(cursor: ArrayBufferCursor, offset: number, size: number):
return weapons;
}
function parseArmors(cursor: ArrayBufferCursor, offset: number, size: number): PmtArmor[] {
cursor.seekStart(offset);
function parseArmors(cursor: BufferCursor, offset: number, size: number): PmtArmor[] {
cursor.seek_start(offset);
const armors: PmtArmor[] = [];
for (let i = 0; i < size; i++) {
@ -224,12 +224,12 @@ function parseArmors(cursor: ArrayBufferCursor, offset: number, size: number): P
return armors;
}
function parseShields(cursor: ArrayBufferCursor, offset: number, size: number): PmtShield[] {
function parseShields(cursor: BufferCursor, offset: number, size: number): PmtShield[] {
return parseArmors(cursor, offset, size);
}
function parseUnits(cursor: ArrayBufferCursor, offset: number, size: number): PmtUnit[] {
cursor.seekStart(offset);
function parseUnits(cursor: BufferCursor, offset: number, size: number): PmtUnit[] {
cursor.seek_start(offset);
const units: PmtUnit[] = [];
for (let i = 0; i < size; i++) {
@ -241,15 +241,15 @@ function parseUnits(cursor: ArrayBufferCursor, offset: number, size: number): Pm
stat: cursor.i16(),
statAmount: cursor.i16(),
plusMinus: cursor.u8(),
reserved: cursor.u8Array(3),
reserved: cursor.u8_array(3),
});
}
return units;
}
function parseTools(cursor: ArrayBufferCursor, offset: number, size: number): PmtTool[] {
cursor.seekStart(offset);
function parseTools(cursor: BufferCursor, offset: number, size: number): PmtTool[] {
cursor.seek_start(offset);
const tools: PmtTool[] = [];
for (let i = 0; i < size; i++) {
@ -262,7 +262,7 @@ function parseTools(cursor: ArrayBufferCursor, offset: number, size: number): Pm
tech: cursor.i16(),
cost: cursor.i32(),
itemFlag: cursor.u8(),
reserved: cursor.u8Array(3),
reserved: cursor.u8_array(3),
});
}

View File

@ -6,7 +6,7 @@ import {
Quaternion,
Vector3
} from 'three';
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
import { parseNjModel, NjContext } from './nj';
import { parseXjModel, XjContext } from './xj';
@ -14,23 +14,23 @@ import { parseXjModel, XjContext } from './xj';
// - deal with multiple NJCM chunks
// - deal with other types of chunks
export function parseNj(cursor: ArrayBufferCursor): BufferGeometry | undefined {
export function parseNj(cursor: BufferCursor): BufferGeometry | undefined {
return parseNinja(cursor, 'nj');
}
export function parseXj(cursor: ArrayBufferCursor): BufferGeometry | undefined {
export function parseXj(cursor: BufferCursor): BufferGeometry | undefined {
return parseNinja(cursor, 'xj');
}
type Format = 'nj' | 'xj';
type Context = NjContext | XjContext;
function parseNinja(cursor: ArrayBufferCursor, format: Format): BufferGeometry | undefined {
while (cursor.bytesLeft) {
function parseNinja(cursor: BufferCursor, format: Format): BufferGeometry | undefined {
while (cursor.bytes_left) {
// Ninja uses a little endian variant of the IFF format.
// IFF files contain chunks preceded by an 8-byte header.
// The header consists of 4 ASCII characters for the "Type ID" and a 32-bit integer specifying the chunk size.
const iffTypeId = cursor.stringAscii(4, false, false);
const iffTypeId = cursor.string_ascii(4, false, false);
const iffChunkSize = cursor.u32();
if (iffTypeId === 'NJCM') {
@ -41,8 +41,8 @@ function parseNinja(cursor: ArrayBufferCursor, format: Format): BufferGeometry |
}
}
function parseNjcm(cursor: ArrayBufferCursor, format: Format): BufferGeometry | undefined {
if (cursor.bytesLeft) {
function parseNjcm(cursor: BufferCursor, format: Format): BufferGeometry | undefined {
if (cursor.bytes_left) {
let context: Context;
if (format === 'nj') {
@ -68,7 +68,7 @@ function parseNjcm(cursor: ArrayBufferCursor, format: Format): BufferGeometry |
}
function parseSiblingObjects(
cursor: ArrayBufferCursor,
cursor: BufferCursor,
parentMatrix: Matrix4,
context: Context
): void {
@ -103,17 +103,17 @@ function parseSiblingObjects(
.premultiply(parentMatrix);
if (modelOffset && !hidden) {
cursor.seekStart(modelOffset);
cursor.seek_start(modelOffset);
parseModel(cursor, matrix, context);
}
if (childOffset && !breakChildTrace) {
cursor.seekStart(childOffset);
cursor.seek_start(childOffset);
parseSiblingObjects(cursor, matrix, context);
}
if (siblingOffset) {
cursor.seekStart(siblingOffset);
cursor.seek_start(siblingOffset);
parseSiblingObjects(cursor, parentMatrix, context);
}
}
@ -130,7 +130,7 @@ function createBufferGeometry(context: Context): BufferGeometry {
return geometry;
}
function parseModel(cursor: ArrayBufferCursor, matrix: Matrix4, context: Context): void {
function parseModel(cursor: BufferCursor, matrix: Matrix4, context: Context): void {
if (context.format === 'nj') {
parseNjModel(cursor, matrix, context);
} else {

View File

@ -1,5 +1,5 @@
import { Matrix3, Matrix4, Vector3 } from 'three';
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
import Logger from 'js-logger';
const logger = Logger.get('bin-data/parsing/ninja/nj');
@ -39,7 +39,7 @@ interface ChunkTriangleStrip {
indices: number[];
}
export function parseNjModel(cursor: ArrayBufferCursor, matrix: Matrix4, context: NjContext): void {
export function parseNjModel(cursor: BufferCursor, matrix: Matrix4, context: NjContext): void {
const { positions, normals, cachedChunkOffsets, vertices } = context;
const vlistOffset = cursor.u32(); // Vertex list
@ -48,7 +48,7 @@ export function parseNjModel(cursor: ArrayBufferCursor, matrix: Matrix4, context
const normalMatrix = new Matrix3().getNormalMatrix(matrix);
if (vlistOffset) {
cursor.seekStart(vlistOffset);
cursor.seek_start(vlistOffset);
for (const chunk of parseChunks(cursor, cachedChunkOffsets, true)) {
if (chunk.chunkType === 'VERTEX') {
@ -64,7 +64,7 @@ export function parseNjModel(cursor: ArrayBufferCursor, matrix: Matrix4, context
}
if (plistOffset) {
cursor.seekStart(plistOffset);
cursor.seek_start(plistOffset);
for (const chunk of parseChunks(cursor, cachedChunkOffsets, false)) {
if (chunk.chunkType === 'STRIP') {
@ -98,7 +98,7 @@ export function parseNjModel(cursor: ArrayBufferCursor, matrix: Matrix4, context
}
}
function parseChunks(cursor: ArrayBufferCursor, cachedChunkOffsets: number[], wideEndChunks: boolean): Array<{
function parseChunks(cursor: BufferCursor, cachedChunkOffsets: number[], wideEndChunks: boolean): Array<{
chunkType: string,
chunkSubType: string | null,
chunkTypeId: number,
@ -134,7 +134,7 @@ function parseChunks(cursor: ArrayBufferCursor, cachedChunkOffsets: number[], wi
data = {
storeIndex: flags
};
cursor.seekStart(cachedChunkOffsets[data.storeIndex]);
cursor.seek_start(cachedChunkOffsets[data.storeIndex]);
chunks.splice(chunks.length, 0, ...parseChunks(cursor, cachedChunkOffsets, wideEndChunks));
}
} else if (8 <= chunkTypeId && chunkTypeId <= 9) {
@ -164,7 +164,7 @@ function parseChunks(cursor: ArrayBufferCursor, cachedChunkOffsets: number[], wi
size = 2 + 2 * cursor.u16();
}
cursor.seekStart(chunkStartPosition + size);
cursor.seek_start(chunkStartPosition + size);
chunks.push({
chunkType,
@ -177,7 +177,7 @@ function parseChunks(cursor: ArrayBufferCursor, cachedChunkOffsets: number[], wi
return chunks;
}
function parseChunkVertex(cursor: ArrayBufferCursor, chunkTypeId: number, flags: number): ChunkVertex[] {
function parseChunkVertex(cursor: BufferCursor, chunkTypeId: number, flags: number): ChunkVertex[] {
// There are apparently 4 different sets of vertices, ignore all but set 0.
if ((flags & 0b11) !== 0) {
return [];
@ -250,7 +250,7 @@ function parseChunkVertex(cursor: ArrayBufferCursor, chunkTypeId: number, flags:
return vertices;
}
function parseChunkTriangleStrip(cursor: ArrayBufferCursor, chunkTypeId: number): ChunkTriangleStrip[] {
function parseChunkTriangleStrip(cursor: BufferCursor, chunkTypeId: number): ChunkTriangleStrip[] {
const userOffsetAndStripCount = cursor.u16();
const userFlagsSize = userOffsetAndStripCount >>> 14;
const stripCount = userOffsetAndStripCount & 0x3FFF;

View File

@ -0,0 +1,190 @@
import Logger from 'js-logger';
import { BufferCursor } from '../../BufferCursor';
const logger = Logger.get('bin-data/parsing/ninja/njm2');
/**
 * An animation "action": the object it animates plus its motion data.
 */
export type NjAction = {
    // Offset of the object this action applies to (not resolved by this parser).
    object_offset: number,
    motion: NjMotion
}

/**
 * A set of keyframed motion channels.
 */
export type NjMotion = {
    motion_data: NjMotionData[],
    frame_count: number,
    // Bit mask describing which motion channels (POS, ANG, SCL, VEC, ...) are present.
    type: number,
    // Interpolation mode: linear, spline, user function or sampling mask.
    interpolation: number,
    element_count: number,
}

export type NjMotionData = {
    // One keyframe list per motion channel present in `type`, in channel order.
    keyframes: NjKeyframe[][],
    // Keyframe count per channel, in the same order as `keyframes`.
    keyframe_count: number[],
}

export type NjKeyframe = NjKeyframeF | NjKeyframeA

/**
 * Used for parallel motion (POS), scale (SCL) and vector (VEC).
 */
export type NjKeyframeF = {
    frame: number,
    value: [number, number, number],
}

/**
 * Used for rotation (ANG). Angles are stored as signed 16-bit values.
 */
export type NjKeyframeA = {
    frame: number,
    value: [number, number, number],
}
/**
 * Parses the format used by plymotiondata.rlc.
 *
 * Follows two levels of indirection from the end of the file to reach the
 * action structure.
 */
export function parse_njm2(cursor: BufferCursor): NjAction {
    // The last 16 bytes of the file hold a pointer to a pointer to the action.
    cursor.seek_end(16);

    const offset1 = cursor.u32();
    log_offset('offset1', offset1);

    const action_offset = cursor.seek_start(offset1).u32();
    log_offset('action_offset', action_offset);

    cursor.seek_start(action_offset);
    return parse_action(cursor);
}
// Reads an action header (object offset + motion offset) and parses the referenced motion.
function parse_action(cursor: BufferCursor): NjAction {
    const object_offset = cursor.u32();
    const motion_offset = cursor.u32();
    log_offset('object offset', object_offset);
    log_offset('motion offset', motion_offset);

    cursor.seek_start(motion_offset);
    return { object_offset, motion: parse_motion(cursor) };
}
function parse_motion(cursor: BufferCursor): NjMotion {
// Points to an array the size of the total amount of objects in the object tree.
const mdata_offset = cursor.u32();
const frame_count = cursor.u32();
const type = cursor.u16();
const inp_fn = cursor.u16();
// Linear, spline, user function or sampling mask.
const interpolation = (inp_fn & 0b11000000) >> 6;
const element_count = inp_fn & 0b1111;
let motion_data: NjMotionData = {
keyframes: [],
keyframe_count: [],
};
const size = count_set_bits(type);
cursor.seek_start(mdata_offset);
const keyframe_offsets: number[] = [];
const keyframe_counts: number[] = [];
for (let i = 0; i < size; i++) {
keyframe_offsets.push(cursor.u32());
}
for (let i = 0; i < size; i++) {
const count = cursor.u32();
motion_data.keyframe_count.push(count);
keyframe_counts.push(count);
}
// NJD_MTYPE_POS_0
if ((type & (1 << 0)) !== 0) {
cursor.seek_start(keyframe_offsets.shift()!);
motion_data.keyframes.push(
parse_motion_data_f(cursor, keyframe_counts.shift()!)
);
}
// NJD_MTYPE_ANG_1
if ((type & (1 << 1)) !== 0) {
cursor.seek_start(keyframe_offsets.shift()!);
motion_data.keyframes.push(
parse_motion_data_a(cursor, keyframe_counts.shift()!)
);
}
// NJD_MTYPE_SCL_2
if ((type & (1 << 2)) !== 0) {
cursor.seek_start(keyframe_offsets.shift()!);
motion_data.keyframes.push(
parse_motion_data_f(cursor, keyframe_counts.shift()!)
);
}
// NJD_MTYPE_VEC_3
if ((type & (1 << 3)) !== 0) {
cursor.seek_start(keyframe_offsets.shift()!);
motion_data.keyframes.push(
parse_motion_data_f(cursor, keyframe_counts.shift()!)
);
}
// NJD_MTYPE_TARGET_3
if ((type & (1 << 6)) !== 0) {
cursor.seek_start(keyframe_offsets.shift()!);
motion_data.keyframes.push(
parse_motion_data_f(cursor, keyframe_counts.shift()!)
);
}
// TODO: all NJD_MTYPE's
return {
motion_data: [motion_data],
frame_count,
type,
interpolation,
element_count
};
}
// Reads `count` float keyframes: a u32 frame number followed by three f32 components.
function parse_motion_data_f(cursor: BufferCursor, count: number): NjKeyframeF[] {
    const frames: NjKeyframeF[] = [];
    let remaining = count;

    while (remaining-- > 0) {
        const frame = cursor.u32();
        const value: [number, number, number] = [cursor.f32(), cursor.f32(), cursor.f32()];
        frames.push({ frame, value });
    }

    return frames;
}
// Reads `count` angle keyframes: a u16 frame number followed by three i16 angles.
function parse_motion_data_a(cursor: BufferCursor, count: number): NjKeyframeA[] {
    const frames: NjKeyframeA[] = [];
    let remaining = count;

    while (remaining-- > 0) {
        const frame = cursor.u16();
        const value: [number, number, number] = [cursor.i16(), cursor.i16(), cursor.i16()];
        frames.push({ frame, value });
    }

    return frames;
}
// Logs a named offset in upper-case hexadecimal for debugging.
function log_offset(name: string, offset: number) {
    const hex = offset.toString(16).toUpperCase();
    logger.debug(`${name}: 0x${hex}`);
}
/**
 * Counts the number of set bits in `n`, treated as a 32-bit integer.
 *
 * Uses an unsigned right shift so that negative inputs terminate (a signed
 * shift would preserve the sign bit and loop forever).
 */
function count_set_bits(n: number): number {
    let count = 0;

    while (n) {
        count += n & 1;
        n >>>= 1;
    }

    return count;
}

View File

@ -1,5 +1,5 @@
import { Matrix3, Matrix4, Vector3 } from 'three';
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
// TODO:
// - textures
@ -14,7 +14,7 @@ export interface XjContext {
indices: number[];
}
export function parseXjModel(cursor: ArrayBufferCursor, matrix: Matrix4, context: XjContext): void {
export function parseXjModel(cursor: BufferCursor, matrix: Matrix4, context: XjContext): void {
const { positions, normals, indices } = context;
cursor.seek(4); // Flags according to QEdit, seemingly always 0.
@ -30,14 +30,14 @@ export function parseXjModel(cursor: ArrayBufferCursor, matrix: Matrix4, context
const indexOffset = positions.length / 3;
if (vertexInfoListOffset) {
cursor.seekStart(vertexInfoListOffset);
cursor.seek_start(vertexInfoListOffset);
cursor.seek(4); // Possibly the vertex type.
const vertexListOffset = cursor.u32();
const vertexSize = cursor.u32();
const vertexCount = cursor.u32();
for (let i = 0; i < vertexCount; ++i) {
cursor.seekStart(vertexListOffset + i * vertexSize);
cursor.seek_start(vertexListOffset + i * vertexSize);
const position = new Vector3(
cursor.f32(),
cursor.f32(),
@ -90,7 +90,7 @@ export function parseXjModel(cursor: ArrayBufferCursor, matrix: Matrix4, context
}
function parseTriangleStripList(
cursor: ArrayBufferCursor,
cursor: BufferCursor,
triangleStripListOffset: number,
triangleStripCount: number,
positions: number[],
@ -99,14 +99,14 @@ function parseTriangleStripList(
indexOffset: number
): void {
for (let i = 0; i < triangleStripCount; ++i) {
cursor.seekStart(triangleStripListOffset + i * 20);
cursor.seek_start(triangleStripListOffset + i * 20);
cursor.seek(8); // Skip material information.
const indexListOffset = cursor.u32();
const indexCount = cursor.u32();
// Ignoring 4 bytes.
cursor.seekStart(indexListOffset);
const stripIndices = cursor.u16Array(indexCount);
cursor.seek_start(indexListOffset);
const stripIndices = cursor.u16_array(indexCount);
let clockwise = true;
for (let j = 2; j < stripIndices.length; ++j) {

View File

@ -0,0 +1,24 @@
import { BufferCursor } from "../BufferCursor";
import { decrypt } from "../encryption/prc";
import { decompress } from "../compression/prs";
import Logger from 'js-logger';
const logger = Logger.get('bin-data/parsing/prc');
/**
 * Decrypts and decompresses a .prc file.
 *
 * Layout: u32 expected decompressed size, u32 encryption key, followed by the
 * PRC-encrypted, PRS-compressed payload.
 *
 * @returns A cursor with the decrypted, decompressed data.
 */
export function parse_prc(cursor: BufferCursor): BufferCursor {
    // Unencrypted, decompressed size.
    const size = cursor.u32();
    const key = cursor.u32();
    const out = decompress(decrypt(key, cursor));

    if (out.size !== size) {
        logger.warn(
            `Size of decrypted, decompressed file was ${out.size} instead of expected ${size}.`
        );
    }

    return out;
}

View File

@ -1,5 +1,5 @@
import * as fs from 'fs';
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
import * as prs from '../../compression/prs';
import { parseBin, writeBin } from './bin';
@ -8,15 +8,15 @@ import { parseBin, writeBin } from './bin';
*/
test('parseBin and writeBin', () => {
const origBuffer = fs.readFileSync('test/resources/quest118_e.bin').buffer;
const origBin = prs.decompress(new ArrayBufferCursor(origBuffer, true));
const origBin = prs.decompress(new BufferCursor(origBuffer, true));
const testBin = writeBin(parseBin(origBin));
origBin.seekStart(0);
origBin.seek_start(0);
expect(testBin.size).toBe(origBin.size);
let match = true;
while (origBin.bytesLeft) {
while (origBin.bytes_left) {
if (testBin.u8() !== origBin.u8()) {
match = false;
break;

View File

@ -1,4 +1,4 @@
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
import Logger from 'js-logger';
const logger = Logger.get('bin-data/parsing/quest/bin');
@ -11,19 +11,19 @@ export interface BinFile {
longDescription: string;
functionOffsets: number[];
instructions: Instruction[];
data: ArrayBufferCursor;
data: BufferCursor;
}
export function parseBin(cursor: ArrayBufferCursor, lenient: boolean = false): BinFile {
export function parseBin(cursor: BufferCursor, lenient: boolean = false): BinFile {
const objectCodeOffset = cursor.u32();
const functionOffsetTableOffset = cursor.u32(); // Relative offsets
const size = cursor.u32();
cursor.seek(4); // Always seems to be 0xFFFFFFFF
const questNumber = cursor.u32();
const language = cursor.u32();
const questName = cursor.stringUtf16(64, true, true);
const shortDescription = cursor.stringUtf16(256, true, true);
const longDescription = cursor.stringUtf16(576, true, true);
const questName = cursor.string_utf16(64, true, true);
const shortDescription = cursor.string_utf16(256, true, true);
const longDescription = cursor.string_utf16(576, true, true);
if (size !== cursor.size) {
logger.warn(`Value ${size} in bin size field does not match actual size ${cursor.size}.`);
@ -32,7 +32,7 @@ export function parseBin(cursor: ArrayBufferCursor, lenient: boolean = false): B
const functionOffsetCount = Math.floor(
(cursor.size - functionOffsetTableOffset) / 4);
cursor.seekStart(functionOffsetTableOffset);
cursor.seek_start(functionOffsetTableOffset);
const functionOffsets = [];
for (let i = 0; i < functionOffsetCount; ++i) {
@ -40,7 +40,7 @@ export function parseBin(cursor: ArrayBufferCursor, lenient: boolean = false): B
}
const instructions = parseObjectCode(
cursor.seekStart(objectCodeOffset).take(functionOffsetTableOffset - objectCodeOffset),
cursor.seek_start(objectCodeOffset).take(functionOffsetTableOffset - objectCodeOffset),
lenient
);
@ -52,12 +52,12 @@ export function parseBin(cursor: ArrayBufferCursor, lenient: boolean = false): B
longDescription,
functionOffsets,
instructions,
data: cursor.seekStart(0).take(cursor.size)
data: cursor.seek_start(0).take(cursor.size)
};
}
export function writeBin({ data }: { data: ArrayBufferCursor }): ArrayBufferCursor {
return data.seekStart(0);
export function writeBin({ data }: { data: BufferCursor }): BufferCursor {
return data.seek_start(0);
}
export interface Instruction {
@ -67,11 +67,11 @@ export interface Instruction {
size: number;
}
function parseObjectCode(cursor: ArrayBufferCursor, lenient: boolean): Instruction[] {
function parseObjectCode(cursor: BufferCursor, lenient: boolean): Instruction[] {
const instructions = [];
try {
while (cursor.bytesLeft) {
while (cursor.bytes_left) {
const mainOpcode = cursor.u8();
let opcode;
let opsize;
@ -144,7 +144,7 @@ function parseObjectCode(cursor: ArrayBufferCursor, lenient: boolean): Instructi
}
function parseInstructionArguments(
cursor: ArrayBufferCursor,
cursor: BufferCursor,
mask: string
): { args: any[], size: number } {
const oldPos = cursor.position;

View File

@ -1,5 +1,5 @@
import * as fs from 'fs';
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
import * as prs from '../../compression/prs';
import { parseDat, writeDat } from './dat';
@ -8,15 +8,15 @@ import { parseDat, writeDat } from './dat';
*/
test('parseDat and writeDat', () => {
const origBuffer = fs.readFileSync('test/resources/quest118_e.dat').buffer;
const origDat = prs.decompress(new ArrayBufferCursor(origBuffer, true));
const origDat = prs.decompress(new BufferCursor(origBuffer, true));
const testDat = writeDat(parseDat(origDat));
origDat.seekStart(0);
origDat.seek_start(0);
expect(testDat.size).toBe(origDat.size);
let match = true;
while (origDat.bytesLeft) {
while (origDat.bytes_left) {
if (testDat.u8() !== origDat.u8()) {
match = false;
break;
@ -31,9 +31,9 @@ test('parseDat and writeDat', () => {
*/
test('parse, modify and write DAT', () => {
const origBuffer = fs.readFileSync('./test/resources/quest118_e.dat').buffer;
const origDat = prs.decompress(new ArrayBufferCursor(origBuffer, true));
const origDat = prs.decompress(new BufferCursor(origBuffer, true));
const testParsed = parseDat(origDat);
origDat.seekStart(0);
origDat.seek_start(0);
testParsed.objs[9].position.x = 13;
testParsed.objs[9].position.y = 17;
@ -45,7 +45,7 @@ test('parse, modify and write DAT', () => {
let match = true;
while (origDat.bytesLeft) {
while (origDat.bytes_left) {
if (origDat.position === 16 + 9 * 68 + 16) {
origDat.seek(12);

View File

@ -1,5 +1,5 @@
import { groupBy } from 'lodash';
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
import Logger from 'js-logger';
const logger = Logger.get('bin-data/parsing/quest/dat');
@ -38,12 +38,12 @@ export interface DatUnknown {
data: number[];
}
export function parseDat(cursor: ArrayBufferCursor): DatFile {
export function parseDat(cursor: BufferCursor): DatFile {
const objs: DatObject[] = [];
const npcs: DatNpc[] = [];
const unknowns: DatUnknown[] = [];
while (cursor.bytesLeft) {
while (cursor.bytes_left) {
const entityType = cursor.u32();
const totalSize = cursor.u32();
const areaId = cursor.u32();
@ -62,9 +62,9 @@ export function parseDat(cursor: ArrayBufferCursor): DatFile {
for (let i = 0; i < objectCount; ++i) {
const typeId = cursor.u16();
const unknown1 = cursor.u8Array(10);
const unknown1 = cursor.u8_array(10);
const sectionId = cursor.u16();
const unknown2 = cursor.u8Array(2);
const unknown2 = cursor.u8_array(2);
const x = cursor.f32();
const y = cursor.f32();
const z = cursor.f32();
@ -72,7 +72,7 @@ export function parseDat(cursor: ArrayBufferCursor): DatFile {
const rotationY = cursor.i32() / 0xFFFF * 2 * Math.PI;
const rotationZ = cursor.i32() / 0xFFFF * 2 * Math.PI;
// The next 3 floats seem to be scale values.
const unknown3 = cursor.u8Array(28);
const unknown3 = cursor.u8_array(28);
objs.push({
typeId,
@ -96,20 +96,20 @@ export function parseDat(cursor: ArrayBufferCursor): DatFile {
for (let i = 0; i < npcCount; ++i) {
const typeId = cursor.u16();
const unknown1 = cursor.u8Array(10);
const unknown1 = cursor.u8_array(10);
const sectionId = cursor.u16();
const unknown2 = cursor.u8Array(6);
const unknown2 = cursor.u8_array(6);
const x = cursor.f32();
const y = cursor.f32();
const z = cursor.f32();
const rotationX = cursor.i32() / 0xFFFF * 2 * Math.PI;
const rotationY = cursor.i32() / 0xFFFF * 2 * Math.PI;
const rotationZ = cursor.i32() / 0xFFFF * 2 * Math.PI;
const unknown3 = cursor.u8Array(4);
const unknown3 = cursor.u8_array(4);
const flags = cursor.f32();
const unknown4 = cursor.u8Array(12);
const unknown4 = cursor.u8_array(12);
const skin = cursor.u32();
const unknown5 = cursor.u8Array(4);
const unknown5 = cursor.u8_array(4);
npcs.push({
typeId,
@ -136,7 +136,7 @@ export function parseDat(cursor: ArrayBufferCursor): DatFile {
totalSize,
areaId,
entitiesSize,
data: cursor.u8Array(entitiesSize)
data: cursor.u8_array(entitiesSize)
});
}
}
@ -145,8 +145,8 @@ export function parseDat(cursor: ArrayBufferCursor): DatFile {
return { objs, npcs, unknowns };
}
export function writeDat({ objs, npcs, unknowns }: DatFile): ArrayBufferCursor {
const cursor = new ArrayBufferCursor(
export function writeDat({ objs, npcs, unknowns }: DatFile): BufferCursor {
const cursor = new BufferCursor(
objs.length * OBJECT_SIZE + npcs.length * NPC_SIZE + unknowns.length * 1000, true);
const groupedObjs = groupBy(objs, obj => obj.areaId);
@ -157,23 +157,23 @@ export function writeDat({ objs, npcs, unknowns }: DatFile): ArrayBufferCursor {
for (const areaId of objAreaIds) {
const areaObjs = groupedObjs[areaId];
const entitiesSize = areaObjs.length * OBJECT_SIZE;
cursor.writeU32(1); // Entity type
cursor.writeU32(entitiesSize + 16);
cursor.writeU32(areaId);
cursor.writeU32(entitiesSize);
cursor.write_u32(1); // Entity type
cursor.write_u32(entitiesSize + 16);
cursor.write_u32(areaId);
cursor.write_u32(entitiesSize);
for (const obj of areaObjs) {
cursor.writeU16(obj.typeId);
cursor.writeU8Array(obj.unknown[0]);
cursor.writeU16(obj.sectionId);
cursor.writeU8Array(obj.unknown[1]);
cursor.writeF32(obj.position.x);
cursor.writeF32(obj.position.y);
cursor.writeF32(obj.position.z);
cursor.writeI32(Math.round(obj.rotation.x / (2 * Math.PI) * 0xFFFF));
cursor.writeI32(Math.round(obj.rotation.y / (2 * Math.PI) * 0xFFFF));
cursor.writeI32(Math.round(obj.rotation.z / (2 * Math.PI) * 0xFFFF));
cursor.writeU8Array(obj.unknown[2]);
cursor.write_u16(obj.typeId);
cursor.write_u8_array(obj.unknown[0]);
cursor.write_u16(obj.sectionId);
cursor.write_u8_array(obj.unknown[1]);
cursor.write_f32(obj.position.x);
cursor.write_f32(obj.position.y);
cursor.write_f32(obj.position.z);
cursor.write_i32(Math.round(obj.rotation.x / (2 * Math.PI) * 0xFFFF));
cursor.write_i32(Math.round(obj.rotation.y / (2 * Math.PI) * 0xFFFF));
cursor.write_i32(Math.round(obj.rotation.z / (2 * Math.PI) * 0xFFFF));
cursor.write_u8_array(obj.unknown[2]);
}
}
@ -185,45 +185,45 @@ export function writeDat({ objs, npcs, unknowns }: DatFile): ArrayBufferCursor {
for (const areaId of npcAreaIds) {
const areaNpcs = groupedNpcs[areaId];
const entitiesSize = areaNpcs.length * NPC_SIZE;
cursor.writeU32(2); // Entity type
cursor.writeU32(entitiesSize + 16);
cursor.writeU32(areaId);
cursor.writeU32(entitiesSize);
cursor.write_u32(2); // Entity type
cursor.write_u32(entitiesSize + 16);
cursor.write_u32(areaId);
cursor.write_u32(entitiesSize);
for (const npc of areaNpcs) {
cursor.writeU16(npc.typeId);
cursor.writeU8Array(npc.unknown[0]);
cursor.writeU16(npc.sectionId);
cursor.writeU8Array(npc.unknown[1]);
cursor.writeF32(npc.position.x);
cursor.writeF32(npc.position.y);
cursor.writeF32(npc.position.z);
cursor.writeI32(Math.round(npc.rotation.x / (2 * Math.PI) * 0xFFFF));
cursor.writeI32(Math.round(npc.rotation.y / (2 * Math.PI) * 0xFFFF));
cursor.writeI32(Math.round(npc.rotation.z / (2 * Math.PI) * 0xFFFF));
cursor.writeU8Array(npc.unknown[2]);
cursor.writeF32(npc.flags);
cursor.writeU8Array(npc.unknown[3]);
cursor.writeU32(npc.skin);
cursor.writeU8Array(npc.unknown[4]);
cursor.write_u16(npc.typeId);
cursor.write_u8_array(npc.unknown[0]);
cursor.write_u16(npc.sectionId);
cursor.write_u8_array(npc.unknown[1]);
cursor.write_f32(npc.position.x);
cursor.write_f32(npc.position.y);
cursor.write_f32(npc.position.z);
cursor.write_i32(Math.round(npc.rotation.x / (2 * Math.PI) * 0xFFFF));
cursor.write_i32(Math.round(npc.rotation.y / (2 * Math.PI) * 0xFFFF));
cursor.write_i32(Math.round(npc.rotation.z / (2 * Math.PI) * 0xFFFF));
cursor.write_u8_array(npc.unknown[2]);
cursor.write_f32(npc.flags);
cursor.write_u8_array(npc.unknown[3]);
cursor.write_u32(npc.skin);
cursor.write_u8_array(npc.unknown[4]);
}
}
for (const unknown of unknowns) {
cursor.writeU32(unknown.entityType);
cursor.writeU32(unknown.totalSize);
cursor.writeU32(unknown.areaId);
cursor.writeU32(unknown.entitiesSize);
cursor.writeU8Array(unknown.data);
cursor.write_u32(unknown.entityType);
cursor.write_u32(unknown.totalSize);
cursor.write_u32(unknown.areaId);
cursor.write_u32(unknown.entitiesSize);
cursor.write_u8_array(unknown.data);
}
// Final header.
cursor.writeU32(0);
cursor.writeU32(0);
cursor.writeU32(0);
cursor.writeU32(0);
cursor.write_u32(0);
cursor.write_u32(0);
cursor.write_u32(0);
cursor.write_u32(0);
cursor.seekStart(0);
cursor.seek_start(0);
return cursor;
}

View File

@ -1,11 +1,11 @@
import * as fs from 'fs';
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
import { parseQuest, writeQuestQst } from '.';
import { ObjectType, Quest } from '../../../domain';
test('parse Towards the Future', () => {
const buffer = fs.readFileSync('test/resources/quest118_e.qst').buffer;
const cursor = new ArrayBufferCursor(buffer, true);
const cursor = new BufferCursor(buffer, true);
const quest = parseQuest(cursor)!;
expect(quest.name).toBe('Towards the Future');
@ -27,7 +27,7 @@ test('parse Towards the Future', () => {
*/
test('parseQuest and writeQuestQst', () => {
const buffer = fs.readFileSync('test/resources/tethealla_v0.143_quests/solo/ep1/02.qst').buffer;
const cursor = new ArrayBufferCursor(buffer, true);
const cursor = new BufferCursor(buffer, true);
const origQuest = parseQuest(cursor)!;
const testQuest = parseQuest(writeQuestQst(origQuest, '02.qst'))!;

View File

@ -1,4 +1,4 @@
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
import * as prs from '../../compression/prs';
import { parseDat, writeDat, DatObject, DatNpc, DatFile } from './dat';
import { parseBin, writeBin, Instruction } from './bin';
@ -22,7 +22,7 @@ const logger = Logger.get('bin-data/parsing/quest');
*
* Always delegates to parseQst at the moment.
*/
export function parseQuest(cursor: ArrayBufferCursor, lenient: boolean = false): Quest | undefined {
export function parseQuest(cursor: BufferCursor, lenient: boolean = false): Quest | undefined {
const qst = parseQst(cursor);
if (!qst) {
@ -86,7 +86,7 @@ export function parseQuest(cursor: ArrayBufferCursor, lenient: boolean = false):
);
}
export function writeQuestQst(quest: Quest, fileName: string): ArrayBufferCursor {
export function writeQuestQst(quest: Quest, fileName: string): BufferCursor {
const dat = writeDat({
objs: objectsToDatData(quest.objects),
npcs: npcsToDatData(quest.npcs),

View File

@ -1,4 +1,4 @@
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
import { parseQst, writeQst } from './qst';
import { walkQstFiles } from '../../../../test/src/utils';
@ -7,18 +7,18 @@ import { walkQstFiles } from '../../../../test/src/utils';
*/
test('parseQst and writeQst', () => {
walkQstFiles((_filePath, _fileName, fileContent) => {
const origQst = new ArrayBufferCursor(fileContent.buffer, true);
const origQst = new BufferCursor(fileContent.buffer, true);
const origQuest = parseQst(origQst);
if (origQuest) {
const testQst = writeQst(origQuest);
origQst.seekStart(0);
origQst.seek_start(0);
expect(testQst.size).toBe(origQst.size);
let match = true;
while (origQst.bytesLeft) {
while (origQst.bytes_left) {
if (testQst.u8() !== origQst.u8()) {
match = false;
break;

View File

@ -1,4 +1,4 @@
import { ArrayBufferCursor } from '../../ArrayBufferCursor';
import { BufferCursor } from '../../BufferCursor';
import Logger from 'js-logger';
const logger = Logger.get('bin-data/parsing/quest/qst');
@ -8,7 +8,7 @@ interface QstContainedFile {
name2?: string; // Unsure what this is
questNo?: number;
expectedSize?: number;
data: ArrayBufferCursor;
data: BufferCursor;
chunkNos: Set<number>;
}
@ -21,7 +21,7 @@ interface ParseQstResult {
* Low level parsing function for .qst files.
* Can only read the Blue Burst format.
*/
export function parseQst(cursor: ArrayBufferCursor): ParseQstResult | undefined {
export function parseQst(cursor: BufferCursor): ParseQstResult | undefined {
// A .qst file contains two 88-byte headers that describe the embedded .dat and .bin files.
let version = 'PC';
@ -42,7 +42,7 @@ export function parseQst(cursor: ArrayBufferCursor): ParseQstResult | undefined
if (version === 'Blue Burst') {
// Read headers and contained files.
cursor.seekStart(0);
cursor.seek_start(0);
const headers = parseHeaders(cursor);
@ -72,7 +72,7 @@ interface SimpleQstContainedFile {
name: string;
name2?: string;
questNo?: number;
data: ArrayBufferCursor;
data: BufferCursor;
}
interface WriteQstParams {
@ -83,12 +83,12 @@ interface WriteQstParams {
/**
* Always writes in Blue Burst format.
*/
export function writeQst(params: WriteQstParams): ArrayBufferCursor {
export function writeQst(params: WriteQstParams): BufferCursor {
const files = params.files;
const totalSize = files
.map(f => 88 + Math.ceil(f.data.size / 1024) * 1056)
.reduce((a, b) => a + b);
const cursor = new ArrayBufferCursor(totalSize, true);
const cursor = new BufferCursor(totalSize, true);
writeFileHeaders(cursor, files);
writeFileChunks(cursor, files);
@ -97,7 +97,7 @@ export function writeQst(params: WriteQstParams): ArrayBufferCursor {
throw new Error(`Expected a final file size of ${totalSize}, but got ${cursor.size}.`);
}
return cursor.seekStart(0);
return cursor.seek_start(0);
}
interface QstHeader {
@ -110,17 +110,17 @@ interface QstHeader {
/**
* TODO: Read all headers instead of just the first 2.
*/
function parseHeaders(cursor: ArrayBufferCursor): QstHeader[] {
function parseHeaders(cursor: BufferCursor): QstHeader[] {
const headers = [];
for (let i = 0; i < 2; ++i) {
cursor.seek(4);
const questNo = cursor.u16();
cursor.seek(38);
const fileName = cursor.stringAscii(16, true, true);
const fileName = cursor.string_ascii(16, true, true);
const size = cursor.u32();
// Not sure what this is:
const fileName2 = cursor.stringAscii(24, true, true);
const fileName2 = cursor.string_ascii(24, true, true);
headers.push({
questNo,
@ -133,17 +133,17 @@ function parseHeaders(cursor: ArrayBufferCursor): QstHeader[] {
return headers;
}
function parseFiles(cursor: ArrayBufferCursor, expectedSizes: Map<string, number>): QstContainedFile[] {
function parseFiles(cursor: BufferCursor, expectedSizes: Map<string, number>): QstContainedFile[] {
// Files are interleaved in 1056 byte chunks.
// Each chunk has a 24 byte header, 1024 byte data segment and an 8 byte trailer.
const files = new Map<string, QstContainedFile>();
while (cursor.bytesLeft >= 1056) {
while (cursor.bytes_left >= 1056) {
const startPosition = cursor.position;
// Read meta data.
const chunkNo = cursor.seek(4).u8();
const fileName = cursor.seek(3).stringAscii(16, true, true);
const fileName = cursor.seek(3).string_ascii(16, true, true);
let file = files.get(fileName);
@ -152,7 +152,7 @@ function parseFiles(cursor: ArrayBufferCursor, expectedSizes: Map<string, number
files.set(fileName, file = {
name: fileName,
expectedSize,
data: new ArrayBufferCursor(expectedSize || (10 * 1024), true),
data: new BufferCursor(expectedSize || (10 * 1024), true),
chunkNos: new Set()
});
}
@ -175,7 +175,7 @@ function parseFiles(cursor: ArrayBufferCursor, expectedSizes: Map<string, number
const data = cursor.take(size);
const chunkPosition = chunkNo * 1024;
file.data.size = Math.max(chunkPosition + size, file.data.size);
file.data.seekStart(chunkPosition).writeCursor(data);
file.data.seek_start(chunkPosition).write_cursor(data);
// Skip the padding and the trailer.
cursor.seek(1032 - data.size);
@ -185,13 +185,13 @@ function parseFiles(cursor: ArrayBufferCursor, expectedSizes: Map<string, number
}
}
if (cursor.bytesLeft) {
logger.warn(`${cursor.bytesLeft} Bytes left in file.`);
if (cursor.bytes_left) {
logger.warn(`${cursor.bytes_left} Bytes left in file.`);
}
for (const file of files.values()) {
// Clean up file properties.
file.data.seekStart(0);
file.data.seek_start(0);
file.chunkNos = new Set(Array.from(file.chunkNos.values()).sort((a, b) => a - b));
// Check whether the expected size was correct.
@ -212,22 +212,22 @@ function parseFiles(cursor: ArrayBufferCursor, expectedSizes: Map<string, number
return Array.from(files.values());
}
function writeFileHeaders(cursor: ArrayBufferCursor, files: SimpleQstContainedFile[]): void {
function writeFileHeaders(cursor: BufferCursor, files: SimpleQstContainedFile[]): void {
for (const file of files) {
if (file.name.length > 16) {
throw Error(`File ${file.name} has a name longer than 16 characters.`);
}
cursor.writeU16(88); // Header size.
cursor.writeU16(0x44); // Magic number.
cursor.writeU16(file.questNo || 0);
cursor.write_u16(88); // Header size.
cursor.write_u16(0x44); // Magic number.
cursor.write_u16(file.questNo || 0);
for (let i = 0; i < 38; ++i) {
cursor.writeU8(0);
cursor.write_u8(0);
}
cursor.writeStringAscii(file.name, 16);
cursor.writeU32(file.data.size);
cursor.write_string_ascii(file.name, 16);
cursor.write_u32(file.data.size);
let fileName2: string;
@ -245,11 +245,11 @@ function writeFileHeaders(cursor: ArrayBufferCursor, files: SimpleQstContainedFi
throw Error(`File ${file.name} has a fileName2 length (${fileName2}) longer than 24 characters.`);
}
cursor.writeStringAscii(fileName2, 24);
cursor.write_string_ascii(fileName2, 24);
}
}
function writeFileChunks(cursor: ArrayBufferCursor, files: SimpleQstContainedFile[]): void {
function writeFileChunks(cursor: BufferCursor, files: SimpleQstContainedFile[]): void {
// Files are interleaved in 1056 byte chunks.
// Each chunk has a 24 byte header, 1024 byte data segment and an 8 byte trailer.
files = files.slice();
@ -274,26 +274,26 @@ function writeFileChunks(cursor: ArrayBufferCursor, files: SimpleQstContainedFil
* @returns true if there are bytes left to write in data, false otherwise.
*/
function writeFileChunk(
cursor: ArrayBufferCursor,
data: ArrayBufferCursor,
cursor: BufferCursor,
data: BufferCursor,
chunkNo: number,
name: string
): boolean {
cursor.writeU8Array([28, 4, 19, 0]);
cursor.writeU8(chunkNo);
cursor.writeU8Array([0, 0, 0]);
cursor.writeStringAscii(name, 16);
cursor.write_u8_array([28, 4, 19, 0]);
cursor.write_u8(chunkNo);
cursor.write_u8_array([0, 0, 0]);
cursor.write_string_ascii(name, 16);
const size = Math.min(1024, data.bytesLeft);
cursor.writeCursor(data.take(size));
const size = Math.min(1024, data.bytes_left);
cursor.write_cursor(data.take(size));
// Padding.
for (let i = size; i < 1024; ++i) {
cursor.writeU8(0);
cursor.write_u8(0);
}
cursor.writeU32(size);
cursor.writeU32(0);
cursor.write_u32(size);
cursor.write_u32(0);
return !!data.bytesLeft;
return !!data.bytes_left;
}

View File

@ -0,0 +1,40 @@
import { BufferCursor } from "../BufferCursor";
import Logger from 'js-logger';
import { parse_prc } from "./prc";
const logger = Logger.get('bin-data/parsing/rlc');
const MARKER = 'RelChunkVer0.20';
/**
 * Parses an .rlc file: a container of .prc files.
 *
 * @param cursor - positioned at the start of the .rlc data.
 * @returns the contained files, decrypted and decompressed.
 */
export function parse_rlc(cursor: BufferCursor): BufferCursor[] {
    const marker = cursor.string_ascii(16, true, true);

    if (marker !== MARKER) {
        logger.warn(`First 16 bytes were "${marker}" instead of expected "${MARKER}".`);
    }

    // Number of (offset, size) entries in the file table.
    const table_size = cursor.u32();
    cursor.seek(12);

    const files: BufferCursor[] = [];

    for (let i = 0; i < table_size; ++i) {
        const offset = cursor.u32();
        const size = cursor.u32();
        const prev_pos = cursor.position;

        // Jump to the entry's data, slice it out, then return to the table.
        cursor.seek_start(offset);

        const file = cursor.take(size);
        // Contained .prc files are read as little-endian.
        file.little_endian = true;
        files.push(parse_prc(file));

        cursor.seek_start(prev_pos);
    }

    return files;
}

View File

@ -1,19 +1,19 @@
import { ArrayBufferCursor } from "../ArrayBufferCursor";
import { BufferCursor } from "../BufferCursor";
import { decompress } from "../compression/prs";
export type Unitxt = string[][];
export function parseUnitxt(buf: ArrayBufferCursor, compressed: boolean = true): Unitxt {
export function parseUnitxt(buf: BufferCursor, compressed: boolean = true): Unitxt {
if (compressed) {
buf = decompress(buf);
}
const categoryCount = buf.u32();
const entryCounts = buf.u32Array(categoryCount);
const entryCounts = buf.u32_array(categoryCount);
const categoryEntryOffsets: Array<Array<number>> = [];
for (const entryCount of entryCounts) {
categoryEntryOffsets.push(buf.u32Array(entryCount));
categoryEntryOffsets.push(buf.u32_array(entryCount));
}
const categories: Unitxt = [];
@ -23,8 +23,8 @@ export function parseUnitxt(buf: ArrayBufferCursor, compressed: boolean = true):
categories.push(entries);
for (const entryOffset of categoryEntryOffset) {
buf.seekStart(entryOffset);
const str = buf.stringUtf16(1024, true, true);
buf.seek_start(entryOffset);
const str = buf.string_utf16(1024, true, true);
entries.push(str);
}
}

View File

@ -1,6 +1,6 @@
import { computed, observable } from 'mobx';
import { Object3D } from 'three';
import { ArrayBufferCursor } from '../bin-data/ArrayBufferCursor';
import { BufferCursor } from '../bin-data/BufferCursor';
import { DatNpc, DatObject, DatUnknown } from '../bin-data/parsing/quest/dat';
import { NpcType } from './NpcType';
import { ObjectType } from './ObjectType';
@ -126,7 +126,7 @@ export class Quest {
/**
* (Partial) raw BIN data that can't be parsed yet by Phantasmal.
*/
binData: ArrayBufferCursor;
binData: BufferCursor;
constructor(
name: string,
@ -138,7 +138,7 @@ export class Quest {
objects: QuestObject[],
npcs: QuestNpc[],
datUnknowns: DatUnknown[],
binData: ArrayBufferCursor
binData: BufferCursor
) {
if (questNo != null && (!Number.isInteger(questNo) || questNo < 0)) throw new Error('questNo should be null or a non-negative integer.');
checkEpisode(episode);

View File

@ -1,6 +1,6 @@
import { observable, action } from 'mobx';
import { Object3D } from 'three';
import { ArrayBufferCursor } from '../bin-data/ArrayBufferCursor';
import { BufferCursor } from '../bin-data/BufferCursor';
import { getAreaSections } from '../bin-data/loading/areas';
import { getNpcGeometry, getObjectGeometry } from '../bin-data/loading/entities';
import { parseNj, parseXj } from '../bin-data/parsing/ninja';
@ -70,11 +70,11 @@ class QuestEditorStore {
}
if (file.name.endsWith('.nj')) {
this.setModel(createModelMesh(parseNj(new ArrayBufferCursor(reader.result, true))));
this.setModel(createModelMesh(parseNj(new BufferCursor(reader.result, true))));
} else if (file.name.endsWith('.xj')) {
this.setModel(createModelMesh(parseXj(new ArrayBufferCursor(reader.result, true))));
this.setModel(createModelMesh(parseXj(new BufferCursor(reader.result, true))));
} else {
const quest = parseQuest(new ArrayBufferCursor(reader.result, true));
const quest = parseQuest(new BufferCursor(reader.result, true));
this.setQuest(quest);
if (quest) {

Binary file not shown.

View File

@ -0,0 +1,28 @@
import fs from "fs";
import { BufferCursor } from "../src/bin-data/BufferCursor";
import { parse_rlc } from "../src/bin-data/parsing/rlc";
import { parse_njm2 } from "../src/bin-data/parsing/ninja/njm2";
import Logger from 'js-logger';
const logger = Logger.get('static/updateGenericData');
Logger.useDefaults({ defaultLevel: Logger.TRACE });
/**
 * Used by static data generation scripts.
 */
const RESOURCE_DIR = './static/resources';

/**
 * Used by production code.
 */
const PUBLIC_DIR = './public';

/**
 * Reads plymotiondata.rlc and logs the frame count of each contained animation.
 */
function update(): void {
    const rlc_bytes = fs.readFileSync(`${RESOURCE_DIR}/plymotiondata.rlc`);
    const motion_files = parse_rlc(new BufferCursor(rlc_bytes, false));

    for (const motion_file of motion_files) {
        const njm = parse_njm2(motion_file);
        logger.info(`Frame count: ${njm.motion.frame_count}`);
    }
}

update();