Split code into one module per tool + core and application module.

This commit is contained in:
Daan Vanden Bosch 2019-08-10 22:09:06 +02:00
parent cf1cd26c41
commit 66127253d3
171 changed files with 1743 additions and 1705 deletions

View File

@ -3,8 +3,12 @@ import { writeFileSync } from "fs";
import "isomorphic-fetch";
import Logger from "js-logger";
import { ASSETS_DIR } from ".";
import { Difficulty, NpcType, SectionId, SectionIds } from "../src/domain";
import { BoxDropDto, EnemyDropDto, ItemTypeDto } from "../src/dto";
import { Difficulty, SectionId, SectionIds } from "../src/core/domain";
import { BoxDropDto, EnemyDropDto, ItemTypeDto } from "../src/core/dto";
import {
name_and_episode_to_npc_type,
NpcType,
} from "../src/core/data_formats/parsing/quest/npc_types";
const logger = Logger.get("assets_generation/update_drops_ephinea");
@ -19,7 +23,7 @@ export async function update_drops_from_website(item_types: ItemTypeDto[]): Prom
const enemy_json = JSON.stringify(
[...normal.enemy_drops, ...hard.enemy_drops, ...vhard.enemy_drops, ...ultimate.enemy_drops],
null,
4
4,
);
writeFileSync(`${ASSETS_DIR}/enemyDrops.ephinea.json`, enemy_json);
@ -27,7 +31,7 @@ export async function update_drops_from_website(item_types: ItemTypeDto[]): Prom
const box_json = JSON.stringify(
[...normal.box_drops, ...hard.box_drops, ...vhard.box_drops, ...ultimate.box_drops],
null,
4
4,
);
writeFileSync(`${ASSETS_DIR}/boxDrops.ephinea.json`, box_json);
@ -38,7 +42,7 @@ export async function update_drops_from_website(item_types: ItemTypeDto[]): Prom
async function download(
item_types: ItemTypeDto[],
difficulty: Difficulty,
difficulty_url: string = Difficulty[difficulty].toLowerCase()
difficulty_url: string = Difficulty[difficulty].toLowerCase(),
): Promise<{ enemy_drops: EnemyDropDto[]; box_drops: BoxDropDto[]; items: Set<string> }> {
const response = await fetch(`https://ephinea.pioneer2.net/drop-charts/${difficulty_url}/`);
const body = await response.text();
@ -125,7 +129,7 @@ async function download(
throw new Error(`No item type found with name "${item}".`);
}
const npc_type = NpcType.by_name_and_episode(enemy_or_box, episode);
const npc_type = name_and_episode_to_npc_type(enemy_or_box, episode);
if (!npc_type) {
throw new Error(`Couldn't retrieve NpcType.`);
@ -149,7 +153,7 @@ async function download(
difficulty: Difficulty[difficulty],
episode,
sectionId: SectionId[section_id],
enemy: npc_type.code,
enemy: NpcType[npc_type],
itemTypeId: item_type.id,
dropRate: drop_rate_num / drop_rate_denom,
rareRate: rare_rate_num / rare_rate_denom,
@ -159,7 +163,7 @@ async function download(
} catch (e) {
logger.error(
`Error while processing item ${item} of ${enemy_or_box} in episode ${episode} ${Difficulty[difficulty]}.`,
e
e,
);
}
}
@ -167,7 +171,7 @@ async function download(
} catch (e) {
logger.error(
`Error while processing ${enemy_or_box_text} in episode ${episode} ${difficulty}.`,
e
e,
);
}
});

View File

@ -1,23 +1,16 @@
import { readdirSync, readFileSync, statSync, writeFileSync } from "fs";
import Logger from "js-logger";
import { ASSETS_DIR, RESOURCE_DIR } from ".";
import { Endianness } from "../src/data_formats";
import { BufferCursor } from "../src/data_formats/cursor/BufferCursor";
import { ItemPmt, parse_item_pmt } from "../src/data_formats/parsing/itempmt";
import { parse_quest } from "../src/data_formats/parsing/quest";
import { parse_unitxt, Unitxt } from "../src/data_formats/parsing/unitxt";
import {
Difficulties,
Difficulty,
Episode,
Episodes,
NpcType,
SectionId,
SectionIds,
} from "../src/domain";
import { NpcTypes } from "../src/domain/NpcType";
import { BoxDropDto, EnemyDropDto, ItemTypeDto, QuestDto } from "../src/dto";
import { BufferCursor } from "../src/core/data_formats/cursor/BufferCursor";
import { ItemPmt, parse_item_pmt } from "../src/core/data_formats/parsing/itempmt";
import { parse_quest } from "../src/core/data_formats/parsing/quest";
import { parse_unitxt, Unitxt } from "../src/core/data_formats/parsing/unitxt";
import { Difficulties, Difficulty, SectionId, SectionIds } from "../src/core/domain";
import { BoxDropDto, EnemyDropDto, ItemTypeDto, QuestDto } from "../src/core/dto";
import { update_drops_from_website } from "./update_drops_ephinea";
import { Episode, EPISODES } from "../src/core/data_formats/parsing/quest/Episode";
import { npc_data, NPC_TYPES, NpcType } from "../src/core/data_formats/parsing/quest/npc_types";
import { Endianness } from "../src/core/data_formats/Endianness";
const logger = Logger.get("assets_generation/update_ephinea_data");
@ -87,7 +80,7 @@ function update_quests(): void {
const id_counts = quests.reduce(
(counts, q) => counts.set(q.id, (counts.get(q.id) || 0) + 1),
new Map<number, number>()
new Map<number, number>(),
);
for (const [id, count] of id_counts.entries()) {
@ -129,8 +122,8 @@ function process_quest(path: string, quests: QuestDto[]): void {
const enemy_counts: { [npc_type_code: string]: number } = {};
for (const npc of q.npcs) {
if (npc.type.enemy) {
enemy_counts[npc.type.code] = (enemy_counts[npc.type.code] || 0) + 1;
if (npc_data(npc.type).enemy) {
enemy_counts[NpcType[npc.type]] = (enemy_counts[NpcType[npc.type]] || 0) + 1;
}
}
@ -273,7 +266,7 @@ function update_drops(item_pt: ItemPt): void {
const enemy_drops = new Array<EnemyDropDto>();
for (const diff of Difficulties) {
for (const ep of Episodes) {
for (const ep of EPISODES) {
for (const sid of SectionIds) {
enemy_drops.push(...load_enemy_drops(item_pt, diff, ep, sid));
}
@ -285,7 +278,7 @@ function update_drops(item_pt: ItemPt): void {
const box_drops = new Array<BoxDropDto>();
for (const diff of Difficulties) {
for (const ep of Episodes) {
for (const ep of EPISODES) {
for (const sid of SectionIds) {
box_drops.push(...load_box_drops(diff, ep, sid));
}
@ -330,8 +323,8 @@ function load_item_pt(): ItemPt {
cursor.seek(1608);
const enemy_dar = cursor.u8_array(100);
for (const npc of NpcTypes) {
if (npc.episode !== episode) continue;
for (const npc of NPC_TYPES) {
if (npc_data(npc).episode !== episode) continue;
switch (npc) {
case NpcType.Dragon:
@ -373,134 +366,134 @@ function load_item_pt(): ItemPt {
dar_table,
};
for (const npc of NpcTypes) {
if (npc.episode !== Episode.IV) continue;
for (const npc of NPC_TYPES) {
if (npc_data(npc).episode !== Episode.IV) continue;
switch (npc) {
case NpcType.SandRappy:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.RagRappy)!
table[Episode.I][diff][sid].dar_table.get(NpcType.RagRappy)!,
);
break;
case NpcType.DelRappy:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.AlRappy)!
table[Episode.I][diff][sid].dar_table.get(NpcType.AlRappy)!,
);
break;
case NpcType.Astark:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.Hildebear)!
table[Episode.I][diff][sid].dar_table.get(NpcType.Hildebear)!,
);
break;
case NpcType.SatelliteLizard:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.SavageWolf)!
table[Episode.I][diff][sid].dar_table.get(NpcType.SavageWolf)!,
);
break;
case NpcType.Yowie:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.BarbarousWolf)!
table[Episode.I][diff][sid].dar_table.get(NpcType.BarbarousWolf)!,
);
break;
case NpcType.MerissaA:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.PofuillySlime)!
table[Episode.I][diff][sid].dar_table.get(NpcType.PofuillySlime)!,
);
break;
case NpcType.MerissaAA:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.PouillySlime)!
table[Episode.I][diff][sid].dar_table.get(NpcType.PouillySlime)!,
);
break;
case NpcType.Girtablulu:
dar_table.set(
npc,
table[Episode.II][diff][sid].dar_table.get(NpcType.Mericarol)!
table[Episode.II][diff][sid].dar_table.get(NpcType.Mericarol)!,
);
break;
case NpcType.Zu:
dar_table.set(
npc,
table[Episode.II][diff][sid].dar_table.get(NpcType.GiGue)!
table[Episode.II][diff][sid].dar_table.get(NpcType.GiGue)!,
);
break;
case NpcType.Pazuzu:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.Hildeblue)!
table[Episode.I][diff][sid].dar_table.get(NpcType.Hildeblue)!,
);
break;
case NpcType.Boota:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.Booma)!
table[Episode.I][diff][sid].dar_table.get(NpcType.Booma)!,
);
break;
case NpcType.ZeBoota:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.Gobooma)!
table[Episode.I][diff][sid].dar_table.get(NpcType.Gobooma)!,
);
break;
case NpcType.BaBoota:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.Gigobooma)!
table[Episode.I][diff][sid].dar_table.get(NpcType.Gigobooma)!,
);
break;
case NpcType.Dorphon:
dar_table.set(
npc,
table[Episode.II][diff][sid].dar_table.get(NpcType.Delbiter)!
table[Episode.II][diff][sid].dar_table.get(NpcType.Delbiter)!,
);
break;
case NpcType.DorphonEclair:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.Hildeblue)!
table[Episode.I][diff][sid].dar_table.get(NpcType.Hildeblue)!,
);
break;
case NpcType.Goran:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.Dimenian)!
table[Episode.I][diff][sid].dar_table.get(NpcType.Dimenian)!,
);
break;
case NpcType.PyroGoran:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.LaDimenian)!
table[Episode.I][diff][sid].dar_table.get(NpcType.LaDimenian)!,
);
break;
case NpcType.GoranDetonator:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.SoDimenian)!
table[Episode.I][diff][sid].dar_table.get(NpcType.SoDimenian)!,
);
break;
case NpcType.SaintMilion:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.DarkFalz)!
table[Episode.I][diff][sid].dar_table.get(NpcType.DarkFalz)!,
);
break;
case NpcType.Shambertin:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.DarkFalz)!
table[Episode.I][diff][sid].dar_table.get(NpcType.DarkFalz)!,
);
break;
case NpcType.Kondrieu:
dar_table.set(
npc,
table[Episode.I][diff][sid].dar_table.get(NpcType.DarkFalz)!
table[Episode.I][diff][sid].dar_table.get(NpcType.DarkFalz)!,
);
break;
}
@ -516,11 +509,11 @@ function load_enemy_drops(
item_pt: ItemPt,
difficulty: Difficulty,
episode: Episode,
section_id: SectionId
section_id: SectionId,
): EnemyDropDto[] {
const drops: EnemyDropDto[] = [];
const drops_buf = readFileSync(
`${EPHINEA_RESOURCE_DIR}/login-config/drop/ep${episode}_mob_${difficulty}_${section_id}.txt`
`${EPHINEA_RESOURCE_DIR}/login-config/drop/ep${episode}_mob_${difficulty}_${section_id}.txt`,
);
let line_no = 0;
@ -539,13 +532,13 @@ function load_enemy_drops(
const dar = item_pt[episode][difficulty][section_id].dar_table.get(enemy);
if (dar == null) {
logger.error(`No DAR found for ${enemy.name}.`);
logger.error(`No DAR found for ${NpcType[enemy]}.`);
} else if (rare_rate > 0 && item_type_id) {
drops.push({
difficulty: Difficulty[difficulty],
episode,
sectionId: SectionId[section_id],
enemy: enemy.code,
enemy: NpcType[enemy],
itemTypeId: item_type_id,
dropRate: dar,
rareRate: rare_rate,
@ -564,11 +557,11 @@ function load_enemy_drops(
function load_box_drops(
difficulty: Difficulty,
episode: Episode,
section_id: SectionId
section_id: SectionId,
): BoxDropDto[] {
const drops: BoxDropDto[] = [];
const drops_buf = readFileSync(
`${EPHINEA_RESOURCE_DIR}/login-config/drop/ep${episode}_box_${difficulty}_${section_id}.txt`
`${EPHINEA_RESOURCE_DIR}/login-config/drop/ep${episode}_box_${difficulty}_${section_id}.txt`,
);
let line_no = 0;
@ -606,7 +599,7 @@ function load_box_drops(
function get_stat_boosts(
item_pmt: ItemPmt,
stat_boost_index: number
stat_boost_index: number,
): {
atp: number;
ata: number;

View File

@ -1,10 +1,10 @@
import { readFileSync, writeFileSync } from "fs";
import Logger from "js-logger";
import { ASSETS_DIR, RESOURCE_DIR, SRC_DIR } from ".";
import { Endianness } from "../src/data_formats";
import { BufferCursor } from "../src/data_formats/cursor/BufferCursor";
import { parse_rlc } from "../src/data_formats/parsing/rlc";
import { BufferCursor } from "../src/core/data_formats/cursor/BufferCursor";
import { parse_rlc } from "../src/core/data_formats/parsing/rlc";
import YAML from "yaml";
import { Endianness } from "../src/core/data_formats/Endianness";
const logger = Logger.get("assets_generation/update_generic_data");
@ -30,7 +30,7 @@ function extract_player_animations(): void {
for (const file of parse_rlc(new BufferCursor(buf, Endianness.Big))) {
writeFileSync(
`${ASSETS_DIR}/player/animation/animation_${(i++).toString().padStart(3, "0")}.njm`,
new Uint8Array(file.array_buffer())
new Uint8Array(file.array_buffer()),
);
}
@ -137,7 +137,7 @@ function opcode_to_code(output: string[], code: number, opcode?: any): void {
}
}
function params_to_code(params: any[]) {
function params_to_code(params: any[]): string {
return params
.map((param: any) => {
let type: string;
@ -181,7 +181,7 @@ function params_to_code(params: any[]) {
break;
case "reg_tup_ref":
type = `{ kind: Kind.RegTupRef, register_tuples: [${params_to_code(
param.reg_tup
param.reg_tup,
)}] }`;
break;
case "reg_ref_var":

View File

@ -1,5 +1,5 @@
import { autorun, observable } from "mobx";
import { Server } from "../domain";
import { Server } from "../../core/domain";
class ApplicationStore {
@observable current_server: Server = Server.Ephinea;

View File

@ -2,13 +2,13 @@ import { Menu, Select } from "antd";
import { ClickParam } from "antd/lib/menu";
import { observer } from "mobx-react";
import React, { ReactNode, Component } from "react";
import { Server } from "../domain";
import { Server } from "../../core/domain";
import styles from "./ApplicationComponent.css";
import { DpsCalcComponent } from "./dps_calc/DpsCalcComponent";
import { with_error_boundary } from "./ErrorBoundary";
import { HuntOptimizerComponent } from "./hunt_optimizer/HuntOptimizerComponent";
import { QuestEditorComponent } from "./quest_editor/QuestEditorComponent";
import { ViewerComponent } from "./viewer/ViewerComponent";
import { DpsCalcComponent } from "../../dps_calc/ui/DpsCalcComponent";
import { with_error_boundary } from "../../core/ui/ErrorBoundary";
import { HuntOptimizerComponent } from "../../hunt_optimizer/ui/HuntOptimizerComponent";
import { QuestEditorComponent } from "../../quest_editor/ui/QuestEditorComponent";
import { ViewerComponent } from "../../viewer/ui/ViewerComponent";
import { application_store } from "../stores/ApplicationStore";
const Viewer = with_error_boundary(ViewerComponent);

View File

@ -65,7 +65,7 @@ class Context {
this.src = cursor;
this.dst = new ResizableBufferCursor(
new ResizableBuffer(Math.floor(1.5 * cursor.size)),
cursor.endianness
cursor.endianness,
);
this.flags = 0;
this.flag_bits_left = 0;

View File

@ -83,7 +83,7 @@ test("PRS compression and decompression of quest118_e.bin", () => {
if (test_byte !== orig_byte) {
throw new Error(
`Byte ${matching_bytes} didn't match, expected ${orig_byte}, got ${test_byte}.`
`Byte ${matching_bytes} didn't match, expected ${orig_byte}, got ${test_byte}.`,
);
}

View File

@ -189,7 +189,11 @@ export abstract class AbstractCursor implements Cursor {
return new Vec3(this.f32(), this.f32(), this.f32());
}
string_ascii(max_byte_length: number, null_terminated: boolean, drop_remaining: boolean) {
string_ascii(
max_byte_length: number,
null_terminated: boolean,
drop_remaining: boolean,
): string {
let code_points: number[] = [];
for (let i = 0; i < max_byte_length; i++) {
@ -209,7 +213,11 @@ export abstract class AbstractCursor implements Cursor {
return String.fromCodePoint(...code_points);
}
string_utf16(max_byte_length: number, null_terminated: boolean, drop_remaining: boolean) {
string_utf16(
max_byte_length: number,
null_terminated: boolean,
drop_remaining: boolean,
): string {
let code_points: number[] = [];
let len = Math.floor(max_byte_length / 2);
@ -234,7 +242,7 @@ export abstract class AbstractCursor implements Cursor {
this.check_size("size", size, size);
const r = this.backing_buffer.slice(
this.offset + this.position,
this.offset + this.position + size
this.offset + this.position + size,
);
this._position += size;
return r;

View File

@ -106,7 +106,7 @@ export abstract class AbstractWritableCursor extends AbstractCursor implements W
other.copy_to_uint8_array(
new Uint8Array(this.backing_buffer, this.offset + this.position, size),
size
size,
);
this._position += size;

View File

@ -33,7 +33,7 @@ export class ArrayBufferCursor extends AbstractWritableCursor implements Writabl
buffer: ArrayBuffer,
endianness: Endianness,
offset: number = 0,
size: number = buffer.byteLength - offset
size: number = buffer.byteLength - offset,
) {
super(endianness, offset);
this._size = size;

View File

@ -23,7 +23,7 @@ export class BufferCursor extends AbstractCursor implements Cursor {
buffer: Buffer,
endianness: Endianness,
offset: number = 0,
size: number = buffer.byteLength - offset
size: number = buffer.byteLength - offset,
) {
if (offset < 0 || offset > buffer.byteLength) {
throw new Error(`Offset ${offset} is out of bounds.`);

View File

@ -16,7 +16,7 @@ import { ResizableBufferCursor } from "./ResizableBufferCursor";
function test_all(
name: string,
bytes: (endianness: Endianness) => number[],
run_test: (cursor: Cursor, endianness: Endianness) => void
run_test: (cursor: Cursor, endianness: Endianness) => void,
): void {
const endiannesses = enum_values<Endianness>(Endianness);
@ -69,7 +69,7 @@ test_all(
expect(cursor.size).toBe(cursor.position + cursor.bytes_left);
expect(cursor.endianness).toBe(endianness);
}
}
},
);
test_all(
@ -81,7 +81,7 @@ test_all(
} else {
expect(cursor.u32()).toBe(0x01020304);
}
}
},
);
/**
@ -120,7 +120,7 @@ function test_integer_read(method_name: string): void {
expect((cursor as any)[method_name]()).toBe(expected_number_2);
expect(cursor.position).toBe(2 * byte_count);
}
},
);
}
@ -138,7 +138,7 @@ test_all(
expect(cursor.u8_array(3)).toEqual([1, 2, 3]);
expect(cursor.seek_start(2).u8_array(4)).toEqual([3, 4, 5, 6]);
expect(cursor.seek_start(5).u8_array(3)).toEqual([6, 7, 8]);
}
},
);
test_all(
@ -148,7 +148,7 @@ test_all(
expect(cursor.u16_array(3)).toEqual([0x0101, 0x0202, 0x0303]);
expect(cursor.seek_start(4).u16_array(4)).toEqual([0x0303, 0x0404, 0x0505, 0x0606]);
expect(cursor.seek_start(10).u16_array(3)).toEqual([0x0606, 0x0707, 0x0808]);
}
},
);
function test_string_read(method_name: string, char_size: number): void {
@ -194,7 +194,7 @@ function test_string_read(method_name: string, char_size: number): void {
cursor.seek_start(char_size);
expect((cursor as any)[method_name](4 * char_size, false, false)).toBe("AB\0ÿ");
expect(cursor.position).toBe(5 * char_size);
}
},
);
}

View File

@ -157,7 +157,7 @@ export interface Cursor {
string_ascii(
max_byte_length: number,
null_terminated: boolean,
drop_remaining: boolean
drop_remaining: boolean,
): string;
/**
@ -166,7 +166,7 @@ export interface Cursor {
string_utf16(
max_byte_length: number,
null_terminated: boolean,
drop_remaining: boolean
drop_remaining: boolean,
): string;
array_buffer(size?: number): ArrayBuffer;

View File

@ -38,7 +38,7 @@ export class ResizableBufferCursor extends AbstractWritableCursor implements Wri
buffer: ResizableBuffer,
endianness: Endianness,
offset: number = 0,
size: number = buffer.size - offset
size: number = buffer.size - offset,
) {
if (offset < 0 || offset > buffer.size) {
throw new Error(`Offset ${offset} is out of bounds.`);

View File

@ -15,7 +15,7 @@ import { WritableCursor } from "./WritableCursor";
function test_all(
name: string,
bytes: (endianness: Endianness) => number[],
run_test: (cursor: WritableCursor, endianness: Endianness) => void
run_test: (cursor: WritableCursor, endianness: Endianness) => void,
): void {
const endiannesses = enum_values<Endianness>(Endianness);
@ -69,7 +69,7 @@ test_all(
expect(cursor.position).toBe(3);
expect(cursor.bytes_left).toBe(7);
expect(cursor.endianness).toBe(endianness);
}
},
);
/**
@ -106,7 +106,7 @@ function test_integer_write(method_name: string): void {
expect((cursor as any)[read_method_name]()).toBe(expected_number_1);
expect((cursor as any)[read_method_name]()).toBe(expected_number_2);
}
},
);
}
@ -135,7 +135,7 @@ test_all(
expect(cursor.f32()).toBeCloseTo(103.502, 3);
expect(cursor.position).toBe(8);
}
},
);
test_all(
@ -169,7 +169,7 @@ test_all(
}
expect(cursor.position).toBe(20);
}
},
);
test_all(
@ -189,5 +189,5 @@ test_all(
expect(new_cursor.position).toBe(0);
expect(new_cursor.u32()).toBe(3);
expect(new_cursor.u32()).toBe(4);
}
},
);

View File

@ -41,7 +41,7 @@ export function parse_area_geometry(cursor: Cursor): RenderObject {
const section_rotation = new Vec3(
cursor.u32() * ANGLE_TO_RAD,
cursor.u32() * ANGLE_TO_RAD,
cursor.u32() * ANGLE_TO_RAD
cursor.u32() * ANGLE_TO_RAD,
);
cursor.seek(4);
@ -56,7 +56,7 @@ export function parse_area_geometry(cursor: Cursor): RenderObject {
const objects = parse_geometry_table(
cursor,
simple_geometry_offset_table_offset,
simple_geometry_offset_count
simple_geometry_offset_count,
);
sections.push({
@ -74,7 +74,7 @@ export function parse_area_geometry(cursor: Cursor): RenderObject {
function parse_geometry_table(
cursor: Cursor,
table_offset: number,
table_entry_count: number
table_entry_count: number,
): NjObject<XjModel>[] {
const objects: NjObject<XjModel>[] = [];

View File

@ -35,7 +35,7 @@ export class NjObject<M extends NjModel> {
position: Vec3,
rotation: Vec3, // Euler angles in radians.
scale: Vec3,
children: NjObject<M>[]
children: NjObject<M>[],
) {
this.evaluation_flags = evaluation_flags;
this.model = model;
@ -70,7 +70,7 @@ export class NjObject<M extends NjModel> {
private get_bone_internal(
object: NjObject<M>,
bone_id: number,
id_ref: [number]
id_ref: [number],
): NjObject<M> | undefined {
if (!object.evaluation_flags.skip) {
const id = id_ref[0]++;
@ -125,7 +125,7 @@ export function parse_xj_object(cursor: Cursor): NjObject<XjModel>[] {
function parse_ninja<M extends NjModel>(
cursor: Cursor,
parse_model: (cursor: Cursor, context: any) => M,
context: any
context: any,
): NjObject<M>[] {
// POF0 and other chunks types are ignored.
const njcm_chunks = parse_iff(cursor).filter(chunk => chunk.type === NJCM);
@ -142,7 +142,7 @@ function parse_ninja<M extends NjModel>(
function parse_sibling_objects<M extends NjModel>(
cursor: Cursor,
parse_model: (cursor: Cursor, context: any) => M,
context: any
context: any,
): NjObject<M>[] {
const eval_flags = cursor.u32();
const no_translate = (eval_flags & 0b1) !== 0;
@ -205,7 +205,7 @@ function parse_sibling_objects<M extends NjModel>(
new Vec3(pos_x, pos_y, pos_z),
new Vec3(rotation_x, rotation_y, rotation_z),
new Vec3(scale_x, scale_y, scale_z),
children
children,
);
return [object, ...siblings];

View File

@ -1,4 +1,4 @@
import { ANGLE_TO_RAD } from ".";
import { ANGLE_TO_RAD } from "./index";
import { Cursor } from "../../cursor/Cursor";
import { Vec3 } from "../../vector";
@ -201,7 +201,7 @@ function parse_motion_data_f(cursor: Cursor, count: number): NjKeyframeF[] {
function parse_motion_data_a(
cursor: Cursor,
keyframe_count: number,
frame_count: number
frame_count: number,
): NjKeyframeA[] {
const frames: NjKeyframeA[] = [];
const start_pos = cursor.position;
@ -212,7 +212,7 @@ function parse_motion_data_a(
value: new Vec3(
cursor.u16() * ANGLE_TO_RAD,
cursor.u16() * ANGLE_TO_RAD,
cursor.u16() * ANGLE_TO_RAD
cursor.u16() * ANGLE_TO_RAD,
),
});
}
@ -240,7 +240,7 @@ function parse_motion_data_a_wide(cursor: Cursor, keyframe_count: number): NjKey
value: new Vec3(
cursor.i32() * ANGLE_TO_RAD,
cursor.i32() * ANGLE_TO_RAD,
cursor.i32() * ANGLE_TO_RAD
cursor.i32() * ANGLE_TO_RAD,
),
});
}

View File

@ -201,7 +201,7 @@ export function parse_njcm_model(cursor: Cursor, cached_chunk_offsets: number[])
function parse_chunks(
cursor: Cursor,
cached_chunk_offsets: number[],
wide_end_chunks: boolean
wide_end_chunks: boolean,
): NjcmChunk[] {
const chunks: NjcmChunk[] = [];
let loop = true;
@ -313,7 +313,7 @@ function parse_chunks(
function parse_vertex_chunk(
cursor: Cursor,
chunk_type_id: number,
flags: number
flags: number,
): NjcmChunkVertex[] {
if (chunk_type_id < 32 || chunk_type_id > 50) {
logger.warn(`Unknown vertex chunk type ${chunk_type_id}.`);
@ -375,7 +375,7 @@ function parse_vertex_chunk(
vertex.normal = new Vec3(
((normal >> 20) & 0x3ff) / 0x3ff,
((normal >> 10) & 0x3ff) / 0x3ff,
(normal & 0x3ff) / 0x3ff
(normal & 0x3ff) / 0x3ff,
);
if (chunk_type_id >= 49) {
@ -393,7 +393,7 @@ function parse_vertex_chunk(
function parse_triangle_strip_chunk(
cursor: Cursor,
chunk_type_id: number,
flags: number
flags: number,
): NjcmTriangleStrip[] {
const render_flags = {
ignore_light: (flags & 0b1) !== 0,
@ -474,7 +474,7 @@ function parse_triangle_strip_chunk(
vertex.normal = new Vec3(
cursor.u16() / 255,
cursor.u16() / 255,
cursor.u16() / 255
cursor.u16() / 255,
);
}

View File

@ -37,7 +37,7 @@ export function parse_xvm(cursor: Cursor): Xvm {
logger.warn("No header found.");
} else if (header.texture_count !== textures.length) {
logger.warn(
`Found ${textures.length} textures instead of ${header.texture_count} as defined in the header.`
`Found ${textures.length} textures instead of ${header.texture_count} as defined in the header.`,
);
}

View File

@ -65,15 +65,15 @@ export function parse_xj_model(cursor: Cursor): XjModel {
}
model.meshes.push(
...parse_triangle_strip_table(cursor, triangle_strip_table_offset, triangle_strip_count)
...parse_triangle_strip_table(cursor, triangle_strip_table_offset, triangle_strip_count),
);
model.meshes.push(
...parse_triangle_strip_table(
cursor,
transparent_triangle_strip_table_offset,
transparent_triangle_strip_count
)
transparent_triangle_strip_count,
),
);
return model;
@ -133,7 +133,7 @@ function parse_vertex_info_table(cursor: Cursor, vertex_info_table_offset: numbe
function parse_triangle_strip_table(
cursor: Cursor,
triangle_strip_list_offset: number,
triangle_strip_count: number
triangle_strip_count: number,
): XjMesh[] {
const strips: XjMesh[] = [];
@ -148,7 +148,7 @@ function parse_triangle_strip_table(
const material_properties = parse_triangle_strip_material_properties(
cursor,
material_table_offset,
material_table_size
material_table_size,
);
cursor.seek_start(index_list_offset);
@ -166,7 +166,7 @@ function parse_triangle_strip_table(
function parse_triangle_strip_material_properties(
cursor: Cursor,
offset: number,
size: number
size: number,
): XjMaterialProperties {
const props: XjMaterialProperties = {};

View File

@ -16,7 +16,7 @@ export function parse_prc(cursor: Cursor): Cursor {
if (out.size !== size) {
logger.warn(
`Size of decrypted, decompressed file was ${out.size} instead of expected ${size}.`
`Size of decrypted, decompressed file was ${out.size} instead of expected ${size}.`,
);
}

View File

@ -8,7 +8,7 @@ import { parse_bin, write_bin } from "./bin";
/**
* Parse a file, convert the resulting structure to BIN again and check whether the end result is equal to the original.
*/
function test_quest(path: string) {
function test_quest(path: string): void {
const orig_buffer = readFileSync(path);
const orig_bin = prs_decompress(new BufferCursor(orig_buffer, Endianness.Little));
const test_buffer = write_bin(parse_bin(orig_bin));
@ -25,7 +25,7 @@ function test_quest(path: string) {
if (test_byte !== orig_byte) {
throw new Error(
`Byte ${matching_bytes} didn't match, expected ${orig_byte}, got ${test_byte}.`
`Byte ${matching_bytes} didn't match, expected ${orig_byte}, got ${test_byte}.`,
);
}

View File

@ -1,8 +1,8 @@
import Logger from "js-logger";
import { Endianness } from "../../Endianness";
import { ControlFlowGraph } from "../../../scripting/data_flow_analysis/ControlFlowGraph";
import { register_value } from "../../../scripting/data_flow_analysis/register_value";
import { stack_value } from "../../../scripting/data_flow_analysis/stack_value";
import { ControlFlowGraph } from "../../../../quest_editor/scripting/data_flow_analysis/ControlFlowGraph";
import { register_value } from "../../../../quest_editor/scripting/data_flow_analysis/register_value";
import { stack_value } from "../../../../quest_editor/scripting/data_flow_analysis/stack_value";
import {
Arg,
DataSegment,
@ -11,8 +11,13 @@ import {
Segment,
SegmentType,
StringSegment,
} from "../../../scripting/instructions";
import { Kind, Opcode, OPCODES, StackInteraction } from "../../../scripting/opcodes";
} from "../../../../quest_editor/scripting/instructions";
import {
Kind,
Opcode,
OPCODES,
StackInteraction,
} from "../../../../quest_editor/scripting/opcodes";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
import { Cursor } from "../../cursor/Cursor";
import { ResizableBufferCursor } from "../../cursor/ResizableBufferCursor";

View File

@ -58,7 +58,7 @@ export function parse_dat(cursor: Cursor): DatFile {
if (entities_size !== total_size - 16) {
throw Error(
`Malformed DAT file. Expected an entities size of ${total_size -
16}, got ${entities_size}.`
16}, got ${entities_size}.`,
);
}
@ -94,7 +94,7 @@ export function parse_dat(cursor: Cursor): DatFile {
if (bytes_read !== entities_size) {
logger.warn(
`Read ${bytes_read} bytes instead of expected ${entities_size} for entity type ${entity_type} (Object).`
`Read ${bytes_read} bytes instead of expected ${entities_size} for entity type ${entity_type} (Object).`,
);
cursor.seek(entities_size - bytes_read);
}
@ -133,7 +133,7 @@ export function parse_dat(cursor: Cursor): DatFile {
if (bytes_read !== entities_size) {
logger.warn(
`Read ${bytes_read} bytes instead of expected ${entities_size} for entity type ${entity_type} (NPC).`
`Read ${bytes_read} bytes instead of expected ${entities_size} for entity type ${entity_type} (NPC).`,
);
cursor.seek(entities_size - bytes_read);
}
@ -157,7 +157,7 @@ export function write_dat({ objs, npcs, unknowns }: DatFile): ResizableBuffer {
const buffer = new ResizableBuffer(
objs.length * (16 + OBJECT_SIZE) +
npcs.length * (16 + NPC_SIZE) +
unknowns.reduce((a, b) => a + b.total_size, 0)
unknowns.reduce((a, b) => a + b.total_size, 0),
);
const cursor = new ResizableBufferCursor(buffer, Endianness.Little);

View File

@ -1,6 +1,6 @@
import { readFileSync } from "fs";
import { Endianness } from "../../Endianness";
import { walk_qst_files } from "../../../../test/src/utils";
import { walk_qst_files } from "../../../../../test/src/utils";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";
import { BufferCursor } from "../../cursor/BufferCursor";
import { parse_quest, write_quest_qst } from "./index";

View File

@ -4,8 +4,8 @@ import {
InstructionSegment,
Segment,
SegmentType,
} from "../../../scripting/instructions";
import { Opcode } from "../../../scripting/opcodes";
} from "../../../../quest_editor/scripting/instructions";
import { Opcode } from "../../../../quest_editor/scripting/opcodes";
import { prs_compress } from "../../compression/prs/compress";
import { prs_decompress } from "../../compression/prs/decompress";
import { ArrayBufferCursor } from "../../cursor/ArrayBufferCursor";

View File

@ -1,5 +1,6 @@
import { Episode, check_episode } from "./Episode";
// Make sure ObjectType does not overlap NpcType.
export enum NpcType {
//
// Unknown NPCs
@ -181,7 +182,6 @@ export enum NpcType {
GoranDetonator,
SaintMilion,
Shambertin,
// Kondrieu should be last to make sure ObjectType does not overlap NpcType. See code below.
Kondrieu,
}
@ -201,6 +201,7 @@ export type NpcTypeData = {
readonly rare_type?: NpcType;
};
export const NPC_TYPES: NpcType[] = [];
export const ENEMY_NPC_TYPES: NpcType[] = [];
export function npc_data(type: NpcType): NpcTypeData {
@ -234,13 +235,10 @@ function define_npc_type_data(
enemy: boolean,
rare_type?: NpcType,
): void {
if (episode) {
const map = EP_AND_NAME_TO_NPC_TYPE[episode];
NPC_TYPES.push(npc_type);
if (map) {
map.set(simple_name, npc_type);
map.set(ultimate_name, npc_type);
}
if (enemy) {
ENEMY_NPC_TYPES.push(npc_type);
}
NPC_TYPE_DATA[npc_type] = {
@ -252,8 +250,13 @@ function define_npc_type_data(
rare_type,
};
if (enemy) {
ENEMY_NPC_TYPES.push(npc_type);
if (episode) {
const map = EP_AND_NAME_TO_NPC_TYPE[episode];
if (map) {
map.set(simple_name, npc_type);
map.set(ultimate_name, npc_type);
}
}
}

View File

@ -1,4 +1,4 @@
import { walk_qst_files } from "../../../../test/src/utils";
import { walk_qst_files } from "../../../../../test/src/utils";
import { parse_qst, write_qst } from "./qst";
import { Endianness } from "../../Endianness";
import { BufferCursor } from "../../cursor/BufferCursor";

View File

@ -167,16 +167,16 @@ function parse_files(cursor: Cursor, expected_sizes: Map<string, number>): QstCo
expected_size,
cursor: new ResizableBufferCursor(
new ResizableBuffer(expected_size || 10 * 1024),
Endianness.Little
Endianness.Little,
),
chunk_nos: new Set(),
})
}),
);
}
if (file.chunk_nos.has(chunk_no)) {
logger.warn(
`File chunk number ${chunk_no} of file ${file_name} was already encountered, overwriting previous chunk.`
`File chunk number ${chunk_no} of file ${file_name} was already encountered, overwriting previous chunk.`,
);
} else {
file.chunk_nos.add(chunk_no);
@ -188,7 +188,7 @@ function parse_files(cursor: Cursor, expected_sizes: Map<string, number>): QstCo
if (size > 1024) {
logger.warn(
`Data segment size of ${size} is larger than expected maximum size, reading just 1024 bytes.`
`Data segment size of ${size} is larger than expected maximum size, reading just 1024 bytes.`,
);
size = 1024;
}
@ -204,7 +204,7 @@ function parse_files(cursor: Cursor, expected_sizes: Map<string, number>): QstCo
if (cursor.position !== start_position + 1056) {
throw new Error(
`Read ${cursor.position -
start_position} file chunk message bytes instead of expected 1056.`
start_position} file chunk message bytes instead of expected 1056.`,
);
}
}
@ -221,7 +221,7 @@ function parse_files(cursor: Cursor, expected_sizes: Map<string, number>): QstCo
// Check whether the expected size was correct.
if (file.expected_size != null && file.cursor.size !== file.expected_size) {
logger.warn(
`File ${file.name} has an actual size of ${file.cursor.size} instead of the expected size ${file.expected_size}.`
`File ${file.name} has an actual size of ${file.cursor.size} instead of the expected size ${file.expected_size}.`,
);
}
@ -279,7 +279,7 @@ function write_file_headers(cursor: WritableCursor, files: QstContainedFileParam
if (file_name_2.length > 24) {
throw Error(
`File ${file.name} has a file_name_2 length (${file_name_2}) longer than 24 characters.`
`File ${file.name} has a file_name_2 length (${file_name_2}) longer than 24 characters.`,
);
}
@ -305,7 +305,7 @@ function write_file_chunks(cursor: WritableCursor, files: QstContainedFileParam[
cursor,
file_to_chunk.data,
file_to_chunk.no++,
file_to_chunk.name
file_to_chunk.name,
)
) {
done++;
@ -319,7 +319,7 @@ function write_file_chunks(cursor: WritableCursor, files: QstContainedFileParam[
if (file_to_chunk.no !== expected_chunks) {
throw new Error(
`Expected to write ${expected_chunks} chunks for file "${file_to_chunk.name}" but ${file_to_chunk.no} where written.`
`Expected to write ${expected_chunks} chunks for file "${file_to_chunk.name}" but ${file_to_chunk.no} where written.`,
);
}
}
@ -332,7 +332,7 @@ function write_file_chunk(
cursor: WritableCursor,
data: Cursor,
chunk_no: number,
name: string
name: string,
): boolean {
cursor.write_u8_array([28, 4, 19, 0]);
cursor.write_u8(chunk_no);

34
src/core/domain/index.ts Normal file
View File

@ -0,0 +1,34 @@
import { enum_values } from "../enums";
// Chance that a rare enemy variant spawns in place of its regular counterpart.
// NOTE(review): 1/512 matches the commonly cited PSO rare-spawn rate — confirm for Ephinea.
export const RARE_ENEMY_PROB = 1 / 512;

// NOTE(review): presumably the chance that Kondrieu appears instead of the
// regular Episode IV boss variant — confirm against drop/spawn logic.
export const KONDRIEU_PROB = 1 / 10;

/** Servers for which data (item types, drops, ...) can be loaded. */
export enum Server {
    Ephinea = "Ephinea",
}

/** All Server values, in declaration order. */
export const Servers: Server[] = enum_values(Server);

/**
 * The ten PSO section IDs.
 * Numeric enum values follow declaration order; do not reorder members.
 */
export enum SectionId {
    Viridia,
    Greenill,
    Skyly,
    Bluefull,
    Purplenum,
    Pinkal,
    Redria,
    Oran,
    Yellowboze,
    Whitill,
}

/** All SectionId values, in declaration order. */
export const SectionIds: SectionId[] = enum_values(SectionId);

/**
 * Game difficulty levels, ordered from easiest to hardest.
 * Numeric enum values follow declaration order; do not reorder members.
 */
export enum Difficulty {
    Normal,
    Hard,
    VHard,
    Ultimate,
}

/** All Difficulty values, in declaration order. */
export const Difficulties: Difficulty[] = enum_values(Difficulty);

View File

@ -19,7 +19,7 @@ export class WeaponItemType implements ItemType {
readonly max_atp: number,
readonly ata: number,
readonly max_grind: number,
readonly required_atp: number
readonly required_atp: number,
) {}
}
@ -35,7 +35,7 @@ export class ArmorItemType implements ItemType {
readonly max_dfp: number,
readonly mst: number,
readonly hp: number,
readonly lck: number
readonly lck: number,
) {}
}
@ -51,7 +51,7 @@ export class ShieldItemType implements ItemType {
readonly max_dfp: number,
readonly mst: number,
readonly hp: number,
readonly lck: number
readonly lck: number,
) {}
}

View File

@ -1,5 +1,5 @@
import Logger from "js-logger";
import { Server } from "../domain";
import { Server } from "./domain";
const logger = Logger.get("persistence/Persister");

View File

@ -96,7 +96,7 @@ export class Renderer<C extends PerspectiveCamera | OrthographicCamera> {
position.z,
look_at.x,
look_at.y,
look_at.z
look_at.z,
);
}

View File

@ -58,7 +58,7 @@ export class GeometryBuilder {
return new Vector3(
this.positions[3 * index],
this.positions[3 * index + 1],
this.positions[3 * index + 2]
this.positions[3 * index + 2],
);
}
@ -66,7 +66,7 @@ export class GeometryBuilder {
return new Vector3(
this.normals[3 * index],
this.normals[3 * index + 1],
this.normals[3 * index + 2]
this.normals[3 * index + 2],
);
}

View File

@ -26,7 +26,7 @@ const DEFAULT_SKINNED_MATERIAL = new MeshLambertMaterial({
export function create_mesh(
geometry: BufferGeometry,
material?: Material | Material[],
default_material: Material = DEFAULT_MATERIAL
default_material: Material = DEFAULT_MATERIAL,
): Mesh {
return create(geometry, material, default_material, Mesh);
}
@ -34,7 +34,7 @@ export function create_mesh(
export function create_skinned_mesh(
geometry: BufferGeometry,
material?: Material | Material[],
default_material: Material = DEFAULT_SKINNED_MATERIAL
default_material: Material = DEFAULT_SKINNED_MATERIAL,
): SkinnedMesh {
return create(geometry, material, default_material, SkinnedMesh);
}
@ -43,7 +43,7 @@ function create<M extends Mesh>(
geometry: BufferGeometry,
material: Material | Material[] | undefined,
default_material: Material,
mesh_constructor: new (geometry: BufferGeometry, material: Material | Material[]) => M
mesh_constructor: new (geometry: BufferGeometry, material: Material | Material[]) => M,
): M {
const {
created_by_geometry_builder,

View File

@ -19,7 +19,7 @@ export const PSO_FRAME_RATE = 30;
export function create_animation_clip(
nj_object: NjObject<NjModel>,
nj_motion: NjMotion
nj_motion: NjMotion,
): AnimationClip {
const interpolation =
nj_motion.interpolation === NjInterpolation.Spline ? InterpolateSmooth : InterpolateLinear;
@ -40,7 +40,7 @@ export function create_animation_clip(
if (type === NjKeyframeTrackType.Rotation) {
const order = bone.evaluation_flags.zxy_rotation_order ? "ZXY" : "ZYX";
const quat = new Quaternion().setFromEuler(
new Euler(keyframe.value.x, keyframe.value.y, keyframe.value.z, order)
new Euler(keyframe.value.x, keyframe.value.y, keyframe.value.z, order),
);
values.push(quat.x, quat.y, quat.z, quat.w);
@ -55,8 +55,8 @@ export function create_animation_clip(
`.bones[${bone_id}].quaternion`,
times,
values,
interpolation
)
interpolation,
),
);
} else {
const name =
@ -72,6 +72,6 @@ export function create_animation_clip(
return new AnimationClip(
"Animation",
(nj_motion.frame_count - 1) / PSO_FRAME_RATE,
tracks
tracks,
).optimize();
}

View File

@ -1,5 +1,5 @@
import { Bone, BufferGeometry, Euler, Matrix3, Matrix4, Quaternion, Vector2, Vector3 } from "three";
import { vec3_to_threejs } from ".";
import { vec3_to_threejs } from "./index";
import { is_njcm_model, NjModel, NjObject } from "../../data_formats/parsing/ninja";
import { NjcmModel } from "../../data_formats/parsing/ninja/njcm";
import { XjModel } from "../../data_formats/parsing/ninja/xj";
@ -13,7 +13,7 @@ const NO_SCALE = new Vector3(1, 1, 1);
export function ninja_object_to_geometry_builder(
object: NjObject<NjModel>,
builder: GeometryBuilder
builder: GeometryBuilder,
): void {
new GeometryCreator(builder).to_geometry_builder(object);
}
@ -74,7 +74,7 @@ class GeometryCreator {
private object_to_geometry(
object: NjObject<NjModel>,
parent_bone: Bone | undefined,
parent_matrix: Matrix4
parent_matrix: Matrix4,
): void {
const {
no_translate,
@ -91,13 +91,13 @@ class GeometryCreator {
rotation.x,
rotation.y,
rotation.z,
zxy_rotation_order ? "ZXY" : "ZYX"
zxy_rotation_order ? "ZXY" : "ZYX",
);
const matrix = new Matrix4()
.compose(
no_translate ? NO_TRANSLATION : vec3_to_threejs(position),
no_rotate ? NO_ROTATION : new Quaternion().setFromEuler(euler),
no_scale ? NO_SCALE : vec3_to_threejs(scale)
no_scale ? NO_SCALE : vec3_to_threejs(scale),
)
.premultiply(parent_matrix);
@ -178,7 +178,7 @@ class GeometryCreator {
this.builder.add_vertex(
vertex.position,
normal,
mesh.has_tex_coords ? mesh_vertex.tex_coords! : DEFAULT_UV
mesh.has_tex_coords ? mesh_vertex.tex_coords! : DEFAULT_UV,
);
if (i >= 2) {
@ -209,7 +209,7 @@ class GeometryCreator {
this.builder.add_group(
start_index_count,
this.builder.index_count - start_index_count,
mesh.texture_id
mesh.texture_id,
);
}
}
@ -286,7 +286,7 @@ class GeometryCreator {
this.builder.add_group(
start_index_count,
this.builder.index_count - start_index_count,
current_mat_idx
current_mat_idx,
);
}
}

View File

@ -41,7 +41,7 @@ export function xvm_texture_to_texture(tex: XvmTexture): Texture {
],
tex.width,
tex.height,
format
format,
);
texture_3js.minFilter = LinearFilter;

View File

@ -1,16 +1,16 @@
import { observable } from "mobx";
import {
ItemType,
Server,
WeaponItemType,
ArmorItemType,
ItemType,
ShieldItemType,
ToolItemType,
UnitItemType,
} from "../domain";
WeaponItemType,
} from "../domain/items";
import { Loadable } from "../Loadable";
import { ServerMap } from "./ServerMap";
import { ItemTypeDto } from "../dto";
import { Server } from "../domain";
export class ItemTypeStore {
private id_to_item_type: ItemType[] = [];
@ -23,7 +23,7 @@ export class ItemTypeStore {
load = async (server: Server): Promise<ItemTypeStore> => {
const response = await fetch(
`${process.env.PUBLIC_URL}/itemTypes.${Server[server].toLowerCase()}.json`
`${process.env.PUBLIC_URL}/itemTypes.${Server[server].toLowerCase()}.json`,
);
const data: ItemTypeDto[] = await response.json();
@ -41,7 +41,7 @@ export class ItemTypeStore {
item_type_dto.maxAtp,
item_type_dto.ata,
item_type_dto.maxGrind,
item_type_dto.requiredAtp
item_type_dto.requiredAtp,
);
break;
case "armor":
@ -56,7 +56,7 @@ export class ItemTypeStore {
item_type_dto.maxDfp,
item_type_dto.mst,
item_type_dto.hp,
item_type_dto.lck
item_type_dto.lck,
);
break;
case "shield":
@ -71,7 +71,7 @@ export class ItemTypeStore {
item_type_dto.maxDfp,
item_type_dto.mst,
item_type_dto.hp,
item_type_dto.lck
item_type_dto.lck,
);
break;
case "unit":

View File

@ -1,6 +1,6 @@
import { computed } from "mobx";
import { Server } from "../domain";
import { application_store } from "./ApplicationStore";
import { application_store } from "../../application/stores/ApplicationStore";
import { EnumMap } from "../enums";
/**

View File

@ -107,8 +107,8 @@ export class BigTable<T> extends Component<{
height="18"
viewBox="0 0 24 24"
>
<path d="M7 14l5-5 5 5z"></path>
<path d="M0 0h24v24H0z" fill="none"></path>
<path d="M7 14l5-5 5 5z" />
<path d="M0 0h24v24H0z" fill="none" />
</svg>
);
} else {
@ -119,8 +119,8 @@ export class BigTable<T> extends Component<{
height="18"
viewBox="0 0 24 24"
>
<path d="M7 10l5 5 5-5z"></path>
<path d="M0 0h24v24H0z" fill="none"></path>
<path d="M7 10l5 5 5-5z" />
<path d="M0 0h24v24H0z" fill="none" />
</svg>
);
}

View File

@ -4,7 +4,7 @@ export class Action {
constructor(
readonly description: string,
readonly undo: () => void,
readonly redo: () => void
readonly redo: () => void,
) {}
}

View File

@ -1,6 +1,6 @@
import { observable, IObservableArray, computed } from "mobx";
import { WeaponItem, WeaponItemType, ArmorItemType, ShieldItemType } from "../domain";
import { item_type_stores } from "./ItemTypeStore";
import { WeaponItem, WeaponItemType, ArmorItemType, ShieldItemType } from "../../core/domain/items";
import { item_type_stores } from "../../core/stores/ItemTypeStore";
const NORMAL_DAMAGE_FACTOR = 0.2 * 0.9;
const HEAVY_DAMAGE_FACTOR = NORMAL_DAMAGE_FACTOR * 1.89;
@ -81,19 +81,19 @@ class Weapon {
class DpsCalcStore {
@computed get weapon_types(): WeaponItemType[] {
return item_type_stores.current.value.item_types.filter(
it => it instanceof WeaponItemType
it => it instanceof WeaponItemType,
) as WeaponItemType[];
}
@computed get armor_types(): ArmorItemType[] {
return item_type_stores.current.value.item_types.filter(
it => it instanceof ArmorItemType
it => it instanceof ArmorItemType,
) as ArmorItemType[];
}
@computed get shield_types(): ShieldItemType[] {
return item_type_stores.current.value.item_types.filter(
it => it instanceof ShieldItemType
it => it instanceof ShieldItemType,
) as ShieldItemType[];
}

View File

@ -1,10 +1,10 @@
import { InputNumber } from "antd";
import { observer } from "mobx-react";
import React, { Component, ReactNode } from "react";
import { ArmorItemType, ShieldItemType, WeaponItemType } from "../../domain";
import { dps_calc_store } from "../../stores/DpsCalcStore";
import { item_type_stores } from "../../stores/ItemTypeStore";
import { BigSelect } from "../BigSelect";
import { ArmorItemType, ShieldItemType, WeaponItemType } from "../../core/domain/items";
import { dps_calc_store } from "../stores/DpsCalcStore";
import { item_type_stores } from "../../core/stores/ItemTypeStore";
import { BigSelect } from "../../core/ui/BigSelect";
@observer
export class DpsCalcComponent extends Component {

View File

@ -0,0 +1,73 @@
import { Episode } from "../../core/data_formats/parsing/quest/Episode";
import { NpcType } from "../../core/data_formats/parsing/quest/npc_types";
import { computed, observable } from "mobx";
import { ItemType } from "../../core/domain/items";
import { Difficulty, SectionId } from "../../core/domain";
/**
 * A method of hunting enemies: a quest together with timing information,
 * used by the hunt optimizer. Episode and enemy counts are derived from the
 * underlying quest.
 */
export class HuntMethod {
    readonly id: string;
    readonly name: string;
    // Copied from quest.episode at construction time.
    readonly episode: Episode;
    readonly quest: SimpleQuest;
    // Copied from quest.enemy_counts at construction time (same Map instance).
    readonly enemy_counts: Map<NpcType, number>;
    /**
     * The time it takes to complete the quest in hours.
     */
    readonly default_time: number;
    /**
     * The time it takes to complete the quest in hours as specified by the user.
     */
    @observable user_time?: number;

    // Effective time: the user-provided override wins over the default when set.
    @computed get time(): number {
        return this.user_time != null ? this.user_time : this.default_time;
    }

    /**
     * @param id - unique identifier; must be non-empty.
     * @param name - display name; must be non-empty.
     * @param quest - the quest this method runs; must be provided.
     * @param default_time - completion time in hours; must be greater than zero.
     * @throws Error when any argument fails validation.
     */
    constructor(id: string, name: string, quest: SimpleQuest, default_time: number) {
        if (!id) throw new Error("id is required.");
        if (default_time <= 0) throw new Error("default_time must be greater than zero.");
        if (!name) throw new Error("name is required.");
        if (!quest) throw new Error("quest is required.");
        this.id = id;
        this.name = name;
        this.episode = quest.episode;
        this.quest = quest;
        this.enemy_counts = quest.enemy_counts;
        this.default_time = default_time;
    }
}
/**
 * Minimal representation of a quest as needed by the hunt optimizer:
 * identity, episode and per-enemy-type counts.
 */
export class SimpleQuest {
    /**
     * @param id - quest ID; must be truthy (note: 0 is rejected).
     * @param name - quest name; must be non-empty.
     * @param episode - episode the quest belongs to.
     * @param enemy_counts - amount of each enemy type the quest contains; must be provided.
     * @throws Error when any argument fails validation.
     */
    constructor(
        readonly id: number,
        readonly name: string,
        readonly episode: Episode,
        readonly enemy_counts: Map<NpcType, number>,
    ) {
        if (!id) throw new Error("id is required.");
        if (!name) throw new Error("name is required.");
        // Fix: message previously said "enemyCounts", the pre-rename camelCase
        // name; align it with the snake_case parameter and sibling messages.
        if (!enemy_counts) throw new Error("enemy_counts is required.");
    }
}
/**
 * Common shape of a single item drop.
 */
type ItemDrop = {
    item_type: ItemType;
    // NOTE(review): semantics inferred from EnemyDrop, where
    // rate = anything_rate * rare_rate — confirm: chance that the source
    // drops anything at all...
    anything_rate: number;
    // ...and chance of this particular item, given that something dropped.
    rare_rate: number;
};
/**
 * A single enemy item drop: the chance that a given NPC type drops a given
 * item type for a given difficulty/section ID combination.
 */
export class EnemyDrop implements ItemDrop {
    // Combined drop rate: anything_rate * rare_rate.
    readonly rate: number;

    constructor(
        readonly difficulty: Difficulty,
        readonly section_id: SectionId,
        readonly npc_type: NpcType,
        readonly item_type: ItemType,
        readonly anything_rate: number,
        readonly rare_rate: number,
    ) {
        this.rate = anything_rate * rare_rate;
    }
}

View File

@ -1,5 +1,6 @@
import { Persister } from "./Persister";
import { Server, HuntMethod } from "../domain";
import { Persister } from "../../core/persistence";
import { Server } from "../../core/domain";
import { HuntMethod } from "../domain";
const METHOD_USER_TIMES_KEY = "HuntMethodStore.methodUserTimes";
@ -18,11 +19,11 @@ class HuntMethodPersister extends Persister {
async load_method_user_times(
hunt_methods: HuntMethod[],
server: Server
server: Server,
): Promise<HuntMethod[]> {
const user_times = await this.load_for_server<PersistedUserTimes>(
server,
METHOD_USER_TIMES_KEY
METHOD_USER_TIMES_KEY,
);
if (user_times) {

View File

@ -1,7 +1,7 @@
import { Server } from "../domain";
import { Server } from "../../core/domain";
import { WantedItem } from "../stores/HuntOptimizerStore";
import { item_type_stores } from "../stores/ItemTypeStore";
import { Persister } from "./Persister";
import { item_type_stores } from "../../core/stores/ItemTypeStore";
import { Persister } from "../../core/persistence";
const WANTED_ITEMS_KEY = "HuntOptimizerStore.wantedItems";
@ -14,8 +14,8 @@ class HuntOptimizerPersister extends Persister {
({ item_type, amount }): PersistedWantedItem => ({
itemTypeId: item_type.id,
amount,
})
)
}),
),
);
}
@ -24,7 +24,7 @@ class HuntOptimizerPersister extends Persister {
const persisted_wanted_items = await this.load_for_server<PersistedWantedItem[]>(
server,
WANTED_ITEMS_KEY
WANTED_ITEMS_KEY,
);
const wanted_items: WantedItem[] = [];

View File

@ -1,11 +1,12 @@
import Logger from "js-logger";
import { autorun, IReactionDisposer, observable } from "mobx";
import { HuntMethod, Server, SimpleQuest } from "../domain";
import { QuestDto } from "../dto";
import { Loadable } from "../Loadable";
import { Server } from "../../core/domain";
import { QuestDto } from "../../core/dto";
import { Loadable } from "../../core/Loadable";
import { hunt_method_persister } from "../persistence/HuntMethodPersister";
import { ServerMap } from "./ServerMap";
import { NpcType } from "../data_formats/parsing/quest/npc_types";
import { ServerMap } from "../../core/stores/ServerMap";
import { NpcType } from "../../core/data_formats/parsing/quest/npc_types";
import { HuntMethod, SimpleQuest } from "../domain";
const logger = Logger.get("stores/HuntMethodStore");

View File

@ -3,20 +3,20 @@ import { autorun, computed, IObservableArray, observable } from "mobx";
import {
Difficulties,
Difficulty,
HuntMethod,
ItemType,
KONDRIEU_PROB,
RARE_ENEMY_PROB,
SectionId,
SectionIds,
} from "../domain";
} from "../../core/domain";
import { hunt_optimizer_persister } from "../persistence/HuntOptimizerPersister";
import { application_store } from "./ApplicationStore";
import { application_store } from "../../application/stores/ApplicationStore";
import { hunt_method_store } from "./HuntMethodStore";
import { item_drop_stores } from "./ItemDropStore";
import { item_type_stores } from "./ItemTypeStore";
import { Episode } from "../data_formats/parsing/quest/Episode";
import { npc_data, NpcType } from "../data_formats/parsing/quest/npc_types";
import { item_type_stores } from "../../core/stores/ItemTypeStore";
import { Episode } from "../../core/data_formats/parsing/quest/Episode";
import { npc_data, NpcType } from "../../core/data_formats/parsing/quest/npc_types";
import { HuntMethod } from "../domain";
import { ItemType } from "../../core/domain/items";
export class WantedItem {
@observable readonly item_type: ItemType;

View File

@ -1,11 +1,12 @@
import { observable } from "mobx";
import { Difficulties, Difficulty, EnemyDrop, SectionId, SectionIds, Server } from "../domain";
import { EnemyDropDto } from "../dto";
import { Loadable } from "../Loadable";
import { item_type_stores } from "./ItemTypeStore";
import { ServerMap } from "./ServerMap";
import { Difficulties, Difficulty, SectionId, SectionIds, Server } from "../../core/domain";
import { EnemyDropDto } from "../../core/dto";
import { Loadable } from "../../core/Loadable";
import { item_type_stores } from "../../core/stores/ItemTypeStore";
import { ServerMap } from "../../core/stores/ServerMap";
import Logger from "js-logger";
import { NpcType } from "../data_formats/parsing/quest/npc_types";
import { NpcType } from "../../core/data_formats/parsing/quest/npc_types";
import { EnemyDrop } from "../domain";
const logger = Logger.get("stores/ItemDropStore");

View File

@ -3,12 +3,16 @@ import { observer } from "mobx-react";
import moment, { Moment } from "moment";
import React, { Component, ReactNode } from "react";
import { AutoSizer, Index, SortDirection } from "react-virtualized";
import { hunt_method_store } from "../../stores/HuntMethodStore";
import { BigTable, Column, ColumnSort } from "../BigTable";
import { hunt_method_store } from "../stores/HuntMethodStore";
import { BigTable, Column, ColumnSort } from "../../core/ui/BigTable";
import styles from "./MethodsComponent.css";
import { HuntMethod } from "../../domain";
import { Episode } from "../../data_formats/parsing/quest/Episode";
import { ENEMY_NPC_TYPES, npc_data, NpcType } from "../../data_formats/parsing/quest/npc_types";
import { Episode } from "../../core/data_formats/parsing/quest/Episode";
import {
ENEMY_NPC_TYPES,
npc_data,
NpcType,
} from "../../core/data_formats/parsing/quest/npc_types";
import { HuntMethod } from "../domain";
@observer
export class MethodsComponent extends Component {

View File

@ -2,13 +2,13 @@ import { computed } from "mobx";
import { observer } from "mobx-react";
import React, { Component, ReactNode } from "react";
import { AutoSizer, Index } from "react-virtualized";
import { Difficulty, SectionId } from "../../domain";
import { hunt_optimizer_store, OptimalMethod } from "../../stores/HuntOptimizerStore";
import { BigTable, Column } from "../BigTable";
import { SectionIdIcon } from "../SectionIdIcon";
import { hours_to_string } from "../time";
import { Difficulty, SectionId } from "../../core/domain";
import { hunt_optimizer_store, OptimalMethod } from "../stores/HuntOptimizerStore";
import { BigTable, Column } from "../../core/ui/BigTable";
import { SectionIdIcon } from "../../core/ui/SectionIdIcon";
import { hours_to_string } from "../../core/ui/time";
import styles from "./OptimizationResultComponent.css";
import { Episode } from "../../data_formats/parsing/quest/Episode";
import { Episode } from "../../core/data_formats/parsing/quest/Episode";
@observer
export class OptimizationResultComponent extends Component {

Some files were not shown because too many files have changed in this diff Show More