from ChocolateBirdData.reference_implementation import get_base_structarraytypes, parse_struct_definitions_from_tsv_filename, get_structarraytype, LeftoverBits, ReadBuffer, WriteBuffer


def flatten_keys(d: dict, prefix: str = '') -> dict:
    # Flatten a nested dict into a single-level dict, joining nested keys with '.'
    output = {}
    for k, v in d.items():
        if isinstance(v, dict):
            flat = flatten_keys(v, f'{prefix}{k}.')
            for k2, v2 in flat.items():
                output[k2] = v2
        else:
            output[f'{prefix}{k}'] = v
    return output

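# Illustrative example (hypothetical data, not part of the original script):
# flatten_keys({'Stats': {'HP': 100, 'MP': 5}, 'Name': 'Goblin'}) returns
# {'Stats.HP': 100, 'Stats.MP': 5, 'Name': 'Goblin'}, the flat row shape dump_tsv() writes out.
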
def unflatten_keys(d: dict) -> dict:
    # Reverse of flatten_keys: split dotted keys back out into nested dicts.
    output = {}
    for k, v in d.items():
        keysplit = k.split('.')
        target_dict = output
        for prefix in keysplit[:-1]:
            if prefix not in target_dict:
                target_dict[prefix] = {}
            target_dict = target_dict[prefix]
        target_dict[keysplit[-1]] = v  # Assign under the final key segment, not the full dotted key.
    return output

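# Illustrative round trip (hypothetical data): unflatten_keys({'Stats.HP': 100, 'Name': 'Goblin'})
# returns {'Stats': {'HP': 100}, 'Name': 'Goblin'}, restoring the nesting that flatten_keys()
# collapsed, which is how load_tsv() rebuilds structured entries from flat TSV columns.
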
def dump_tsv(filename, table, id_column=True) -> None:
    table_flat = [flatten_keys(d) for d in table]

    with open(filename, 'w') as file:
        headers = list(table_flat[0].keys())
        if id_column:
            hex_digits = len(f'{len(table_flat)-1:X}')  # See how long the hex representation of the last index will be, so we can zero-pad the rest to match.
            hex_format = f'0{hex_digits}X'
            file.write('\t'.join(['ID'] + headers) + '\n')
            for i, entry in enumerate(table_flat):
                file.write('\t'.join([f'0x{i:{hex_format}}'] + [str(entry[key]) for key in headers]) + '\n')
        else:
            file.write('\t'.join(headers) + '\n')
            for entry in table_flat:
                file.write('\t'.join([str(entry[key]) for key in headers]) + '\n')

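# Sketch of the resulting file (hypothetical two-entry table): dump_tsv('Monsters.tsv', data)
# would write tab-separated lines along the lines of:
#   ID      Stats.HP        Name
#   0x0     100             Goblin
#   0x1     120             Goblin Captain
# One row per entry, IDs as zero-padded hex, nested fields flattened into dotted column names.
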
def try_int(v):
    # Parse ints in any base Python recognizes ('42', '0x1F', '0b101', ...); leave other values untouched.
    try:
        return int(v, 0)
    except (ValueError, TypeError):
        return v

def load_tsv(filename) -> list:
    with open(filename, 'r') as file:
        lines = file.read().rstrip().split('\n')
    headers = lines[0].split('\t')
    output = []
    for line in lines[1:]:
        entry = {key: try_int(value) for key, value in zip(headers, line.split('\t'))}
        output.append(unflatten_keys(entry))
    return output

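# Note: tables written by dump_tsv() include the 'ID' column, so each entry loaded here carries an
# integer 'ID' field (try_int() parses the '0x..' strings); FF5SNESHandler.build_partial() below
# uses that field to pick which existing entry a row overwrites.
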
def load_ff5_snes_struct_definitions() -> dict:
    existing_structs = get_base_structarraytypes()
    parse_struct_definitions_from_tsv_filename('ChocolateBirdData/structs_SNES_stubs.tsv', existing_structs)
    parse_struct_definitions_from_tsv_filename('ChocolateBirdData/5/structs/SNES_stubs.tsv', existing_structs)
    parse_struct_definitions_from_tsv_filename('ChocolateBirdData/5/structs/SNES.tsv', existing_structs)
    parse_struct_definitions_from_tsv_filename('ChocolateBirdData/5/structs/SNES_save.tsv', existing_structs)
    return existing_structs

class FF5SNESHandler:
    struct_definitions: dict = load_ff5_snes_struct_definitions()
    addresses: dict = {entry['Label']: entry for entry in load_tsv('ChocolateBirdData/5/addresses_SNES_PSX.tsv')}

    def extract(self, table: str, in_buffer) -> list[dict]:
        # Deserialize a table
        leftover_bits = LeftoverBits()
        entry = self.addresses[table]  # Remember to try/catch
        offset = entry['SNES']
        buf = ReadBuffer(in_buffer, offset)
        return get_structarraytype(entry['format'], self.struct_definitions).get_value(buf, leftover_bits)

    def build(self, table: str, new_data: list[dict], out_buffer):
        # Serialize complete data. This WILL fail if the input data is incomplete.
        leftover_bits = LeftoverBits()
        entry = self.addresses[table]  # Remember to try/catch
        offset = entry['SNES']
        buf = WriteBuffer(out_buffer, offset)
        get_structarraytype(entry['format'], self.struct_definitions).put_value(buf, new_data, leftover_bits)

    def build_partial(self, table: str, new_data: list[dict], in_buffer, out_buffer):
        # Safely merge partial data over the existing data, then serialize it.
        existing_data = self.extract(table, in_buffer)
        for i, new in enumerate(new_data):
            entry_id = new.get('ID', i)  # Fall back to list position when no explicit ID column is present.
            for k, v in new.items():
                if k != 'ID':
                    existing_data[entry_id][k] = v
        self.build(table, existing_data, out_buffer)

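# Minimal usage sketch (the table label and file paths here are hypothetical; real labels live in
# ChocolateBirdData/5/addresses_SNES_PSX.tsv):
#   handler = FF5SNESHandler()
#   with open('ff5.sfc', 'rb') as f:
#       rom = bytearray(f.read())
#   monsters = handler.extract('MonsterStats', rom)              # list of nested dicts
#   dump_tsv('project/MonsterStats.tsv', monsters)               # dump to TSV for hand-editing
#   edited = load_tsv('project/MonsterStats.tsv')
#   patched = bytearray(rom)
#   handler.build_partial('MonsterStats', edited, rom, patched)  # merge edits over the original data
#   with open('project/rom.sfc', 'wb') as f:
#       f.write(patched)
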
if __name__ == '__main__':
    from argparse import ArgumentParser
    parser = ArgumentParser(description='The ROMhacking Table Compiler.')
    parser.add_argument('action', choices=['extract', 'build'])
    parser.add_argument('rom', help='The ROM to use as a basis for extracting data.')
    parser.add_argument('project', help='The project folder to extract data to, or compile data from.')
    parser.add_argument('tables', nargs='*', help='Specify which tables to extract or compile, separated by spaces. If left empty, nothing will be extracted, or all tables in a project will be compiled. See the labels in https://git.ufeff.net/birdulon/ChocolateBirdData/src/branch/master/5/addresses_SNES_PSX.tsv for a list of values which may be used, though bear in mind things such as graphics and maps are currently not supported in a sensible way.')
    args = parser.parse_args()

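    # Example invocations (script and file names are hypothetical):
    #   python tabcomp.py extract ff5.sfc myproject/ MonsterStats
    #   python tabcomp.py build ff5.sfc myproject/
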
    if args.project:
        project_folder = args.project.rstrip('/') + '/'
        project_folder_len = len(project_folder)

    from glob import glob
    from configparser import ConfigParser
    # Load the project config if one exists, otherwise create it with these defaults.
    config = ConfigParser()
    config['TabComp.Project'] = {'Game': 'Final Fantasy V', 'Platform': 'SNES', 'Region': 'any'}
    try:
        with open(f'{project_folder}project.ini', 'r') as configfile:
            config.read_file(configfile)
    except FileNotFoundError:
        pass
    with open(f'{project_folder}project.ini', 'w') as configfile:
        config.write(configfile)

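    # With the defaults above, the generated project.ini looks roughly like:
    #   [TabComp.Project]
    #   game = Final Fantasy V
    #   platform = SNES
    #   region = any
    # (ConfigParser lowercases option names when writing.)
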
    def run():
        game = config['TabComp.Project']['Game']
        platform = config['TabComp.Project']['Platform']
        if game != 'Final Fantasy V' or platform != 'SNES':
            print(f'Unsupported ROM for project - "{game}" on "{platform}"')
            return
        handler = FF5SNESHandler()
        if not args.rom:
            print('No ROM specified!')
            return
        with open(args.rom, 'rb') as file:
            rom_bytes = file.read()
        in_buffer = bytearray(rom_bytes)
        match args.action:
            case 'extract':
                if not args.tables:
                    print('Must specify tables to extract!')
                    return
                tables = [table for table in args.tables]
                print(f'Attempting to extract tables {tables}')
                for table in tables:
                    data = handler.extract(table, in_buffer)
                    dump_tsv(f'{project_folder}{table}.tsv', data)
                print('Done extracting!')

            case 'build':
                tables = [table for table in args.tables]
                if not args.tables:
                    # Find all .tsv files in the project folder
                    tables = [file[project_folder_len:-4] for file in glob(f'{project_folder}*.tsv')]
                print(f'Attempting to build tables {tables}')
                out_buffer = bytearray(rom_bytes)
                for table in tables:
                    data = load_tsv(f'{project_folder}{table}.tsv')
                    handler.build_partial(table, data, in_buffer, out_buffer)
                out_filename = f'{project_folder}rom.sfc'
                with open(out_filename, 'wb') as file:
                    file.write(out_buffer)
                print(f'Compiled to "{out_filename}", make your own .ips from this')
            case _:
                print('Invalid action!')
                return
    run()