# Mirror of https://github.com/AntonioND/architectds.git
# Synced 2025-06-18 16:45:32 -04:00
#
# Upstream commit note: thanks to ninja-build, all we need to do is run one
# extra tool pass (https://ninja-build.org/manual.html#_extra_tools) to get a
# compile_commands.json from a build.ninja file. This commit adds a new
# argument (-j or --compdb) to let users generate a compile_commands.json
# file.
# SPDX-License-Identifier: MIT
#
# Copyright (c) 2024-2025 Antonio Niño Díaz <antonio_nd@outlook.com>

# Useful ninja-build commands:
#
# - Build:
#
#       export BLOCKSDS=/opt/blocksds/core
#       ninja
#
# - Clean:
#
#       ninja -t clean
#
# - Dependency graph:
#
#       sudo apt install graphviz
#       ninja -t graph | dot -Tpng -ograph.png
#
# GRF format documentation:
#
# - https://www.coranac.com/man/grit/html/grit.htm
import json
import os

# Author and version reported in the generated ninja files.
AUTHOR_STRING = 'Antonio Niño Díaz'
VERSION_STRING = '0.3.1'

# Locations of the BlocksDS core, the BlocksDS external tools, and the
# Wonderful toolchain. Each one can be overridden with the environment
# variable of the same name; otherwise the default install path is used.
BLOCKSDS = os.environ.get('BLOCKSDS', '/opt/blocksds/core')
BLOCKSDSEXT = os.environ.get('BLOCKSDSEXT', '/opt/blocksds/external')
WONDERFUL_TOOLCHAIN = os.environ.get('WONDERFUL_TOOLCHAIN', '/opt/wonderful')
class CJSONDecoder(json.JSONDecoder):
    '''
    Decoder of CJSON (JSON with C-style comments).
    '''

    def __init__(self, **kw):
        super().__init__(**kw)

    def decode(self, s: str):
        # Drop everything after '//' on every line, then decode the result as
        # regular JSON.
        # NOTE(review): this also strips '//' that appears inside string
        # literals (e.g. URLs) — confirm input files never rely on that.
        stripped = '\n'.join(line.split('//')[0] for line in s.split('\n'))
        return super().decode(stripped)
def load_json(path):
    '''
    Load the JSON file at 'path' (C-style '//' comments are allowed) and
    return the decoded object.
    '''
    with open(path) as json_file:
        return json.load(json_file, cls=CJSONDecoder)
def get_parent_dir(path):
    '''
    From a path that includes a folder and a file name it returns the path of
    the folder where the file is located. If the path only includes a file
    name, it returns None.

    Example: get_parent_dir('my/dir/file.png') -> 'my/dir'
    '''
    parent, _ = os.path.split(path)
    # An empty directory component means the path was a bare file name.
    return parent if parent else None
def get_file_name(path):
    '''
    From a path that includes a folder and a file name it returns the file
    name. If the path only includes a file name, it returns it unmodified.
    If the path has no file name component at all (e.g. it ends with a path
    separator), it returns None.

    Example: get_file_name('my/dir/file.png') -> 'file.png'
    '''
    name = os.path.split(path)[1]
    # Bug fix: this used to check 'len(name) > 1', which wrongly returned
    # None for single-character file names such as 'a'.
    if len(name) > 0:
        return name
    else:
        return None
def replace_ext(path, old_ext, new_ext):
    '''
    Strip 'old_ext' from the end of 'path' (when it is present) and append
    'new_ext' to the result.

    Example: replace_ext('file.png', '.png', '') -> 'file'
    '''
    base = path.removesuffix(old_ext)
    return base + new_ext
def remove_ext(path):
    '''
    Return 'path' without its extension, or None if nothing remains after
    removing it.
    '''
    base, _ = os.path.splitext(path)
    # An empty result means the whole path was an extension (or empty).
    return base if base else None
def gen_input_file_list(dir_path, extensions=None):
    '''
    Generate a list of paths to all files found (recursively) inside the
    directory 'dir_path'. By default it looks for files with any extension,
    but a string, tuple or list of extensions may also be provided.
    For example:

        gen_input_file_list('audio', extensions=['.wav', '.mod', '.s3m', '.it', '.xm'])
    '''
    # Bug fix: str.endswith() accepts a str or a tuple, but not a list, so
    # the call in the docstring example used to raise TypeError. Convert any
    # other iterable to a tuple once, up front.
    if extensions is not None and not isinstance(extensions, (str, tuple)):
        extensions = tuple(extensions)

    in_files = []
    for root, _dirs, files in os.walk(dir_path):
        for _file in files:
            if extensions is not None and not _file.endswith(extensions):
                continue
            in_files.append(os.path.join(root, _file))

    return in_files
def gen_out_file_list(in_files, in_prefix, out_prefix, in_suffix, out_suffix):
    '''
    Take a list of input file paths and return a list of objects with the
    attributes 'in_path' (the path provided as input) and 'out_path'. The out
    path is built by removing 'in_prefix' and 'in_suffix' (when present) and
    adding 'out_prefix' and 'out_suffix'.
    '''

    class InOutFile():

        def __init__(self, in_path, out_path):
            self.in_path = in_path
            self.out_path = out_path

    result = []
    for path in in_files:
        # Replace the prefix first, then the suffix, preserving the original
        # order of operations.
        renamed = out_prefix + path.removeprefix(in_prefix)
        renamed = renamed.removesuffix(in_suffix) + out_suffix
        result.append(InOutFile(path, renamed))

    return result
class GenericBinary():
    '''
    Class that defines any binary that may be built as a combination of multiple
    inputs. For example, this may represent a CPU binary, or a filesystem image.
    '''

    def __init__(self, flag_assets_name):
        # Name of the phony ninja target used as a barrier for the converted
        # assets of this binary.
        self.flag_assets_name = flag_assets_name
        # Accumulated text of the ninja build file generated for this binary.
        self.contents = ''
        # Set of output directory paths that need 'makedir' build rules.
        self.dir_targets = set()

    def print(self, string):
        '''
        Add contents to the rule container of this binary.
        '''
        self.contents += string

    def save_to_file(self, out_path='build.ninja'):
        '''
        Save to a file all the rules generated for a binary.
        '''
        with open(out_path, 'w') as f:
            f.write(self.contents)

    def run_command_line_arguments(self, args=None, ninja_file_path='build.ninja',
                                   graph_png_path='graph.png'):
        '''
        Function that parses command line arguments to help the user build the
        ROM, generate the ninja build file, clean the project, or generate a
        dependency graph in PNG format. It will use sys.argv unless the caller
        specifies a custom list of arguments.
        '''
        # Imported locally so that simply importing this module doesn't pull
        # in tools only needed when running as a command line frontend.
        import os
        import shutil
        import subprocess
        import sys

        if args is None:
            args = sys.argv

        # Each flag may be given in short or long form.
        build = '-b' in args or '--build' in args
        clean = '-c' in args or '--clean' in args
        graph = '-g' in args or '--graph' in args
        help_ = '-h' in args or '--help' in args
        compdb = '-j' in args or '--compdb' in args
        ninja = '-n' in args or '--ninja' in args

        # Build ROM if the script has been called with no arguments.
        if not (build or clean or compdb or graph or ninja or help_):
            build = True

        # If there is any argument that requires the ninja file, generate it.
        if build or compdb or graph:
            ninja = True

        if help_:
            print('ArchitectDS: Build system for NDS that uses ninja-build.')
            print('')
            print('Options:')
            print('')
            print(' -b / --build : Generate ninja file and build ROM.')
            print(' -c / --clean : Clean build outputs and ninja file.')
            print(' -g / --graph : Generate dependency graph as a PNG file.')
            print(' -n / --ninja : Only generate ninja file.')
            print(' -j / --compdb : Generate compile_commands.json.')
            print(' -h / --help : Show this message.')
            print('')
            print('If no option is used, the tool will act the same as if')
            print('--build had been used.')
            return

        if clean:
            print('[*] CLEAN')
            # Let ninja delete its own outputs before removing the ninja file.
            if os.path.isfile(ninja_file_path):
                subprocess.run(['ninja', '-f', ninja_file_path, '-t', 'clean'])
                os.remove(ninja_file_path)

            if os.path.isdir('build'):
                shutil.rmtree('build', ignore_errors=True)

            if os.path.isfile(graph_png_path):
                os.remove(graph_png_path)

        if ninja:
            print('[*] NINJA')
            self.save_to_file(out_path=ninja_file_path)

        if build:
            print('[*] BUILD')
            # Pass BLOCKSDS explicitly so the generated rules resolve the same
            # path that this script resolved.
            my_env = os.environ.copy()
            my_env["BLOCKSDS"] = BLOCKSDS
            subprocess.run(['ninja', '-f', ninja_file_path], env=my_env)

        if compdb:
            print('[*] COMPDB')
            # 'ninja -t compdb' prints the compilation database to stdout;
            # capture it and write it to compile_commands.json.
            proc = subprocess.run(['ninja', '-f', ninja_file_path, '-t', 'compdb'],
                                  encoding='utf-8', stdout=subprocess.PIPE)
            with open('compile_commands.json', 'w') as f:
                f.write(proc.stdout)

        if graph:
            print('[*] GRAPH')
            # Pipe 'ninja -t graph' (graphviz source) into 'dot' to render a
            # PNG file.
            ninja_process = subprocess.Popen(
                ['ninja', '-f', ninja_file_path, '-t', 'graph'],
                stdout=subprocess.PIPE)
            dot_process = subprocess.Popen(['dot', '-Tpng', '-o' + graph_png_path],
                                           stdin=ninja_process.stdout,
                                           stdout=subprocess.PIPE)
            ninja_process.stdout.close()  # enable write error in ninja if dot dies
            out, err = dot_process.communicate()

    def add_dir_target(self, new_dir):
        '''
        Add a list of directories to the list of directories that will contain
        build results for this binary.
        '''
        self.dir_targets.add(new_dir)

        # Add parent dirs as well
        parent = get_parent_dir(new_dir)
        if parent is not None:
            self.add_dir_target(parent)

    def _gen_rules_build_directories(self):
        '''
        Generate rules to make all output directories in the right order with
        the right dependencies on other directories.
        '''
        # Sort the list of directories so that parent directories appear last.
        # This will help the clean target delete all directories in the right
        # order (subdirectories before the directory they belong to).
        dir_list = list(self.dir_targets)
        dir_list.sort(reverse=True)

        for dir_target in dir_list:
            # Each dir depends on the parent dir
            parent = get_parent_dir(dir_target)
            if parent is None:
                self.print(
                    f'build {dir_target}: makedir\n'
                    '\n'
                )
            else:
                self.print(
                    f'build {dir_target}: makedir || {parent}\n'
                    '\n'
                )

    def _gen_rules_tools(self):
        '''
        This generates rules for all possible tools used to generate NDS ROMs and
        saves it to the object.
        '''

        # Global ninja variables: toolchain locations and tool paths.
        self.print(
            f'# File generated by ArchitectDS (Version {VERSION_STRING})\n'
            '\n'
            f'BLOCKSDS = {BLOCKSDS}\n'
            f'BLOCKSDSEXT = {BLOCKSDSEXT}\n'
            f'WONDERFUL_TOOLCHAIN = {WONDERFUL_TOOLCHAIN}\n'
            '\n'
            'ARM_NONE_EABI_PATH = ${WONDERFUL_TOOLCHAIN}/toolchain/gcc-arm-none-eabi/bin/\n'
            'LLVM_TEAK_PATH = ${WONDERFUL_TOOLCHAIN}/toolchain/llvm-teak/bin/\n'
            '\n'
            'PREFIX = ${ARM_NONE_EABI_PATH}arm-none-eabi-\n'
            'CC_ARM = ${PREFIX}gcc\n'
            'CXX_ARM = ${PREFIX}g++\n'
            '\n'
            'CC_TEAK = ${LLVM_TEAK_PATH}clang\n'
            'CXX_TEAK = ${LLVM_TEAK_PATH}clang++\n'
            'LD_TEAK = ${LLVM_TEAK_PATH}ld.lld\n'
            '\n'
            'BIN2C = ${BLOCKSDS}/tools/bin2c/bin2c\n'
            'GRIT = ${BLOCKSDS}/tools/grit/grit\n'
            'SQUEEZERW = ${BLOCKSDS}/tools/squeezer/squeezerw\n'
            'MMUTIL = ${BLOCKSDS}/tools/mmutil/mmutil\n'
            'NDSTOOL = ${BLOCKSDS}/tools/ndstool/ndstool\n'
            'TEAKTOOL = ${BLOCKSDS}/tools/teaktool/teaktool\n'
            'DSLTOOL = ${BLOCKSDS}/tools/dsltool/dsltool\n'
            '\n'
        )

        # In MinGW, paths for executable files must start with 'C:/', but
        # python3 expects them to start with '/c/'.
        blocksdsext = BLOCKSDSEXT.replace('C:/', '/c/')
        self.print(
            f'OBJ2DL = python3 {blocksdsext}/nitro-engine/tools/obj2dl/obj2dl.py\n'
            f'MD5_TO_DSMA = python3 {blocksdsext}/nitro-engine/tools/md5_to_dsma/md5_to_dsma.py\n'
        )

        # Rules for every tool that may be used by any binary type.
        self.print(
            'PTEXCONV = ${BLOCKSDSEXT}/ptexconv/ptexconv\n'
            '\n'
            'rule makedir\n'
            ' command = mkdir $out\n'
            '\n'
            'rule copy\n'
            ' command = cp $in $out\n'
            '\n'
            'rule bin2c\n'
            ' command = ${BIN2C} $in $outdir\n'
            '\n'
            # mmutil crashes when there are two processes generating soundbanks
            # at the same time.
            'pool mmutil_pool\n'
            ' depth = 1\n'
            '\n'
            'rule mmutil\n'
            ' command = ${MMUTIL} $in -d -o${soundbank_bin} -h${soundbank_info_h}\n'
            ' pool = mmutil_pool\n'
            '\n'
            'rule as_arm\n'
            ' command = ${CC_ARM} ${asflags} -MMD -MP -c -o $out $in\n'
            ' deps = gcc\n'
            ' depfile = ${dep}\n'
            '\n'
            'rule cc_arm\n'
            ' command = ${CC_ARM} ${cflags} -MMD -MP -c -o $out $in\n'
            ' deps = gcc\n'
            ' depfile = ${dep}\n'
            '\n'
            'rule cxx_arm\n'
            ' command = ${CXX_ARM} ${cxxflags} -MMD -MP -c -o $out $in\n'
            ' deps = gcc\n'
            ' depfile = ${dep}\n'
            '\n'
            'rule ld_arm\n'
            ' command = ${CC_ARM} -o $out $in ${ldflags}\n'
            '\n'
            'rule as_teak\n'
            ' command = ${CC_TEAK} ${asflags} -MMD -MP -c -o $out $in\n'
            ' deps = gcc\n'
            ' depfile = ${dep}\n'
            '\n'
            'rule cc_teak\n'
            ' command = ${CC_TEAK} ${cflags} -MMD -MP -c -o $out $in\n'
            ' deps = gcc\n'
            ' depfile = ${dep}\n'
            '\n'
            'rule cxx_teak\n'
            ' command = ${CXX_TEAK} ${cxxflags} -MMD -MP -c -o $out $in\n'
            ' deps = gcc\n'
            ' depfile = ${dep}\n'
            '\n'
            'rule ld_teak\n'
            ' command = ${LD_TEAK} -o $out $in ${ldflags}\n'
            '\n'
            'rule teaktool\n'
            ' command = ${TEAKTOOL} -i $in -o $out\n'
            '\n'
            'rule dsltool\n'
            ' command = ${DSLTOOL} -i ${elf_path} -o $out ${args}\n'
            '\n'
            'rule ndstool\n'
            ' command = ${NDSTOOL} -c $out -7 ${arm7elf} -9 ${arm9elf} -b ${game_icon} ${game_full_title} ${ndstool_nitrofs_flags}\n'
            '\n'
            'rule grit\n'
            ' command = ${GRIT} ${in_path_img} ${options} -o ${grit_out_path}\n'
            '\n'
            'rule squeezerw\n'
            ' command = ${SQUEEZERW} ${args}\n'
            '\n'
            'rule grit_nf_shared\n'
            ' command = ${GRIT} ${in_files_png} ${options}\n'
            '\n'
            'rule obj2dl\n'
            ' command = ${OBJ2DL} --input ${in_path_obj} --output $out ${args}\n'
            '\n'
            'rule md5_to_dsma\n'
            ' command = ${MD5_TO_DSMA} ${args}\n'
            '\n'
            'rule ptexconv\n'
            ' command = ${PTEXCONV} ${args}\n'
            '\n'
        )
class GenericCpuBinary(GenericBinary):
    '''
    This class has functions that can be used for any CPU binary in the NDS.
    '''

    def __init__(self, flag_assets_name):
        super().__init__(flag_assets_name)
        # Base directory for converted assets; subclasses must set this to a
        # real path before any add_*() method is called.
        self.out_assets_path = None
        # Lists of C and H files produced by converting assets. The C files
        # are compiled into the binary; the H files become dependencies of the
        # assets barrier.
        self.assets_c = []
        self.assets_h = []

    def add_header_dependencies(self, h_files):
        # Register extra header files that sources may include, so that they
        # are generated before compilation starts.
        self.assets_h.extend(h_files)

    def _gen_rule_assets_barrier(self):
        '''
        This generates a common phony target to all the files injected to the
        CPU binary as data. This phony target can be used instead of all the
        files when building the final CPU binary.
        '''
        flag_path = self.flag_assets_name
        file_paths_str = ' '.join(self.assets_h)
        self.print(
            f'build {flag_path}: phony {file_paths_str}\n'
            '\n'
        )

    def add_data(self, in_dirs, out_dir='data'):
        '''
        This function takes a list of directories and injects them as data in
        the CPU binary. It will take any file found in the directories
        regardless of the extension.
        '''
        full_out_dir = os.path.join(self.out_assets_path, out_dir)

        in_out_files = []

        for in_dir in in_dirs:
            in_files = gen_input_file_list(in_dir)
            in_out_files.extend(gen_out_file_list(in_files, in_dir, full_out_dir, '', ''))

        for in_out_file in in_out_files:
            # Replace the last '.' by '_' so the output base name matches the
            # identifiers that bin2c generates.
            out_path_base = '_'.join(in_out_file.out_path.rsplit('.', 1))

            out_path_dir = get_parent_dir(out_path_base)
            self.add_dir_target(out_path_dir)

            in_path = in_out_file.in_path

            out_path_c = out_path_base + '.c'
            out_path_h = out_path_base + '.h'
            self.assets_c.append(out_path_c)
            self.assets_h.append(out_path_h)

            self.print(
                f'build {out_path_c} {out_path_h}: bin2c {in_path} || {out_path_dir}\n'
                f' outdir = {out_path_dir}\n'
                '\n'
            )

    def add_bmfont_fnt(self, in_dirs, out_dir='bmfont'):
        '''
        This function gets as input a list of directories. It will look for
        files with extension '.fnt' (they must be exported in binary format, not
        text or xml format) and copy them to the filesystem.
        '''
        full_out_dir = os.path.join(self.out_assets_path, out_dir)

        in_out_files = []

        for in_dir in in_dirs:
            # NOTE(review): ('.fnt') is a plain string, not a 1-tuple; it
            # works because str.endswith() accepts a string, but a trailing
            # comma was probably intended.
            in_files = gen_input_file_list(in_dir, ('.fnt'))
            in_out_files.extend(gen_out_file_list(in_files, in_dir, full_out_dir, '', ''))

        for in_out_file in in_out_files:
            # Replace the last '.' by '_' so the output base name matches the
            # identifiers that bin2c generates.
            out_path_base = '_'.join(in_out_file.out_path.rsplit('.', 1))

            out_path_dir = get_parent_dir(out_path_base)
            self.add_dir_target(out_path_dir)

            in_path = in_out_file.in_path

            out_path_c = out_path_base + '.c'
            out_path_h = out_path_base + '.h'
            self.assets_c.append(out_path_c)
            self.assets_h.append(out_path_h)

            self.print(
                f'build {out_path_c} {out_path_h}: bin2c {in_path} || {out_path_dir}\n'
                f' outdir = {out_path_dir}\n'
                '\n'
            )

    def add_data_file(self, in_path, out_dir):
        '''
        This function takes a file and injects it as data in the CPU binary.
        '''
        in_file_name = get_file_name(in_path)
        out_path_base = os.path.join(out_dir, in_file_name)
        # Replace the last '.' by '_' so the output base name matches the
        # identifiers that bin2c generates.
        out_path_base = '_'.join(out_path_base.rsplit('.', 1))

        out_path_dir = get_parent_dir(out_path_base)
        self.add_dir_target(out_path_dir)

        out_path_c = out_path_base + '.c'
        out_path_h = out_path_base + '.h'
        self.assets_c.append(out_path_c)
        self.assets_h.append(out_path_h)

        self.print(
            f'build {out_path_c} {out_path_h}: bin2c {in_path} || {out_path_dir}\n'
            f' outdir = {out_path_dir}\n'
            '\n'
        )
class GenericArmBinary(GenericCpuBinary):
    '''
    This class has functions that can be used for any ARM binary in the NDS.
    '''

    def __init__(self, flag_assets_name):
        super().__init__(flag_assets_name)

    def _gen_rules_source_arm(self, in_dirs, out_dir, asflags, cflags, cxxflags,
                              assets_c_files, assets_h_flag):
        '''
        Generates rules to build all provided source files, and adds additional
        headers as dependencies of the source files.

        Mandatory arguments:

        - 'in_dirs': List of paths to directories with source code.
        - 'out_dir': Base path to store all build results.
        - 'asflags': All flags to be passed to the assembler.
        - 'cflags': All flags to be passed to the C compiler.
        - 'cxxflags': All flags to be passed to the C++ compiler.
        - 'assets_c_files': Additional C files (result of converting assets).
        - 'assets_h_flag': Name of the phony target that groups the headers
          generated from assets.
        '''

        in_out_files = []
        for in_dir in in_dirs:
            in_files = gen_input_file_list(in_dir, ('.c', '.cpp', '.s'))
            in_out_files.extend(gen_out_file_list(in_files, '', out_dir + '/', '', '.o'))

        # C files generated from assets are compiled like regular sources.
        in_out_files.extend(gen_out_file_list(assets_c_files, '', out_dir + '/', '', '.o'))

        for in_out_file in in_out_files:
            obj_out_path = in_out_file.out_path
            # Dependency file emitted by the compiler (-MMD -MP).
            dep_out_path = replace_ext(obj_out_path, '.o', '.d')

            in_path = in_out_file.in_path

            out_path_dir = get_parent_dir(obj_out_path)
            self.add_dir_target(out_path_dir)

            self.obj_file_paths.append(obj_out_path)

            # Remember that C++ was used so the right runtime libraries are
            # linked later.
            if in_path.endswith('.cpp'):
                self.has_cpp = True

            # '.arm.c'/'.arm.cpp' files are built as ARM code; everything else
            # is built as Thumb code. Check the more specific suffixes first.
            if in_path.endswith('.arm.cpp'):
                self.print(
                    f'build {obj_out_path}: cxx_arm {in_path} || {out_path_dir} {assets_h_flag}\n'
                    f' cxxflags = -marm -mlong-calls {cxxflags}\n'
                    f' dep = {dep_out_path}\n'
                    '\n'
                )
            elif in_path.endswith('.arm.c'):
                self.print(
                    f'build {obj_out_path}: cc_arm {in_path} || {out_path_dir} {assets_h_flag}\n'
                    f' cflags = -marm -mlong-calls {cflags}\n'
                    f' dep = {dep_out_path}\n'
                    '\n'
                )
            elif in_path.endswith('.cpp'):
                self.print(
                    f'build {obj_out_path}: cxx_arm {in_path} || {out_path_dir} {assets_h_flag}\n'
                    f' cxxflags = -mthumb {cxxflags}\n'
                    f' dep = {dep_out_path}\n'
                    '\n'
                )
            elif in_path.endswith('.c'):
                self.print(
                    f'build {obj_out_path}: cc_arm {in_path} || {out_path_dir} {assets_h_flag}\n'
                    f' cflags = -mthumb {cflags}\n'
                    f' dep = {dep_out_path}\n'
                    '\n'
                )
            elif in_path.endswith('.s'):
                self.print(
                    f'build {obj_out_path}: as_arm {in_path} || {out_path_dir} {assets_h_flag}\n'
                    f' asflags = {asflags}\n'
                    f' dep = {dep_out_path}\n'
                    '\n'
                )

    def add_tlf(self, teak, out_dir='teak'):
        '''
        Adds a TLF file as data in the ARM9 binary to be used without filesystem
        access.
        '''
        # Checked by name to avoid importing/declaring TeakBinary here.
        assert type(teak).__name__ == 'TeakBinary'

        full_out_dir = os.path.join(self.out_assets_path, out_dir)

        self.add_data_file(teak.tlf_path, full_out_dir)

    def generate_elf(self):
        '''
        This function generates rules to build an ELF file.
        '''

        self._gen_rule_assets_barrier()

        defines = ' '.join(['-D' + define for define in self.defines])

        # Headers generated from assets live in out_assets_path.
        includedirs = self.includedirs + [self.out_assets_path]

        includeflags = ' '.join('-isystem ' + path + '/include' for path in self.libdirs) + \
                       ' ' + ' '.join('-I' + path for path in includedirs)

        asflags = (
            f'-x assembler-with-cpp {defines} {includeflags} '
            f'-ffunction-sections -fdata-sections {self.specs} '
            f'{self.arch} {self.asflags}'
        )

        cflags = (
            f'{defines} {includeflags} '
            f'-ffunction-sections -fdata-sections {self.specs} '
            f'{self.arch} {self.cflags}'
        )

        cxxflags = (
            f'{defines} {includeflags} -fno-exceptions -fno-rtti '
            f'-ffunction-sections -fdata-sections {self.specs} '
            f'{self.arch} {self.cxxflags}'
        )

        self._gen_rules_source_arm(self.sourcedirs, self.out_dir,
                                   asflags, cflags, cxxflags,
                                   self.assets_c, self.flag_assets_name)

        # NOTE(review): this extends self.libs in place; if self.libs aliases
        # a list shared with other objects (e.g. a constructor default), the
        # extra libraries leak into them. It also assumes generate_elf() is
        # called at most once per binary — confirm against callers.
        if self.has_cpp:
            self.libs.extend(['stdc++', 'c'])
        else:
            self.libs.extend(['c'])

        libs = ' '.join(['-l' + lib for lib in self.libs])
        libdirsflags = ' '.join(['-L' + libdir + '/lib' for libdir in self.libdirs])

        ldflags = (
            f'{libdirsflags} -Wl,-Map,{self.map_path} {self.arch} '
            f'-Wl,--start-group {libs} -Wl,--end-group {self.specs} {self.ldflags}'
        )

        obj_file_paths_str = ' '.join(self.obj_file_paths)

        self.print(
            f'build {self.elf_path} | {self.map_path}: ld_arm {obj_file_paths_str} || {self.out_dir}\n'
            f' ldflags = {ldflags}\n'
            '\n'
        )
class Arm9Binary(GenericArmBinary):
|
|
'''
|
|
Class that represents an ARM9 CPU binary.
|
|
'''
|
|
|
|
ASSETS_BARRIER_ARM9 = 'assets_arm9_flag'
|
|
|
|
def __init__(self, *, sourcedirs, defines=[], includedirs=[],
|
|
libs=['nds9', 'mm9'],
|
|
libdirs=['${BLOCKSDS}/libs/libnds', '${BLOCKSDS}/libs/maxmod'],
|
|
asflags='',
|
|
cflags='-Wall -O2 -std=gnu11',
|
|
cxxflags='-Wall -O2 -std=gnu++14',
|
|
ldflags=''):
|
|
'''
|
|
Constructor of ARM9 binaries.
|
|
|
|
Mandatory arguments:
|
|
|
|
- 'sourcedirs': List of paths to directories with source code.
|
|
|
|
Optional arguments:
|
|
|
|
- 'defines': List of defines. Example: ['FEATURE_ON', FEATURE_LEVEL=2']
|
|
- 'includedirs': List of folders to be searched for headers.
|
|
- 'libs': List of libraries to be linked to the binary.
|
|
- 'libdirs': List of paths to be searched for libraries. The paths must
|
|
contain folders called 'include' and 'lib'.
|
|
- 'asflags': Optional flags to be passed to the assembler.
|
|
- 'cflags': Optional flags to be passed to the C compiler.
|
|
- 'cxxflags': Optional flags to be passed to the C++ compiler.
|
|
- 'ldflags': Optional flags to be passed to the linker.
|
|
'''
|
|
super().__init__(self.ASSETS_BARRIER_ARM9)
|
|
|
|
self.sourcedirs = sourcedirs
|
|
self.defines = defines
|
|
self.includedirs = includedirs
|
|
self.libs = libs
|
|
self.libdirs = libdirs
|
|
self.asflags = asflags
|
|
self.cflags = cflags
|
|
self.cxxflags = cxxflags
|
|
self.ldflags = ldflags
|
|
|
|
self.out_dir = 'build/arm9'
|
|
self.add_dir_target(self.out_dir)
|
|
|
|
self.out_assets_path = 'build/assets/arm9'
|
|
|
|
self.arch = '-mcpu=arm946e-s+nofp'
|
|
self.specs = '-specs=${BLOCKSDS}/sys/crts/ds_arm9.specs'
|
|
self.map_path = os.path.join(self.out_dir, 'arm9.map')
|
|
self.elf_path = os.path.join(self.out_dir, 'arm9.elf')
|
|
|
|
self.has_cpp = False
|
|
self.obj_file_paths = []
|
|
|
|
def add_grit(self, in_dirs, out_dir='grit'):
|
|
'''
|
|
This function gets as input a list of directories. It will look for
|
|
files with extension '.png' or '.jpg' and look for another '.grit' file
|
|
with the same base name. Then, it will create rules to convert them and
|
|
add them as data to the CPU binary.
|
|
'''
|
|
full_out_dir = os.path.join(self.out_assets_path, out_dir)
|
|
|
|
in_out_files = []
|
|
|
|
for in_dir in in_dirs:
|
|
in_files = gen_input_file_list(in_dir, ('.png'))
|
|
in_out_files.extend(gen_out_file_list(in_files, in_dir, full_out_dir, '.png', '_png'))
|
|
in_files = gen_input_file_list(in_dir, ('.jpg'))
|
|
in_out_files.extend(gen_out_file_list(in_files, in_dir, full_out_dir, '.jpg', '_jpg'))
|
|
|
|
for in_out_file in in_out_files:
|
|
grit_out_path = in_out_file.out_path
|
|
|
|
out_path_dir = get_parent_dir(grit_out_path)
|
|
self.add_dir_target(out_path_dir)
|
|
|
|
in_path_img = in_out_file.in_path
|
|
in_path_grit = remove_ext(in_path_img) + '.grit'
|
|
|
|
out_path_c = grit_out_path + '.c'
|
|
out_path_h = grit_out_path + '.h'
|
|
self.assets_c.append(out_path_c)
|
|
self.assets_h.append(out_path_h)
|
|
|
|
self.print(
|
|
f'build {out_path_c} {out_path_h}: grit {in_path_img} {in_path_grit} || {out_path_dir}\n'
|
|
f' in_path_img = {in_path_img}\n'
|
|
f' grit_out_path = {grit_out_path}\n'
|
|
f' options = -ftc -W1\n'
|
|
'\n'
|
|
)
|
|
|
|
def add_gl2d_sprite_set(self, in_dir, in_path_grit, width=0, height=0, out_dir='gl2d'):
|
|
'''
|
|
This function takes as input a directory full with PNG files and
|
|
generates a combined texture from them. It is possible to specify the
|
|
dimensions of the combined texture. If they aren't specified, the tool
|
|
will try different sizes until one works.
|
|
|
|
It is also required to provide a ".grit" file to convert the final
|
|
combined texture to a DS format.
|
|
'''
|
|
# Get name of the directory from the directory path
|
|
atlas_name = get_file_name(in_dir)
|
|
|
|
base_out_dir = os.path.join(self.out_assets_path, out_dir)
|
|
full_out_dir = os.path.join(base_out_dir, atlas_name)
|
|
|
|
self.add_dir_target(full_out_dir)
|
|
|
|
in_files = gen_input_file_list(in_dir, ('.png'))
|
|
|
|
out_path_c = os.path.join(full_out_dir, atlas_name + '.c')
|
|
out_path_h = os.path.join(full_out_dir, atlas_name + '.h')
|
|
|
|
out_path_png = os.path.join(full_out_dir, atlas_name + '_texture.png')
|
|
|
|
in_files_paths = " ".join(in_files)
|
|
|
|
# This rule must depend on both the PNG files (in case they change) and
|
|
# the directory that contains them (in case new files are added).
|
|
self.print(
|
|
f'build {out_path_png} {out_path_c} {out_path_h} : squeezerw {in_dir} {in_files_paths} || {full_out_dir}\n'
|
|
f' args = --width {width} --height {height} --verbose '
|
|
f'--outputTexture {out_path_png} --outputBaseName {atlas_name} '
|
|
f'--outputH {out_path_h} --outputC {out_path_c} {in_dir}\n'
|
|
'\n'
|
|
)
|
|
|
|
# When grit runs, the ".grit" file must be in the same folder as the
|
|
# ".png" file and have the same name. It is needed to copy it where the
|
|
# final ".png" file with the atlas is generated.
|
|
out_path_grit_copy = os.path.join(full_out_dir, atlas_name + '_texture.grit')
|
|
|
|
self.print(
|
|
f'build {out_path_grit_copy} : copy {in_path_grit} || {full_out_dir}\n'
|
|
'\n'
|
|
)
|
|
|
|
out_path_grit = os.path.join(full_out_dir, atlas_name + '_texture_png')
|
|
out_path_png_c = os.path.join(full_out_dir, atlas_name + '_texture_png.c')
|
|
out_path_png_h = os.path.join(full_out_dir, atlas_name + '_texture_png.h')
|
|
|
|
self.print(
|
|
f'build {out_path_png_c} {out_path_png_h}: grit {out_path_png} {out_path_grit_copy} || {full_out_dir}\n'
|
|
f' in_path_img = {out_path_png}\n'
|
|
f' grit_out_path = {out_path_grit}\n'
|
|
f' options = -ftc -W1\n'
|
|
'\n'
|
|
)
|
|
|
|
self.assets_c.extend([out_path_c, out_path_png_c])
|
|
self.assets_h.extend([out_path_h, out_path_png_h])
|
|
|
|
def add_mmutil(self, in_dirs, name='soundbank', out_dir='maxmod'):
|
|
'''
|
|
This function gets as input a list of directories. It will look for
|
|
files with the extensions '.wav', '.mod', '.s3m', '.it' and '.xm', and
|
|
it will build a Maxmod soundbank with the name provided in 'name'. This
|
|
soundbank will be added as data to the ARM9 binary so it can be used
|
|
without filesystem access.
|
|
'''
|
|
full_out_dir = os.path.join(self.out_assets_path, out_dir)
|
|
self.add_dir_target(full_out_dir)
|
|
|
|
in_audio_files = []
|
|
for in_dir in in_dirs:
|
|
in_files = gen_input_file_list(in_dir, ('.it', '.mod', '.s3m', '.xm', '.wav'))
|
|
in_audio_files.extend(in_files)
|
|
|
|
out_path_base = os.path.join(full_out_dir, name)
|
|
|
|
out_path_bin = out_path_base + '.bin'
|
|
out_path_bin_c = out_path_base + '_bin.c'
|
|
out_path_bin_h = out_path_base + '_bin.h'
|
|
out_path_info_h = out_path_base + '_info.h'
|
|
|
|
self.assets_c.extend([out_path_bin_c])
|
|
self.assets_h.extend([out_path_info_h, out_path_bin_h])
|
|
|
|
all_audio_files = ' '.join(in_audio_files)
|
|
self.print(
|
|
f'build {out_path_bin} {out_path_info_h} : mmutil {all_audio_files} || {full_out_dir}\n'
|
|
f' soundbank_bin = {out_path_bin}\n'
|
|
f' soundbank_info_h = {out_path_info_h}\n'
|
|
'\n'
|
|
f'build {out_path_bin_c} {out_path_bin_h}: bin2c {out_path_bin} || {full_out_dir}\n'
|
|
f' outdir = {full_out_dir}\n'
|
|
'\n'
|
|
)
|
|
|
|
def add_nitro_engine_obj(self, in_dirs, out_dir='models'):
|
|
'''
|
|
Nitro Engine: This function gets as input a list of directories. It will
|
|
look for files with extension '.obj' and look for another '.json' file
|
|
with the same base name. Then, it will create rules to convert them and
|
|
add them as data to the CPU binary.
|
|
'''
|
|
full_out_dir = os.path.join(self.out_assets_path, out_dir)
|
|
|
|
in_out_files = []
|
|
|
|
for in_dir in in_dirs:
|
|
in_files = gen_input_file_list(in_dir, ('.obj'))
|
|
in_out_files.extend(gen_out_file_list(in_files, in_dir, full_out_dir, '.obj', '.dl'))
|
|
|
|
for in_out_file in in_out_files:
|
|
out_path_dl = in_out_file.out_path
|
|
|
|
out_path_dir = get_parent_dir(out_path_dl)
|
|
self.add_dir_target(out_path_dir)
|
|
|
|
in_path_obj = in_out_file.in_path
|
|
in_path_json = remove_ext(in_path_obj) + '.json'
|
|
|
|
json_data = load_json(in_path_json)
|
|
assert 'texture' in json_data, 'Texture size must be provided'
|
|
|
|
args = (
|
|
'--texture ' + str(json_data['texture'][0]) + ' ' +
|
|
str(json_data['texture'][1])
|
|
)
|
|
|
|
if 'scale' in json_data:
|
|
args += ' --scale ' + str(json_data['scale'])
|
|
|
|
if 'use-vertex-color' in json_data:
|
|
if json_data['use-vertex-color']: # Only add this if True
|
|
args += ' --use-vertex-color '
|
|
|
|
self.print(
|
|
f'build {out_path_dl} : obj2dl {in_path_obj} {in_path_json} || {out_path_dir}\n'
|
|
f' in_path_obj = {in_path_obj}\n'
|
|
f' args = {args}\n'
|
|
'\n'
|
|
)
|
|
|
|
# The resulting binary file needs to be converted to C and H files
|
|
self.add_data_file(out_path_dl, out_path_dir)
|
|
|
|
def add_nitro_engine_md5(self, in_dirs, out_dir='models'):
|
|
'''
|
|
Nitro Engine: Looks for md5mesh files in the provided directores. Each
|
|
file must be acompanied by a json file with some information. For
|
|
example:
|
|
|
|
{
|
|
"texture": [256, 256],
|
|
"blender-fix": true,
|
|
"export-base-pose": false,
|
|
"animations": [
|
|
{
|
|
"file": "wave.md5anim",
|
|
"skip-frames": 1
|
|
}
|
|
]
|
|
}
|
|
'''
|
|
full_out_dir = os.path.join(self.out_assets_path, out_dir)
|
|
|
|
md5mesh_in_out_files =[]
|
|
|
|
for in_dir in in_dirs:
|
|
in_files = gen_input_file_list(in_dir, ('.md5mesh'))
|
|
md5mesh_in_out_files.extend(gen_out_file_list(in_files, in_dir, full_out_dir, '.md5mesh', ''))
|
|
|
|
for in_out_file in md5mesh_in_out_files:
|
|
out_path_dir = get_parent_dir(in_out_file.out_path)
|
|
self.add_dir_target(out_path_dir)
|
|
|
|
in_path_md5mesh = in_out_file.in_path
|
|
in_path_json = replace_ext(in_path_md5mesh, '.md5mesh', '.json')
|
|
|
|
json_data = load_json(in_path_json)
|
|
assert 'texture' in json_data, 'Texture size must be provided'
|
|
|
|
args = (
|
|
'--texture ' + str(json_data['texture'][0]) + ' ' +
|
|
str(json_data['texture'][1])
|
|
)
|
|
|
|
if 'blender-fix' in json_data:
|
|
if json_data['blender-fix']:
|
|
args += ' --blender-fix'
|
|
|
|
base_name = remove_ext(get_file_name(in_path_md5mesh))
|
|
|
|
args += f' --name {base_name} --output {out_path_dir} --model {in_path_md5mesh}'
|
|
|
|
out_path_dsm = in_out_file.out_path + '.dsm'
|
|
|
|
args_str = ' '.join(args)
|
|
|
|
self.print(
|
|
f'build {out_path_dsm} : md5_to_dsma {in_path_md5mesh} {in_path_json} || {out_path_dir}\n'
|
|
f' args = {args}\n'
|
|
'\n'
|
|
)
|
|
|
|
# The resulting binary file needs to be converted to C and H files
|
|
self.add_data_file(out_path_dsm, out_path_dir)
|
|
|
|
if 'animations' in json_data:
|
|
in_path_dir = get_parent_dir(in_out_file.in_path)
|
|
|
|
for animation in json_data['animations']:
|
|
assert 'file' in animation
|
|
in_path_md5anim = os.path.join(in_path_dir, animation['file'])
|
|
|
|
args = f' --name {base_name} --output {out_path_dir} --anim {in_path_md5anim}'
|
|
|
|
if 'skip-frames' in animation:
|
|
args += ' --skip-frames ' + str(animation['skip-frames'])
|
|
|
|
if 'blender-fix' in json_data:
|
|
if json_data['blender-fix']:
|
|
args += ' --blender-fix'
|
|
|
|
base_name_anim = remove_ext(get_file_name(in_path_md5anim))
|
|
|
|
out_path_dsa = in_out_file.out_path + '_' + base_name_anim + '.dsa'
|
|
|
|
args_str = ' '.join(args)
|
|
|
|
self.print(
|
|
f'build {out_path_dsa} : md5_to_dsma {in_path_md5anim} {in_path_json} || {out_path_dir}\n'
|
|
f' args = {args}\n'
|
|
'\n'
|
|
)
|
|
|
|
# The resulting binary file needs to be converted to C and H files
|
|
self.add_data_file(out_path_dsa, out_path_dir)
|
|
|
|
def add_ptexconv_tex4x4(self, in_dirs, out_dir='ptexconv'):
    '''
    Scan a list of directories for '.png' and '.jpg' images and emit ninja
    rules that convert each one to a Texel 4x4 compressed texture with
    ptexconv. The three resulting binary blobs (texture, index, palette)
    are then registered as data files embedded in the CPU binary.
    '''
    full_out_dir = os.path.join(self.out_assets_path, out_dir)

    conversions = []

    # Collect every supported image, tagging output names by extension so
    # 'img.png' and 'img.jpg' never collide.
    for src_dir in in_dirs:
        for ext, tag in (('.png', '_png'), ('.jpg', '_jpg')):
            images = gen_input_file_list(src_dir, (ext))
            conversions.extend(
                gen_out_file_list(images, src_dir, full_out_dir, ext, tag))

    for entry in conversions:
        ptexconv_out_path = entry.out_path

        out_path_dir = get_parent_dir(ptexconv_out_path)
        self.add_dir_target(out_path_dir)

        in_path_png = entry.in_path

        # ptexconv writes three files from a single invocation.
        out_path_tex = ptexconv_out_path + '_tex.bin'
        out_path_idx = ptexconv_out_path + '_idx.bin'
        out_path_pal = ptexconv_out_path + '_pal.bin'

        self.print(
            f'build {out_path_tex} {out_path_idx} {out_path_pal} : ptexconv {in_path_png} || {out_path_dir}\n'
            f'  args = -gt -ob -k FF00FF -v -f tex4x4 -o {ptexconv_out_path} {in_path_png}\n'
            '\n'
        )

        # The resulting binaries are turned into C/H files for the CPU binary.
        for blob in (out_path_tex, out_path_idx, out_path_pal):
            self.add_data_file(blob, out_path_dir)
|
class Arm9DynamicLibrary(GenericArmBinary):
    '''
    Class that represents a dynamic library (DSL file) for the ARM9.
    '''

    ASSETS_BARRIER_ARM9_DYNAMIC_LIBRARY = 'assets_arm9_dynamic_library_flag_'

    def __init__(self, *, name, main_binary=None, sourcedirs, defines=[],
                 includedirs=[], libs=[], libdirs=[],
                 asflags='',
                 cflags='-Wall -O2 -std=gnu11',
                 cxxflags='-Wall -O2 -std=gnu++14',
                 ldflags=''):
        '''
        Constructor of ARM9 dynamic libraries.

        Mandatory arguments:

        - 'name': Name to identify this specific ARM9 dynamic library.
        - 'sourcedirs': List of paths to directories with source code.

        Optional arguments:

        - 'main_binary': Arm9Binary object to be used to resolve unknown symbols.
        - 'defines': List of defines. Example: ['FEATURE_ON', FEATURE_LEVEL=2']
        - 'includedirs': List of folders to be searched for headers.
        - 'libs': List of libraries to be linked to the binary.
        - 'libdirs': List of paths to be searched for libraries. The paths must
          contain folders called 'include' and 'lib'.
        - 'asflags': Optional flags to be passed to the assembler.
        - 'cflags': Optional flags to be passed to the C compiler.
        - 'cxxflags': Optional flags to be passed to the C++ compiler.
        - 'ldflags': Optional flags to be passed to the linker.
        '''
        super().__init__(self.ASSETS_BARRIER_ARM9_DYNAMIC_LIBRARY + str(name))

        if main_binary is not None:
            assert type(main_binary).__name__ == 'Arm9Binary'

        # Always store the attribute, even when it is None. generate_dsl()
        # tests 'self.main_binary is None'; previously the assignment only
        # happened inside the 'is not None' branch, so building a library
        # without a main binary raised AttributeError.
        self.main_binary = main_binary

        self.sourcedirs = sourcedirs
        self.defines = defines
        self.includedirs = includedirs
        self.libs = libs
        self.libdirs = libdirs
        self.asflags = asflags
        self.cflags = cflags
        self.cxxflags = cxxflags
        self.ldflags = ldflags

        self.out_dir = os.path.join('build', name)
        self.add_dir_target(self.out_dir)

        self.out_assets_path = f'build/assets/{name}'

        # ARM9-specific build configuration. The '_dsl' specs file produces
        # relocatable output suitable for dsltool.
        self.arch = '-mcpu=arm946e-s+nofp'
        self.specs = '-specs=${BLOCKSDS}/sys/crts/ds_arm9_dsl.specs'
        self.map_path = os.path.join(self.out_dir, name + '.map')
        self.elf_path = os.path.join(self.out_dir, name + '.elf')
        self.dsl_path = os.path.join(self.out_dir, name + '.dsl')
        # Used by generate_dsl_standalone() to emit the DSL at the top level.
        self.dsl_standalone_path = name + '.dsl'

        self.has_cpp = False
        self.obj_file_paths = []

    def generate_dsl(self):
        '''
        This function generates rules to build an ELF and a DSL file.
        '''

        self._gen_rule_assets_barrier()

        defines = ' '.join(['-D' + define for define in self.defines])

        # Converted assets produce headers inside the assets folder.
        includedirs = self.includedirs + [self.out_assets_path]

        includeflags = ' '.join('-isystem ' + path + '/include' for path in self.libdirs) + \
                       ' ' + ' '.join('-I' + path for path in includedirs)

        # '-fvisibility=hidden' keeps symbols private unless explicitly
        # exported, which is required for dynamic libraries.
        asflags = (
            f'-x assembler-with-cpp {defines} {includeflags} '
            f'-ffunction-sections -fdata-sections {self.specs} '
            f'{self.arch} {self.asflags} -fvisibility=hidden'
        )

        cflags = (
            f'{defines} {includeflags} '
            f'-ffunction-sections -fdata-sections {self.specs} '
            f'{self.arch} {self.cflags} -fvisibility=hidden'
        )

        cxxflags = (
            f'{defines} {includeflags} -fno-exceptions -fno-rtti '
            f'-ffunction-sections -fdata-sections {self.specs} '
            f'{self.arch} {self.cxxflags} -fvisibility=hidden'
        )

        self._gen_rules_source_arm(self.sourcedirs, self.out_dir,
                                   asflags, cflags, cxxflags,
                                   self.assets_c, self.flag_assets_name)

        libs = ' '.join(['-l' + lib for lib in self.libs])
        libdirsflags = ' '.join(['-L' + libdir + '/lib' for libdir in self.libdirs])

        # Unresolved symbols are allowed: they are resolved at load time
        # against the main binary. '--emit-relocs' keeps the relocation info
        # that dsltool needs.
        ldflags = (
            f'{libdirsflags} -Wl,-Map,{self.map_path} {self.arch} '
            f'-nostdlib -Wl,--start-group {libs} -Wl,--end-group '
            f'{self.specs} {self.ldflags} '
            f'-Wl,--emit-relocs -Wl,--unresolved-symbols=ignore-all -Wl,--nmagic'
        )

        obj_file_paths_str = ' '.join(self.obj_file_paths)

        self.print(
            f'build {self.elf_path} | {self.map_path}: ld_arm {obj_file_paths_str} || {self.out_dir}\n'
            f'  ldflags = {ldflags}\n'
            '\n'
        )

        # If a main binary was provided, pass its ELF to dsltool so that the
        # library's unknown symbols can be resolved against it.
        if self.main_binary is None:
            self.print(
                f'build {self.dsl_path}: dsltool {self.elf_path} || {self.out_dir}\n'
                f'  elf_path = {self.elf_path}\n'
                f'  args = \n'
                '\n'
            )
        else:
            self.print(
                f'build {self.dsl_path}: dsltool {self.elf_path} {self.main_binary.elf_path} || {self.out_dir}\n'
                f'  elf_path = {self.elf_path}\n'
                f'  args = -m {self.main_binary.elf_path}\n'
                '\n'
            )

    def generate_dsl_standalone(self):
        '''
        This function generates rules to use build tools, combines the rules to
        build all source and assets, and it generates rules to build the final
        DSL file.

        This function will generate the full output file with rules, the
        Arm9DynamicLibrary object doesn't need to be added to a
        GenericFilesystem object (such as NitroFS or FatFS).
        '''
        # Save the file to the top level folder rather than build/
        self.dsl_path = self.dsl_standalone_path

        # General rules for all used tools
        self._gen_rules_tools()

        self.generate_dsl()

        # Rules to generate all directories
        self._gen_rules_build_directories()
|
class Arm7BinaryDefault():
    '''
    Default prebuilt ARM7 binary shipped with BlocksDS.

    It mimics the small part of the Arm7Binary interface that the rest of
    the generator needs (an ELF path, empty rule contents and no directory
    targets), so it can be used wherever a custom ARM7 binary would go.
    '''

    def __init__(self,
                 elf_path='${BLOCKSDS}/sys/arm7/main_core/arm7_dswifi_maxmod.elf'):
        # Path to the prebuilt ELF (DSWifi + Maxmod variant by default).
        self.elf_path = elf_path
        # No ninja rules are needed for a prebuilt binary.
        self.contents = ''
        # No build directories need to be created either.
        self.dir_targets = []
|
class Arm7Binary(GenericArmBinary):
    '''
    Class that represents a custom ARM7 CPU binary built from source.
    '''

    ASSETS_BARRIER_ARM7 = 'assets_arm7_flag'

    def __init__(self, *, sourcedirs, defines=[], includedirs=[],
                 libs=['nds7', 'mm7', 'dswifi7'],
                 libdirs=['${BLOCKSDS}/libs/libnds', '${BLOCKSDS}/libs/maxmod',
                          '${BLOCKSDS}/libs/dswifi'],
                 asflags='',
                 cflags='-Wall -O2 -std=gnu11',
                 cxxflags='-Wall -O2 -std=gnu++14',
                 ldflags=''):
        '''
        Constructor of ARM7 binaries.

        Mandatory arguments:

        - 'sourcedirs': List of paths to directories with source code.

        Optional arguments:

        - 'defines': List of defines. Example: ['FEATURE_ON', FEATURE_LEVEL=2']
        - 'includedirs': List of folders to be searched for headers.
        - 'libs': List of libraries to be linked to the binary.
        - 'libdirs': List of paths to be searched for libraries. The paths must
          contain folders called 'include' and 'lib'.
        - 'asflags': Optional flags to be passed to the assembler.
        - 'cflags': Optional flags to be passed to the C compiler.
        - 'cxxflags': Optional flags to be passed to the C++ compiler.
        - 'ldflags': Optional flags to be passed to the linker.
        '''

        super().__init__(self.ASSETS_BARRIER_ARM7)

        # User-provided build configuration.
        self.sourcedirs = sourcedirs
        self.includedirs = includedirs
        self.defines = defines
        self.libdirs = libdirs
        self.libs = libs
        self.cflags = cflags
        self.cxxflags = cxxflags
        self.asflags = asflags
        self.ldflags = ldflags

        # Intermediate build artifacts live under build/arm7.
        self.out_dir = 'build/arm7'
        self.add_dir_target(self.out_dir)
        self.out_assets_path = 'build/assets/arm7'

        # ARM7-specific toolchain configuration.
        self.arch = '-mcpu=arm7tdmi'
        self.specs = '-specs=${BLOCKSDS}/sys/crts/ds_arm7.specs'
        self.elf_path = os.path.join(self.out_dir, 'arm7.elf')
        self.map_path = os.path.join(self.out_dir, 'arm7.map')

        # State filled in while source rules are generated.
        self.obj_file_paths = []
        self.has_cpp = False
|
class TeakBinary(GenericCpuBinary):
    '''
    Class that represents a Teak (DSP) CPU binary.
    '''

    ASSETS_BARRIER_TEAK = 'assets_teak_flag_'

    def __init__(self, *, name, sourcedirs, defines=[], includedirs=[],
                 libs=['teak'],
                 libdirs=['${BLOCKSDS}/libs/libteak'],
                 asflags='',
                 cflags='-Wall -O2 -std=gnu11',
                 cxxflags='-Wall -O2 -std=gnu++14'):
        '''
        Constructor of Teak binaries.

        Mandatory arguments:

        - 'name': Name to identify this specific Teak binary.
        - 'sourcedirs': List of paths to directories with source code.

        Optional arguments:

        - 'defines': List of defines. Example: ['FEATURE_ON', FEATURE_LEVEL=2']
        - 'includedirs': List of folders to be searched for headers.
        - 'libs': List of libraries to be linked to the binary.
        - 'libdirs': List of paths to be searched for libraries. The paths must
          contain folders called 'include' and 'lib'.
        - 'asflags': Optional flags to be passed to the assembler.
        - 'cflags': Optional flags to be passed to the C compiler.
        - 'cxxflags': Optional flags to be passed to the C++ compiler.
        '''
        super().__init__(self.ASSETS_BARRIER_TEAK + str(name))

        self.name = name
        self.sourcedirs = sourcedirs
        self.includedirs = includedirs
        self.libs = libs
        self.libdirs = libdirs
        self.asflags = asflags
        self.cflags = cflags
        self.cxxflags = cxxflags

        # These defines are always required when building for the DSP.
        self.defines = ['__NDS__', 'TEAK'] + defines

        self.out_dir = os.path.join('build', name)
        self.add_dir_target(self.out_dir)

        self.has_cpp = False
        self.obj_file_paths = []

    def _gen_rules_source_teak(self, in_dirs, out_dir, asflags, cflags, cxxflags,
                               assets_c_files, assets_h_flag):
        '''
        Generates rules to build all provided source files, and adds additional
        headers as dependencies of the source files.

        Mandatory arguments:

        - 'in_dirs': List of paths to directories with source code.
        - 'out_dir': Base path to store all build results.
        - 'asflags': All flags to be passed to the assembler.
        - 'cflags': All flags to be passed to the C compiler.
        - 'cxxflags': All flags to be passed to the C++ compiler.
        - 'assets_c_files': Additional C files (result of converting assets).
        - 'assets_h_flag': Phony target that all generated asset headers
          depend on; used as an order-only dependency of every object file.
        '''

        in_out_files = []
        for in_dir in in_dirs:
            in_files = gen_input_file_list(in_dir, ('.c', '.cpp', '.s'))
            in_out_files.extend(gen_out_file_list(in_files, '', out_dir + '/', '', '.o'))

        # C files generated from assets are compiled like regular sources.
        in_out_files.extend(gen_out_file_list(assets_c_files, '', out_dir + '/', '', '.o'))

        for in_out_file in in_out_files:
            obj_out_path = in_out_file.out_path
            dep_out_path = replace_ext(obj_out_path, '.o', '.d')

            in_path = in_out_file.in_path

            out_path_dir = get_parent_dir(obj_out_path)
            self.add_dir_target(out_path_dir)

            self.obj_file_paths.append(obj_out_path)

            # Pick the build rule based on the source file extension. The
            # previous version tested '.cpp' twice in a row; the checks are
            # now merged into a single chain.
            if in_path.endswith('.cpp'):
                self.has_cpp = True
                self.print(
                    f'build {obj_out_path}: cxx_teak {in_path} || {out_path_dir} {assets_h_flag}\n'
                    f'  cxxflags = {cxxflags}\n'
                    f'  dep = {dep_out_path}\n'
                    '\n'
                )
            elif in_path.endswith('.c'):
                self.print(
                    f'build {obj_out_path}: cc_teak {in_path} || {out_path_dir} {assets_h_flag}\n'
                    f'  cflags = {cflags}\n'
                    f'  dep = {dep_out_path}\n'
                    '\n'
                )
            elif in_path.endswith('.s'):
                self.print(
                    f'build {obj_out_path}: as_teak {in_path} || {out_path_dir} {assets_h_flag}\n'
                    f'  asflags = {asflags}\n'
                    f'  dep = {dep_out_path}\n'
                    '\n'
                )

    def generate_tlf(self):
        '''
        This function generates rules to build a TLF file.
        '''
        self._gen_rule_assets_barrier()

        arch = '--target=teak -march=teak'
        defines = ' '.join(['-D' + define for define in self.defines])

        # TODO: Support assets

        includeflags = ' '.join('-isystem ' + path + '/include' for path in self.libdirs) + \
                       ' ' + ' '.join('-I' + path for path in self.includedirs)

        # The DSP toolchain is freestanding: no standard library or builtins.
        asflags = (
            f'-x assembler-with-cpp {defines} {arch} {includeflags} '
            '-integrated-as -nostdlib -ffreestanding -fno-builtin '
            f'{self.asflags}'
        )

        cflags = (
            f'{defines} {arch} {includeflags} '
            '-integrated-as -nostdlib -ffreestanding -fno-builtin '
            f'{self.cflags}'
        )

        cxxflags = (
            f'{defines} {arch} {includeflags} '
            '-integrated-as -nostdlib -ffreestanding -fno-builtin '
            '-fno-rtti -fno-exceptions '
            f'{self.cxxflags}'
        )

        self._gen_rules_source_teak(self.sourcedirs, self.out_dir,
                                    asflags, cflags, cxxflags,
                                    self.assets_c, self.flag_assets_name)

        libs = ' '.join(['-l' + lib for lib in self.libs])
        libdirsflags = ' '.join(['-L' + libdir + '/lib' for libdir in self.libdirs])

        map_path = os.path.join(self.out_dir, 'teak.map')

        ldflags = (
            f'{libdirsflags} -Map {map_path} -nostdlib '
            '-T${BLOCKSDS}/libs/libteak/teak.ld '
            f'--start-group {libs} --end-group'
        )

        elf_path = os.path.join(self.out_dir, 'teak.elf')
        obj_file_paths_str = ' '.join(self.obj_file_paths)

        self.tlf_path = os.path.join(self.out_dir, self.name + '.tlf')

        # Link the ELF, then convert it to the TLF format with teaktool.
        self.print(
            f'build {elf_path} | {map_path}: ld_teak {obj_file_paths_str} || {self.out_dir}\n'
            f'  ldflags = {ldflags}\n'
            '\n'
            f'build {self.tlf_path}: teaktool {elf_path} || {self.out_dir}\n'
            '\n'
        )
|
class GenericFilesystem(GenericBinary):
|
|
'''
|
|
Class that defines rules to add files to a generic filesystem (SD or
|
|
NitroFS) with a previous conversion step. This can be used for graphics,
|
|
music, etc.
|
|
'''
|
|
|
|
def __init__(self, flag_assets_name, out_assets_path, out_temp_path):
    '''
    Initialize a filesystem builder.

    - 'flag_assets_name': Name of the phony target that groups all files.
    - 'out_assets_path': Directory that becomes the filesystem contents.
    - 'out_temp_path': Directory for intermediate files that must not end
      up inside the filesystem image.
    '''
    super().__init__(flag_assets_name)

    # Paths of all files that will end up inside the filesystem image.
    self.target_files = []

    self.out_temp_path = out_temp_path
    self.out_assets_path = out_assets_path
    self.add_dir_target(self.out_assets_path)
|
def _gen_rule_assets_barrier(self):
|
|
'''
|
|
This generates a common phony target to all the files inside the
|
|
filesystem. This phony target can be used instead of all the files when
|
|
another target depends on the filesystem as a whole, like the NDS ROM.
|
|
'''
|
|
flag_path = self.flag_assets_name
|
|
file_paths_str = ' '.join(self.target_files)
|
|
self.print(
|
|
f'build {flag_path}: phony {file_paths_str}\n'
|
|
'\n'
|
|
)
|
|
|
|
def generate_image(self):
    '''
    Finish the filesystem after every file has been registered.

    Currently this only emits the phony barrier target that groups all the
    files added to the filesystem.
    '''
    self._gen_rule_assets_barrier()
|
def add_grit(self, in_dirs, out_dir='grit'):
    '''
    This function gets as input a list of directories. It will look for
    files with extension '.png' and look for another '.grit' file with the
    same base name. Then, it will create rules to convert them and add them
    to the filesystem.

    This rule will create GRF files:

    https://www.coranac.com/man/grit/html/grit.htm

    GRF files are used because it's easier to keep track of the outputs of
    the rule. If not, this rule would need to parse the '.grit' file to
    determine if maps, tilesets or palettes are going to be generated so
    that the dependencies can be created correctly.
    '''
    full_out_dir = os.path.join(self.out_assets_path, out_dir)

    in_out_files = []

    for in_dir in in_dirs:
        # Note: ('.png',) with a trailing comma. The previous ('.png') was a
        # plain string, which only worked because str.endswith() accepts a
        # string as well as a tuple of suffixes.
        in_files = gen_input_file_list(in_dir, ('.png',))
        in_out_files.extend(gen_out_file_list(in_files, in_dir, full_out_dir, '.png', '_png'))

    for in_out_file in in_out_files:
        grit_out_path = in_out_file.out_path

        out_path_dir = get_parent_dir(grit_out_path)
        self.add_dir_target(out_path_dir)

        # grit expects the '.grit' options file next to the image.
        in_path_png = in_out_file.in_path
        in_path_grit = replace_ext(in_path_png, '.png', '.grit')

        out_path_grf = grit_out_path + '.grf'

        self.target_files.append(out_path_grf)

        self.print(
            f'build {out_path_grf} : grit {in_path_png} {in_path_grit} || {out_path_dir}\n'
            f'  in_path_img = {in_path_png}\n'
            f'  grit_out_path = {grit_out_path}\n'
            f'  options = -ftr -fh! -W1\n'
            '\n'
        )
|
def add_ptexconv_tex4x4(self, in_dirs, out_dir='ptexconv'):
    '''
    Scan a list of directories for '.png' and '.jpg' images and emit ninja
    rules that convert each one to a Texel 4x4 compressed texture with
    ptexconv, adding the resulting '.bin' files to the filesystem.
    '''
    full_out_dir = os.path.join(self.out_assets_path, out_dir)

    conversions = []

    # Collect every supported image, tagging output names by extension so
    # 'img.png' and 'img.jpg' never collide.
    for src_dir in in_dirs:
        for ext, tag in (('.png', '_png'), ('.jpg', '_jpg')):
            images = gen_input_file_list(src_dir, (ext))
            conversions.extend(
                gen_out_file_list(images, src_dir, full_out_dir, ext, tag))

    for entry in conversions:
        ptexconv_out_path = entry.out_path

        out_path_dir = get_parent_dir(ptexconv_out_path)
        self.add_dir_target(out_path_dir)

        in_path_png = entry.in_path

        # ptexconv writes three files from a single invocation.
        out_path_tex = ptexconv_out_path + '_tex.bin'
        out_path_idx = ptexconv_out_path + '_idx.bin'
        out_path_pal = ptexconv_out_path + '_pal.bin'

        self.target_files.extend([out_path_tex, out_path_idx, out_path_pal])

        self.print(
            f'build {out_path_tex} {out_path_idx} {out_path_pal} : ptexconv {in_path_png} || {out_path_dir}\n'
            f'  args = -gt -ob -k FF00FF -v -f tex4x4 -o {ptexconv_out_path} {in_path_png}\n'
            '\n'
        )
|
def add_bmfont_fnt(self, in_dirs, out_dir='bmfont'):
    '''
    Copy BMFont '.fnt' files into the filesystem unchanged.

    The fonts must be exported in binary format (not text or xml format).
    '''
    full_out_dir = os.path.join(self.out_assets_path, out_dir)

    fonts = []

    for src_dir in in_dirs:
        found = gen_input_file_list(src_dir, ('.fnt'))
        fonts.extend(gen_out_file_list(found, src_dir, full_out_dir, '', ''))

    for entry in fonts:
        dst_dir = get_parent_dir(entry.out_path)
        self.add_dir_target(dst_dir)

        src = entry.in_path
        dst = entry.out_path

        self.target_files.append(dst)

        # Plain copy: the binary '.fnt' is used as-is at runtime.
        self.print(f'build {dst} : copy {src} || {dst_dir}\n\n')
|
def add_gl2d_sprite_set(self, in_dir, in_path_grit, width=0, height=0, out_dir='gl2d'):
    '''
    This function takes as input a directory full with PNG files and
    generates a combined texture from them. It is possible to specify the
    dimensions of the combined texture. If they aren't specified, the tool
    will try different sizes until one works.

    It is also required to provide a ".grit" file to convert the final
    combined texture to a DS format.

    Pipeline: squeezerw packs all PNGs into one atlas PNG plus a GUV
    coordinates file, then grit converts the atlas PNG to a GRF. Only the
    GUV and GRF files end up in the filesystem; the atlas PNG and the
    copied ".grit" file are intermediates kept in the temp directory.
    '''
    # Get name of the directory from the directory path; it names the atlas.
    atlas_name = get_file_name(in_dir)

    # Final (filesystem) output directory for this atlas.
    base_out_dir = os.path.join(self.out_assets_path, out_dir)
    full_out_dir = os.path.join(base_out_dir, atlas_name)

    # Temporary directory for intermediates that must not be in the image.
    base_out_temp_dir = os.path.join(self.out_temp_path, out_dir)
    full_out_temp_dir = os.path.join(base_out_temp_dir, atlas_name)

    # NOTE(review): only full_out_dir is registered with add_dir_target();
    # full_out_temp_dir is never registered — confirm the tools create it.
    self.add_dir_target(full_out_dir)

    in_files = gen_input_file_list(in_dir, ('.png'))

    # The GUV file needs to be saved to the filesystem, the PNG to a
    # temporary directory.
    out_path_guv = os.path.join(full_out_dir, atlas_name + '.guv')
    out_path_png = os.path.join(full_out_temp_dir, atlas_name + '_texture.png')

    in_files_paths = " ".join(in_files)

    # This rule must depend on both the PNG files (in case they change) and
    # the directory that contains them (in case new files are added).
    self.print(
        f'build {out_path_png} {out_path_guv} : squeezerw {in_dir} {in_files_paths} || {full_out_dir}\n'
        f'  args = --width {width} --height {height} --verbose '
        f'--outputTexture {out_path_png} --outputNitro {out_path_guv} {in_dir}\n'
        '\n'
    )

    # When grit runs, the ".grit" file must be in the same folder as the
    # ".png" file and have the same name. It is needed to copy it where the
    # final ".png" file with the atlas is generated.
    out_path_grit_copy = os.path.join(full_out_temp_dir, atlas_name + '_texture.grit')

    self.print(
        f'build {out_path_grit_copy} : copy {in_path_grit} || {full_out_dir}\n'
        '\n'
    )

    # The GRF file needs to be saved to the filesystem
    out_path_grit = os.path.join(full_out_dir, atlas_name + '_texture')
    out_path_grf = os.path.join(full_out_dir, atlas_name + '_texture.grf')

    self.print(
        f'build {out_path_grf} : grit {out_path_png} {out_path_grit_copy} || {full_out_dir}\n'
        f'  in_path_img = {out_path_png}\n'
        f'  grit_out_path = {out_path_grit}\n'
        f'  options = -ftr -fh! -W1\n'
        '\n'
    )

    # Only the GRF and GUV files are part of the final filesystem.
    self.target_files.extend([out_path_grf, out_path_guv])
|
def add_mmutil(self, in_dirs, name='soundbank', out_dir_h='build/assets/arm9/nitrofs', out_dir_bin='maxmod'):
    '''
    Build a Maxmod soundbank from audio files found in 'in_dirs'.

    Files with the extensions '.wav', '.mod', '.s3m', '.it' and '.xm' are
    collected and packed into a soundbank called 'name', which is added to
    the filesystem at 'out_dir_bin'.

    The conversion also emits a header with the definitions needed to use
    the soundbank. It is written to 'out_dir_h' and must be passed as an
    additional header dependency to the ARM9 binary:

        nitrofs_soundbank_header = nitrofs.add_mmutil(['nitrofs/audio'])

        [...]

        arm9.add_header_dependencies([nitrofs_soundbank_header])

    Returns the path of the generated header.
    '''
    full_out_dir_bin = os.path.join(self.out_assets_path, out_dir_bin)
    self.add_dir_target(full_out_dir_bin)

    full_out_dir_h = out_dir_h
    self.add_dir_target(full_out_dir_h)

    # Gather every supported audio file from all the input directories.
    in_audio_files = []
    for src_dir in in_dirs:
        in_audio_files.extend(
            gen_input_file_list(src_dir, ('.it', '.mod', '.s3m', '.xm', '.wav')))

    out_path_bin = os.path.join(full_out_dir_bin, name + '.bin')
    out_path_info_h = os.path.join(full_out_dir_h, name + '_info.h')

    # Only the binary soundbank goes into the filesystem; the header is a
    # build-time artifact for the ARM9 code.
    self.target_files.append(out_path_bin)

    all_audio_files = ' '.join(in_audio_files)
    self.print(
        f'build {out_path_bin} {out_path_info_h} : mmutil {all_audio_files} || {full_out_dir_bin} {full_out_dir_h}\n'
        f'  soundbank_bin = {out_path_bin}\n'
        f'  soundbank_info_h = {out_path_info_h}\n'
        '\n'
    )

    return out_path_info_h
|
def add_tlf(self, teak, out_dir='teak'):
    '''
    Copy the TLF file produced by a TeakBinary into the filesystem.
    '''
    # Compared by class name so it works across module reloads.
    assert type(teak).__name__ == 'TeakBinary'

    dest_dir = os.path.join(self.out_assets_path, out_dir)
    self.add_dir_target(dest_dir)

    out_tlf = os.path.join(dest_dir, get_file_name(teak.tlf_path))

    self.print(f'build {out_tlf}: copy {teak.tlf_path} || {dest_dir}\n\n')

    self.target_files.append(out_tlf)
|
def add_arm9_dsl(self, dynamic_lib, out_dir='dsl'):
    '''
    Copy the DSL file produced by an Arm9DynamicLibrary into the filesystem.
    '''
    # Compared by class name so it works across module reloads.
    assert type(dynamic_lib).__name__ == 'Arm9DynamicLibrary'

    dest_dir = os.path.join(self.out_assets_path, out_dir)
    self.add_dir_target(dest_dir)

    out_dsl = os.path.join(dest_dir, get_file_name(dynamic_lib.dsl_path))

    self.print(f'build {out_dsl}: copy {dynamic_lib.dsl_path} || {dest_dir}\n\n')

    self.target_files.append(out_dsl)
|
def add_files_unchanged(self, in_dirs, out_dir='files'):
    '''
    Copy every file found in the given directories into the filesystem
    without any conversion step.
    '''
    full_out_dir = os.path.join(self.out_assets_path, out_dir)

    entries = []

    # No extension filter: every file is taken as-is.
    for src_dir in in_dirs:
        found = gen_input_file_list(src_dir)
        entries.extend(gen_out_file_list(found, src_dir, full_out_dir, '', ''))

    for entry in entries:
        dst_dir = get_parent_dir(entry.out_path)
        self.add_dir_target(dst_dir)

        src = entry.in_path
        dst = entry.out_path

        self.print(f'build {dst} : copy {src} || {dst_dir}\n\n')
|
def _add_nflib_gfx(self, in_dirs, out_dir, xp_map, xp_img, xp_pal, grit_args):
    '''
    NFLib: Generic rule to use grit to convert graphics files in png and jpg
    format that don't share palettes or tilesets. The 'xp_map', 'xp_img' and
    'xp_pal' booleans select which outputs (map, tileset, palette) grit
    should export; disabled outputs are suppressed with '-m!', '-g!', '-p!'.
    '''
    full_out_dir = os.path.join(self.out_assets_path, out_dir)

    entries = []

    for src_dir in in_dirs:
        for ext in ('.png', '.jpg'):
            found = gen_input_file_list(src_dir, (ext))
            entries.extend(gen_out_file_list(found, src_dir, full_out_dir, ext, ''))

    for entry in entries:
        grit_out_path = entry.out_path

        out_path_dir = get_parent_dir(grit_out_path)
        self.add_dir_target(out_path_dir)

        in_path_png = entry.in_path

        out_files = []
        grit_enable_args = []

        # For each possible output: add it to the expected outputs if
        # enabled, otherwise pass grit the flag that suppresses it.
        for enabled, ext, disable_flag in ((xp_map, '.map', '-m!'),
                                           (xp_img, '.img', '-g!'),
                                           (xp_pal, '.pal', '-p!')):
            if enabled:
                out_files.append(grit_out_path + ext)
            else:
                grit_enable_args.append(disable_flag)

        self.target_files.extend(out_files)

        out_files_str = ' '.join(out_files)
        grit_enable_flags_str = ' '.join(grit_enable_args)

        self.print(
            f'build {out_files_str} : grit {in_path_png} || {out_path_dir}\n'
            f'  in_path_img = {in_path_png}\n'
            f'  grit_out_path = {grit_out_path}\n'
            f'  options = -W1 -ftB -fh! {grit_enable_flags_str} {grit_args}\n'
            '\n'
        )
|
def _add_nflib_gfx_shared_pal(self, in_dir, out_dir, xp_map, xp_img, grit_args):
    '''
    NFLib: Generic rule to use grit to convert graphics files in png and jpg
    format that share palettes. It is possible to specify what files to
    export (map, tileset) by using the 'xp_map' and 'xp_img' parameters.

    All images in 'in_dir' are converted in a single grit invocation so
    that they share one palette; the palette file is then duplicated once
    per image so every image has a '.pal' file with its own name.
    '''
    # First, run grit to export all maps and tilesets and the single shared
    # palette. Later that palette will be copied and renamed so that every
    # PNG file ends up with its own palette.

    full_out_dir = os.path.join(self.out_assets_path, out_dir)

    in_out_files = []

    in_files = gen_input_file_list(in_dir, ('.png'))
    in_out_files.extend(gen_out_file_list(in_files, in_dir, full_out_dir, '.png', ''))

    in_files = gen_input_file_list(in_dir, ('.jpg'))
    in_out_files.extend(gen_out_file_list(in_files, in_dir, full_out_dir, '.jpg', ''))

    # Sharing a palette only makes sense with more than one image.
    assert len(in_out_files) >= 2, "At least 2 files expected"

    # Collect all input paths and the per-image outputs selected by the
    # 'xp_map'/'xp_img' flags.
    in_paths_png = []
    out_files = []
    for in_out_file in in_out_files:
        in_paths_png.append(in_out_file.in_path)

        if xp_map:
            out_path_map = in_out_file.out_path + '.map'
            out_files.append(out_path_map)

        if xp_img:
            out_path_img = in_out_file.out_path + '.img'
            out_files.append(out_path_img)

    # The shared palette is emitted under the name of the first image.
    grit_out_shared_pal_path = in_out_files[0].out_path
    out_path_dir = get_parent_dir(grit_out_shared_pal_path)
    self.add_dir_target(out_path_dir)

    out_path_pal = grit_out_shared_pal_path + '.pal'
    out_files.append(out_path_pal)

    grit_enable_args = []

    if not xp_map:
        grit_enable_args.append('-m!')

    if not xp_img:
        grit_enable_args.append('-g!')

    self.target_files.extend(out_files)

    in_files_str = ' '.join(in_paths_png)
    out_files_str = ' '.join(out_files)
    grit_enable_flags_str = ' '.join(grit_enable_args)

    # '-pS' enables shared-palette mode; '-O' names the shared palette file.
    self.print(
        f'build {out_files_str} : grit_nf_shared {in_files_str} || {out_path_dir}\n'
        f'  in_files_png = {in_files_str}\n'
        f'  options = -W1 -ftB -fh! {grit_enable_flags_str} {grit_args} -pS -D{out_path_dir} -O{grit_out_shared_pal_path}\n'
        '\n'
    )

    # Copy palette and rename it for each BG (every image after the first
    # gets a copy of the shared palette under its own name).
    for in_out_file in in_out_files[1:]:
        out_target_path_pal = in_out_file.out_path + '.pal'
        self.target_files.append(out_target_path_pal)
        self.print(
            f'build {out_target_path_pal} : copy {out_path_pal} || {out_path_dir}\n'
            '\n'
        )
|
def add_nflib_bg_8bit(self, in_dirs, out_dir='bg'):
    '''
    NFLib: Convert every png/jpg in 'in_dirs' into an 8-bit bitmap
    background (map, tileset and palette are all exported). Color 0xFF00FF
    (magenta) is the transparent color.
    '''
    grit_args = '-gTFF00FF -gb -gB8'
    self._add_nflib_gfx(in_dirs, out_dir, True, True, True, grit_args)
|
def add_nflib_bg_16bit(self, in_dirs, out_dir='bg'):
    '''
    NFLib: Convert every png/jpg in 'in_dirs' into a 16-bit bitmap
    background (only the tileset is exported). Color 0xFF00FF (magenta) is
    the transparent color.
    '''
    grit_args = '-gTFF00FF -gb -gB16'
    self._add_nflib_gfx(in_dirs, out_dir, False, True, False, grit_args)
|
def add_nflib_bg_tiled(self, in_dirs, out_dir='bg'):
    '''
    NFLib: Convert every png/jpg in 'in_dirs' into an 8-bit tiled
    background (map, tileset and palette are exported). Color 0xFF00FF
    (magenta) is the transparent color.
    '''
    grit_args = '-gTFF00FF -gt -gB8 -mR8 -mLs'
    self._add_nflib_gfx(in_dirs, out_dir, True, True, True, grit_args)
|
def add_nflib_bg_tiled_tileset(self, in_dirs, out_dir='bg'):
    '''
    NFLib: Convert every png/jpg in 'in_dirs' into an 8-bit tileset
    (tileset and palette are exported, but no map). Color 0xFF00FF
    (magenta) is the transparent color.
    '''
    grit_args = '-gTFF00FF -gt -gB8 -mR8 -mLs'
    self._add_nflib_gfx(in_dirs, out_dir, False, True, True, grit_args)
|
def add_nflib_bg_affine(self, in_dirs, out_dir='bg'):
    '''
    NFLib: Convert every png/jpg in 'in_dirs' into an 8-bit affine
    background (map, tileset and palette are exported). Color 0xFF00FF
    (magenta) is the transparent color.
    '''
    grit_args = '-gTFF00FF -gt -gB8 -mR8 -mLa'
    self._add_nflib_gfx(in_dirs, out_dir, True, True, True, grit_args)
|
def add_nflib_sprite_256(self, in_dirs, out_dir='spr'):
    '''
    NFLib: Convert every png/jpg in 'in_dirs' into 8-bit (256 color)
    sprites (tileset and palette are exported). Color 0xFF00FF (magenta)
    is the transparent color.
    '''
    grit_args = '-gTFF00FF -gt -gB8'
    self._add_nflib_gfx(in_dirs, out_dir, False, True, True, grit_args)
|
def add_nflib_sprite_3d(self, in_dirs, out_dir='spr'):
    '''
    NFLib: Generate rules that turn every png and jpg image found in the
    given list of directories into a 3D sprite stored in 'out_dir'.
    Magenta (0xFF00FF) is treated as the transparent color.
    '''
    self._add_nflib_gfx(in_dirs, out_dir, False, True, True, '-gTFF00FF -gx -gb -gB8')
|
|
|
|
def add_nflib_font(self, in_dirs, out_dir='fnt'):
    '''
    NFLib: Generate rules that turn every png and jpg image found in the
    given list of directories into a font stored in 'out_dir'. Magenta
    (0xFF00FF) is treated as the transparent color.
    '''
    self._add_nflib_gfx(in_dirs, out_dir, False, True, True, '-gTFF00FF -gt -gB8')
|
|
|
|
def add_nflib_colmap(self, in_dirs, out_dir='fnt'):
    '''
    NFLib: Generate rules that turn every png and jpg image found in the
    given list of directories into a collision map stored in 'out_dir'.

    NOTE(review): the default 'out_dir' of 'fnt' looks like a copy-paste
    from the font helper; collision maps may belong in their own
    directory. Confirm with callers before changing the default.
    '''
    self._add_nflib_gfx(in_dirs, out_dir, True, False, False, '-gB8 -mRt -mLf')
|
|
|
|
def add_nflib_colbg(self, in_dirs, out_dir='fnt'):
    '''
    NFLib: Generate rules that turn every png and jpg image found in the
    given list of directories into a collision background stored in
    'out_dir'.

    NOTE(review): the default 'out_dir' of 'fnt' looks like a copy-paste
    from the font helper; collision backgrounds may belong in their own
    directory. Confirm with callers before changing the default.
    '''
    self._add_nflib_gfx(in_dirs, out_dir, True, True, False, '-gt -gB8 -mRtp -mLf')
|
|
|
|
def add_nflib_bg_8bit_shared_pal(self, in_dir, out_dir):
    '''
    NFLib: Generate rules that turn every png and jpg image inside
    'in_dir' into an 8-bit bitmap background. All images share a single
    palette, but a palette file is emitted per image (all identical).
    Magenta (0xFF00FF) is treated as the transparent color.
    '''
    self._add_nflib_gfx_shared_pal(in_dir, out_dir, True, True, '-gTFF00FF -gb -gB8')
|
|
|
|
def add_nflib_bg_affine_shared_pal(self, in_dir, out_dir):
    '''
    NFLib: Generate rules that turn every png and jpg image inside
    'in_dir' into an 8-bit affine background. All images share a single
    palette, but a palette file is emitted per image (all identical).
    Magenta (0xFF00FF) is treated as the transparent color.
    '''
    self._add_nflib_gfx_shared_pal(in_dir, out_dir, True, True, '-gTFF00FF -gt -gB8 -mR8 -mLa')
|
|
|
|
def add_nitro_engine_obj(self, in_dirs, out_dir='models'):
    '''
    Nitro Engine: This function gets as input a list of directories. It
    will look for files with extension '.obj', each of which must be
    accompanied by a '.json' file with the same base name describing the
    conversion. Rules are generated to convert each model with 'obj2dl'
    and add the resulting '.dl' file to the filesystem.

    The JSON file must contain a 'texture' entry ([width, height]).
    Optional entries: 'scale' (number) and 'use-vertex-color' (bool).

    Raises AssertionError if a JSON file has no 'texture' entry.
    '''
    full_out_dir = os.path.join(self.out_assets_path, out_dir)

    in_out_files = []

    for in_dir in in_dirs:
        # Note: the extension filter must be a tuple, hence the comma.
        in_files = gen_input_file_list(in_dir, ('.obj',))
        in_out_files.extend(gen_out_file_list(in_files, in_dir, full_out_dir, '.obj', '.dl'))

    for in_out_file in in_out_files:
        out_path_dl = in_out_file.out_path

        # Make sure the destination directory gets created first.
        out_path_dir = get_parent_dir(out_path_dl)
        self.add_dir_target(out_path_dir)

        in_path_obj = in_out_file.in_path
        in_path_json = replace_ext(in_path_obj, '.obj', '.json')

        json_data = load_json(in_path_json)
        assert 'texture' in json_data, 'Texture size must be provided'

        # Build the obj2dl command line arguments from the JSON metadata.
        args = (
            '--texture ' + str(json_data['texture'][0]) + ' ' +
            str(json_data['texture'][1])
        )

        if 'scale' in json_data:
            args += ' --scale ' + str(json_data['scale'])

        if 'use-vertex-color' in json_data:
            if json_data['use-vertex-color']: # Only add this if True
                args += ' --use-vertex-color '

        self.target_files.append(out_path_dl)

        self.print(
            f'build {out_path_dl} : obj2dl {in_path_obj} {in_path_json} || {out_path_dir}\n'
            f'  in_path_obj = {in_path_obj}\n'
            f'  args = {args}\n'
            '\n'
        )
|
|
|
|
def add_nitro_engine_md5(self, in_dirs, out_dir='models'):
    '''
    Nitro Engine: Looks for md5mesh files in the provided directories.
    Each file must be accompanied by a json file with some information.
    For example:

        {
            "texture": [256, 256],
            "blender-fix": true,
            "export-base-pose": false,
            "animations": [
                {
                    "file": "wave.md5anim",
                    "skip-frames": 1
                }
            ]
        }

    A 'md5_to_dsma' rule is emitted for the mesh (producing a '.dsm'
    file) and one per listed animation (producing a '.dsa' file each).

    Raises AssertionError if a JSON file has no 'texture' entry, or if
    an animation entry has no 'file' entry.
    '''
    full_out_dir = os.path.join(self.out_assets_path, out_dir)

    md5mesh_in_out_files = []

    for in_dir in in_dirs:
        # Note: the extension filter must be a tuple, hence the comma.
        in_files = gen_input_file_list(in_dir, ('.md5mesh',))
        md5mesh_in_out_files.extend(gen_out_file_list(in_files, in_dir, full_out_dir, '.md5mesh', ''))

    for in_out_file in md5mesh_in_out_files:
        # Make sure the destination directory gets created first.
        out_path_dir = get_parent_dir(in_out_file.out_path)
        self.add_dir_target(out_path_dir)

        in_path_md5mesh = in_out_file.in_path
        in_path_json = replace_ext(in_path_md5mesh, '.md5mesh', '.json')

        json_data = load_json(in_path_json)
        assert 'texture' in json_data, 'Texture size must be provided'

        # Build the md5_to_dsma command line arguments for the mesh.
        args = (
            '--texture ' + str(json_data['texture'][0]) + ' ' +
            str(json_data['texture'][1])
        )

        if 'blender-fix' in json_data:
            if json_data['blender-fix']:
                args += ' --blender-fix'

        base_name = remove_ext(get_file_name(in_path_md5mesh))

        args += f' --name {base_name} --output {out_path_dir} --model {in_path_md5mesh}'

        out_path_dsm = in_out_file.out_path + '.dsm'
        self.target_files.append(out_path_dsm)

        self.print(
            f'build {out_path_dsm} : md5_to_dsma {in_path_md5mesh} {in_path_json} || {out_path_dir}\n'
            f'  args = {args}\n'
            '\n'
        )

        if 'animations' in json_data:
            # Animation files are looked up relative to the md5mesh file.
            in_path_dir = get_parent_dir(in_out_file.in_path)

            for animation in json_data['animations']:
                assert 'file' in animation
                in_path_md5anim = os.path.join(in_path_dir, animation['file'])

                args = f' --name {base_name} --output {out_path_dir} --anim {in_path_md5anim}'

                if 'skip-frames' in animation:
                    args += ' --skip-frames ' + str(animation['skip-frames'])

                # The blender fix applies to animations too, so it is
                # read from the top-level JSON object.
                if 'blender-fix' in json_data:
                    if json_data['blender-fix']:
                        args += ' --blender-fix'

                base_name_anim = remove_ext(get_file_name(in_path_md5anim))

                out_path_dsa = in_out_file.out_path + '_' + base_name_anim + '.dsa'
                self.target_files.append(out_path_dsa)

                self.print(
                    f'build {out_path_dsa} : md5_to_dsma {in_path_md5anim} {in_path_json} || {out_path_dir}\n'
                    f'  args = {args}\n'
                    '\n'
                )
|
|
|
|
class NitroFS(GenericFilesystem):
    '''
    Filesystem builder for assets that need a conversion step before they
    end up in NitroFS (graphics, music, etc.). Files that are used as-is
    don't need this class: add them through 'nitrofsdirs' in the 'NdsRom'
    class instead.
    '''

    ASSETS_BARRIER_NITROFS = 'assets_nitrofs_flag'

    def __init__(self):
        super().__init__(self.ASSETS_BARRIER_NITROFS,
                         'build/nitrofs', 'build/temp/nitrofs')
|
|
|
|
class FatFS(GenericFilesystem):
    '''
    Filesystem builder for assets that need a conversion step before they
    end up in the SD filesystem (graphics, music, etc.). It can also be
    used to add generic files to the tree structure.
    '''

    ASSETS_BARRIER_FATFS = 'assets_fatfs_flag'

    def __init__(self, out_dir='sdroot'):
        # Temporary files go under 'build/temp/<out_dir>'.
        super().__init__(self.ASSETS_BARRIER_FATFS, out_dir,
                         os.path.join('build/temp', out_dir))
|
|
|
|
class NdsRom(GenericBinary):
|
|
'''
|
|
Class that represents a NDS ROM and may be linked to any number of CPU and
|
|
filesystem binaries.
|
|
'''
|
|
|
|
def __init__(self, *, nitrofsdirs=None,
             nds_path=None,
             binaries=None,
             game_title='NDS ROM',
             game_subtitle='Built with BlocksDS',
             game_author='github.com/blocksds/sdk',
             game_icon='${BLOCKSDS}/sys/icon.bmp'):
    '''
    Constructor of NDS ROM build rules.

    Mandatory arguments:

    - 'binaries': List of binaries added to this NDS ROM. You must provide a
      list of CPU binaries with at least an ARM9 binary. If no ARM7 is
      provided, the default BlocksDS binary will be used. Teak binaries and
      NitroFS filesystem images are also added to this list.

    Optional arguments:

    - 'nitrofsdirs': List of path to directories that will be added to the
      root of the NitroFS filesystem.
    - 'game_title': First line of the ROM header text.
    - 'game_subtitle': Second line of the ROM header text.
    - 'game_author': Third line of the ROM header text.
    - 'game_icon': Icon to be used in the ROM header.
    - 'nds_path': Output path of the generated NDS file. The default value
      is generated from the current directory name.
    '''

    super().__init__(None)

    # Copy the caller's list: later steps append 'build/nitrofs' to this
    # attribute, and we must not mutate a list owned by the caller. Using
    # None as the default also avoids the shared mutable default argument
    # pitfall (a plain '=[]' default is one object reused by every call).
    self.nitrofsdirs = list(nitrofsdirs) if nitrofsdirs is not None else []

    # Derive the default ROM name from the current directory name at
    # construction time (not at class definition time).
    if nds_path is None:
        nds_path = os.path.basename(os.getcwd()) + '.nds'
    self.nds_path = nds_path

    self.game_title = game_title
    self.game_subtitle = game_subtitle
    self.game_author = game_author
    self.game_icon = game_icon

    self.arm9 = None
    self.nitrofs = None

    # If no ARM7 is specified later, use the default one
    self.arm7 = Arm7BinaryDefault()

    # After everything is setup, load the provided binaries
    self.sub_binaries = []
    self._add_binaries(binaries if binaries is not None else [])
|
|
|
|
def _add_binaries(self, binaries):
    '''
    Attach a list of binaries to this NDS ROM instance, remembering the
    ones that need special treatment (NitroFS image, ARM9 and ARM7).
    '''
    self.sub_binaries.extend(binaries)

    for binary in binaries:
        kind = type(binary).__name__
        if kind == 'NitroFS':
            self.nitrofs = binary
        elif kind == 'Arm9Binary':
            self.arm9 = binary
        elif kind in ('Arm7Binary', 'Arm7BinaryDefault'):
            # This will replace the default ARM7 binary
            self.arm7 = binary
        # Any other binary type needs no special handling here.
|
|
|
|
def _gen_rules_nds(self):
    '''
    Internal function that generates a rule to call ndstool and generate a
    NDS ROM file. This rule depends directly on NitroFS directories if any
    has been provided by the user.
    '''
    # Combine the title strings. Note: both branches must be f-strings;
    # the original first branch was a plain string literal, so the ROM
    # title contained the text '{self.game_title}' verbatim when no
    # subtitle was set.
    if self.game_subtitle is None:
        game_full_title = f'"{self.game_title};{self.game_author}"'
    else:
        game_full_title = f'"{self.game_title};{self.game_subtitle};{self.game_author}"'

    # Work on a local copy of the directory list so that calling this
    # function more than once doesn't keep appending 'build/nitrofs' to
    # self.nitrofsdirs (and so a list passed in by the caller is never
    # mutated).
    nitrofsdirs = list(self.nitrofsdirs) if self.nitrofsdirs else []

    # If a filesystem has been provided, add it to the build and make the
    # final ROM depend on the filesystem contents being ready. Note that
    # this dependency is a real dependency, not "order only". It is
    # important to regenerate the NDS ROM whenever a file changes.
    if self.nitrofs is not None:
        nitrofsdirs.append('build/nitrofs')
        flag_dep = f' {self.nitrofs.flag_assets_name}'
    else:
        flag_dep = ''

    if len(nitrofsdirs) > 0:
        ndstool_nitrofs_flags = '-d ' + ' '.join(nitrofsdirs)
    else:
        ndstool_nitrofs_flags = ''

    nitrodir_paths = ' '.join(nitrofsdirs)

    self.print(
        f'build {self.nds_path}: ndstool {self.arm7.elf_path} {self.arm9.elf_path} {nitrodir_paths} {flag_dep}\n'
        f'  arm7elf = {self.arm7.elf_path}\n'
        f'  arm9elf = {self.arm9.elf_path}\n'
        f'  game_icon = {self.game_icon}\n'
        f'  game_full_title = {game_full_title}\n'
        f'  ndstool_nitrofs_flags = {ndstool_nitrofs_flags}\n'
        '\n'
    )
|
|
|
|
def generate_nds(self):
    '''
    Generate rules for the build tools, merge the build rules of every
    sub binary, and emit the rules that produce the final NDS file.

    An ARM9 binary must have been provided. If no ARM7 is provided it
    will use the default ARM7 provided by BlocksDS. Other binaries, like
    Teak binaries or NitroFS, are optional.
    '''
    # General rules for all used tools
    self._gen_rules_tools()

    # Append the build rules of every sub-binary
    for binary in self.sub_binaries:
        self.contents += binary.contents

    # Rules to build the NDS ROM itself
    self._gen_rules_nds()

    # Collect the directory targets required by all internal binaries
    for binary in self.sub_binaries:
        self.dir_targets.update(binary.dir_targets)

    # Rules to generate all directories
    self._gen_rules_build_directories()
|