Refactor teachable learnset helper (#6378)
This commit is contained in:
parent
3398d75fe8
commit
c7ac1a19df
1
.gitignore
vendored
1
.gitignore
vendored
@ -45,3 +45,4 @@ tools/trainerproc/trainerproc
|
||||
*.smol
|
||||
*.fastSmol
|
||||
*.smolTM
|
||||
__pycache__
|
||||
|
||||
18
Makefile
18
Makefile
@ -165,6 +165,12 @@ PATCHELF := $(TOOLS_DIR)/patchelf/patchelf$(EXE)
|
||||
ROMTEST ?= $(shell { command -v mgba-rom-test || command -v $(TOOLS_DIR)/mgba/mgba-rom-test$(EXE); } 2>/dev/null)
|
||||
ROMTESTHYDRA := $(TOOLS_DIR)/mgba-rom-test-hydra/mgba-rom-test-hydra$(EXE)
|
||||
|
||||
# Learnset helper is a Python script
|
||||
LEARNSET_HELPERS_DIR := $(TOOLS_DIR)/learnset_helpers
|
||||
LEARNSET_HELPERS_DATA_DIR := $(LEARNSET_HELPERS_DIR)/porymoves_files
|
||||
LEARNSET_HELPERS_BUILD_DIR := $(LEARNSET_HELPERS_DIR)/build
|
||||
ALL_LEARNABLES_JSON := $(LEARNSET_HELPERS_BUILD_DIR)/all_learnables.json
|
||||
|
||||
PERL := perl
|
||||
SHA1 := $(shell { command -v sha1sum || command -v shasum; } 2>/dev/null) -c
|
||||
|
||||
@ -338,6 +344,7 @@ generated: $(AUTO_GEN_TARGETS)
|
||||
|
||||
clean-generated:
|
||||
-rm -f $(AUTO_GEN_TARGETS)
|
||||
-rm -f $(ALL_LEARNABLES_JSON)
|
||||
|
||||
COMPETITIVE_PARTY_SYNTAX := $(shell PATH="$(PATH)"; echo 'COMPETITIVE_PARTY_SYNTAX' | $(CPP) $(CPPFLAGS) -imacros include/gba/defines.h -imacros include/config/general.h | tail -n1)
|
||||
ifeq ($(COMPETITIVE_PARTY_SYNTAX),1)
|
||||
@ -427,11 +434,16 @@ $(OBJ_DIR)/sym_common.ld: sym_common.txt $(C_OBJS) $(wildcard common_syms/*.txt)
|
||||
$(OBJ_DIR)/sym_ewram.ld: sym_ewram.txt
|
||||
$(RAMSCRGEN) ewram_data $< ENGLISH > $@
|
||||
|
||||
MOVES_JSON_DIR := $(TOOLS_DIR)/learnset_helpers/porymoves_files
|
||||
TEACHABLE_DEPS := $(shell find data/ -type f -name '*.inc') $(INCLUDE_DIRS)/constants/tms_hms.h $(C_SUBDIR)/pokemon.c $(wildcard $(MOVES_JSON_DIR)/*.json)
|
||||
TEACHABLE_DEPS := $(ALL_LEARNABLES_JSON) $(shell find data/ -type f -name '*.inc') $(INCLUDE_DIRS)/constants/tms_hms.h $(C_SUBDIR)/pokemon.c
|
||||
|
||||
$(LEARNSET_HELPERS_BUILD_DIR):
|
||||
@mkdir -p $@
|
||||
|
||||
$(ALL_LEARNABLES_JSON): $(wildcard $(LEARNSET_HELPERS_DATA_DIR)/*.json) | $(LEARNSET_HELPERS_BUILD_DIR)
|
||||
python3 $(LEARNSET_HELPERS_DIR)/make_learnables.py $(LEARNSET_HELPERS_DATA_DIR) $@
|
||||
|
||||
$(DATA_SRC_SUBDIR)/pokemon/teachable_learnsets.h: $(TEACHABLE_DEPS)
|
||||
python3 $(TOOLS_DIR)/learnset_helpers/teachable.py
|
||||
python3 $(LEARNSET_HELPERS_DIR)/make_teachables.py $<
|
||||
|
||||
# Linker script
|
||||
LD_SCRIPT := ld_script_modern.ld
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
62
tools/learnset_helpers/make_learnables.py
Executable file
62
tools/learnset_helpers/make_learnables.py
Executable file
@ -0,0 +1,62 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
Usage: python3 make_learnables.py INPUTS_DIR OUTPUT_FILE
|
||||
|
||||
Build a primary store of learnable moves for each species based on input documents. This script
|
||||
is meant to be run to generate a pre-processed store of data that should not change very much;
|
||||
thus, it can safely be pre-computed in order to speed up incremental builds for end-users.
|
||||
"""
|
||||
|
||||
from functools import reduce
|
||||
|
||||
import json
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
|
||||
def from_single(fname: pathlib.Path) -> dict[str, set[str]]:
    """
    Read one porymoves JSON document and collect, per species, every move
    learnable by any method (level-up, TM, egg, or tutor).
    """
    with open(fname, "r") as fp:
        document = json.load(fp)

    learnables: dict[str, set[str]] = {}
    for species, by_method in document.items():
        # Level-up entries are objects; the other method lists are bare names.
        moves = {entry["Move"] for entry in by_method["LevelMoves"]}
        moves.update(by_method["TMMoves"])
        moves.update(by_method["EggMoves"])
        moves.update(by_method["TutorMoves"])
        learnables[species] = moves
    return learnables
|
||||
|
||||
|
||||
def from_batch(dir: pathlib.Path) -> dict[str, set[str]]:
    """
    Merge the per-file learnable-move sets of every *.json under *dir* into a
    single mapping of species -> union of learnable moves.
    """
    merged: dict[str, set[str]] = {}
    for per_file in map(from_single, dir.glob("*.json")):
        for species, moves in per_file.items():
            # Union with whatever earlier files contributed for this species.
            merged[species] = merged.get(species, set()) | moves
    return merged
|
||||
|
||||
|
||||
def main():
    """
    CLI entry point.

    Merges every *.json under sys.argv[1] into a single JSON document written
    to sys.argv[2], mapping each species to a sorted list of learnable moves.
    Exits non-zero with a usage message on bad arguments.
    """
    if len(sys.argv) < 3:
        print("Missing required arguments", file=sys.stderr)
        print(__doc__, file=sys.stderr)
        sys.exit(1)  # sys.exit, not quit(): quit() is only provided by the site module

    INPUTS_DIR = pathlib.Path(sys.argv[1])
    OUTPUT_FILE = pathlib.Path(sys.argv[2])

    # Explicit checks instead of `assert`, which is silently stripped under
    # `python -O` and would let a bad path fall through.
    if not INPUTS_DIR.exists():
        sys.exit(f"{INPUTS_DIR=} does not exist")
    if not INPUTS_DIR.is_dir():
        sys.exit(f"{INPUTS_DIR=} is not a directory")
    if not OUTPUT_FILE.parent.exists():
        sys.exit(f"parent of {OUTPUT_FILE=} does not exist")

    # Sorted lists make the output deterministic and diff-friendly.
    batch = {
        species: sorted(learnables)
        for species, learnables in from_batch(INPUTS_DIR).items()
    }
    with open(OUTPUT_FILE, "w") as fp:
        json.dump(batch, fp, indent=2)


if __name__ == "__main__":
    main()
|
||||
204
tools/learnset_helpers/make_teachables.py
Normal file
204
tools/learnset_helpers/make_teachables.py
Normal file
@ -0,0 +1,204 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
Usage: python3 make_teachable.py SOURCE_LEARNSETS_JSON
|
||||
|
||||
Build a C-header defining the set of teachable moves for each configured-on
|
||||
species-family based on the learnable moves defined in SOURCE_LEARNSETS_JSON.
|
||||
|
||||
A move is "teachable" if it is:
|
||||
1. Can be taught by some Move Tutor in the overworld, which is identified by
|
||||
using the ChooseMonForMoveTutor special in a script and setting VAR_0x8005
|
||||
to the offered MOVE constant. (e.g., MOVE_SWAGGER)
|
||||
2. Assigned to some TM or HM in include/constants/tms_hms.h using the
|
||||
FOREACH_TM macro.
|
||||
3. Not a universal move, as defined by sUniversalMoves in src/pokemon.c.
|
||||
|
||||
For a given species, a move is considered teachable to that species if:
|
||||
1. The species is not NONE -- which learns nothing -- nor MEW -- which
|
||||
learns everything.
|
||||
2. The species can learn the move via *any* method within any Expansion-
|
||||
supported game.
|
||||
"""
|
||||
|
||||
from itertools import chain
|
||||
from textwrap import dedent
|
||||
|
||||
import glob
|
||||
import json
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
import typing
|
||||
|
||||
|
||||
CONFIG_ENABLED_PAT = re.compile(r"#define P_LEARNSET_HELPER_TEACHABLE\s+(?P<cfg_val>[^ ]*)")
|
||||
INCFILE_HAS_TUTOR_PAT = re.compile(r"special ChooseMonForMoveTutor")
|
||||
INCFILE_MOVE_PAT = re.compile(r"setvar VAR_0x8005, (MOVE_.*)")
|
||||
TMHM_MACRO_PAT = re.compile(r"F\((\w+)\)")
|
||||
UNIVERSAL_MOVES_PAT = re.compile(r"static const u16 sUniversalMoves\[\]\s*=\s*{((.|\n)*?)\n};")
|
||||
TEACHABLE_ARRAY_DECL_PAT = re.compile(r"(?P<decl>static const u16 s(?P<name>\w+)TeachableLearnset\[\]) = {[\s\S]*?};")
|
||||
SNAKIFY_PAT = re.compile(r"(?!^)([A-Z]+)")
|
||||
|
||||
|
||||
def enabled() -> bool:
    """
    Check if the user has explicitly enabled this opt-in helper via
    P_LEARNSET_HELPER_TEACHABLE in include/config/pokemon.h.
    """
    with open("./include/config/pokemon.h", "r") as cfg_pokemon_fp:
        match = CONFIG_ENABLED_PAT.search(cfg_pokemon_fp.read())
    if match is None:
        return False
    # Accept either spelling of "on".
    return match.group("cfg_val") in ("TRUE", "1")
|
||||
|
||||
|
||||
def extract_repo_tutors() -> typing.Generator[str, None, None]:
    """
    Yield MOVE constants which are *likely* assigned to a move tutor. This isn't
    foolproof, but it's suitable.
    """
    script_files = chain(
        glob.glob("./data/scripts/*.inc"),
        glob.glob("./data/maps/*/scripts.inc"),
    )
    for inc_fname in script_files:
        with open(inc_fname, "r") as inc_fp:
            content = inc_fp.read()
        # Only mine VAR_0x8005 assignments from scripts that actually invoke
        # the move-tutor special.
        if INCFILE_HAS_TUTOR_PAT.search(content) is None:
            continue
        for match in INCFILE_MOVE_PAT.finditer(content):
            yield match.group(1)
|
||||
|
||||
|
||||
def extract_repo_tms() -> typing.Generator[str, None, None]:
    """
    Yield MOVE constants assigned to a TM or HM in the user's repo.
    """
    with open("./include/constants/tms_hms.h", "r") as tmshms_fp:
        tmshms = tmshms_fp.read()

    # NOTE: re.finditer() returns an iterator object, which is always truthy,
    # so the previous `if not match_it: return` guard was dead code — an empty
    # match set simply yields nothing from the loop below.
    for match in TMHM_MACRO_PAT.finditer(tmshms):
        yield f"MOVE_{match.group(1)}"
|
||||
|
||||
|
||||
def extract_repo_universals() -> list[str]:
    """
    Return a list of MOVE constants which are deemed to be universal and can
    thus be learned by any species.
    """
    with open("./src/pokemon.c", "r") as pokemon_fp:
        source = pokemon_fp.read()

    match = UNIVERSAL_MOVES_PAT.search(source)
    if match is None:
        return []
    # Split the array body on commas and keep the non-empty trimmed tokens
    # (a trailing comma in the C array produces one empty token).
    tokens = (token.strip() for token in match.group(1).split(","))
    return [token for token in tokens if token]
|
||||
|
||||
|
||||
def prepare_output(all_learnables: dict[str, set[str]], repo_teachables: set[str], header: str) -> str:
    """
    Build the file content for teachable_learnsets.h.

    Rewrites each s<Name>TeachableLearnset array in the existing file so it
    contains only moves that are both learnable by that species
    (all_learnables) and teachable in this repo (repo_teachables), prefixed by
    *header*. NONE is re-emitted from a fixed template and MEW is copied
    through unchanged.
    """
    with open("./src/data/pokemon/teachable_learnsets.h", "r") as teachables_fp:
        old = teachables_fp.read()

    # `cursor` tracks how far into the old file content has been copied.
    cursor = 0
    new = header + dedent("""
        static const u16 sNoneTeachableLearnset[] = {
            MOVE_UNAVAILABLE,
        };
    """)

    # Separator placed between moves inside a generated array literal.
    joinpat = ",\n    "
    for species in TEACHABLE_ARRAY_DECL_PAT.finditer(old):
        match_b, match_e = species.span()
        # e.g. declaration name "MrMime" -> "MR_MIME" for dict lookups.
        species_upper = SNAKIFY_PAT.sub(r"_\1", species.group("name")).upper()
        if species_upper == "NONE":
            # NONE is hard-coded to be at the start of the file to keep this code simple.
            cursor = match_e + 1
            continue

        if species_upper == "MEW":
            new += old[cursor:match_e + 1]  # copy the original content and skip.
            cursor = match_e + 1
            continue

        # Keep only the moves this repo can actually teach (TM/HM or tutor).
        repo_species_teachables = filter(lambda m: m in repo_teachables, all_learnables[species_upper])

        new += old[cursor:match_b]
        new += "\n".join([
            f"{species.group('decl')} = {{",
            f"    {joinpat.join(chain(repo_species_teachables, ('MOVE_UNAVAILABLE',)))},",
            "};\n",
        ])
        # Advance past the matched array (+1 presumably skips the newline
        # after the closing "};" — confirm against the generated file).
        cursor = match_e + 1

    # Copy whatever trails the last matched array.
    new += old[cursor:]

    return new
|
||||
|
||||
|
||||
def prepare_header(h_align: int, tmshms: list[str], tutors: list[str], universals: list[str]) -> str:
    """
    Build the "DO NOT MODIFY" banner placed at the top of teachable_learnsets.h.

    Lists the TM/HM, tutor, and near-universal moves that informed generation,
    each section framed by a row of asterisks. *h_align* is the minimum
    interior column width; it is widened to fit the longest section title.
    Returns the banner text ending with a blank line.
    """
    universals_title = "Near-universal moves found from sUniversalMoves:"
    tmhm_title = "TM/HM moves found in \"include/constants/tms_hms.h\":"
    tutor_title = "Tutor moves found from map scripts:"
    h_align = max(h_align, len(universals_title), len(tmhm_title), len(tutor_title))

    lines = [
        "//",
        "// DO NOT MODIFY THIS FILE! It is auto-generated by tools/learnset_helpers/make_teachables.py",
        "//",
        "",
        f"// {'*' * h_align} //",
        # Fixed: this title was right-aligned (">") while the tutor and
        # universal titles below are left-aligned ("<"); all three now match.
        f"// {tmhm_title: <{h_align}} //",
    ]
    lines.extend([f"// - {move: <{h_align - 2}} //" for move in tmshms])
    lines.extend([
        f"// {'*' * h_align} //",
        f"// {tutor_title: <{h_align}} //",
    ])
    # Tutor moves are listed sorted; TM/HM order follows tms_hms.h itself.
    lines.extend([f"// - {move: <{h_align - 2}} //" for move in sorted(tutors)])
    lines.extend([
        f"// {'*' * h_align} //",
        f"// {universals_title: <{h_align}} //",
    ])
    lines.extend([f"// - {move: <{h_align - 2}} //" for move in universals])
    lines.extend([
        f"// {'*' * h_align} //",
        "",
    ])

    return "\n".join(lines)
|
||||
|
||||
|
||||
def main():
    """
    CLI entry point.

    Regenerates src/data/pokemon/teachable_learnsets.h from the merged
    learnables document given as sys.argv[1], provided the helper is enabled
    in include/config/pokemon.h.
    """
    # Opt-in helper: exit silently (status 0) when the config flag is off.
    if not enabled():
        sys.exit()

    if len(sys.argv) < 2:
        print("Missing required arguments", file=sys.stderr)
        print(__doc__, file=sys.stderr)
        sys.exit(1)  # sys.exit, not quit(): quit() is only provided by the site module

    SOURCE_LEARNSETS_JSON = pathlib.Path(sys.argv[1])

    # Explicit checks instead of `assert`, which is stripped under `python -O`.
    if not SOURCE_LEARNSETS_JSON.exists():
        sys.exit(f"{SOURCE_LEARNSETS_JSON=} does not exist")
    if not SOURCE_LEARNSETS_JSON.is_file():
        sys.exit(f"{SOURCE_LEARNSETS_JSON=} is not a file")

    repo_universals = extract_repo_universals()
    repo_tms = list(extract_repo_tms())
    repo_tutors = list(extract_repo_tutors())

    # Build the exclusion set once; the original rebuilt set(repo_universals)
    # for every move tested inside the filter lambda.
    universal_set = set(repo_universals)
    repo_teachables = {
        move for move in chain(repo_tms, repo_tutors) if move not in universal_set
    }

    # +2 accounts for the "- " bullet prefix on each listed move; default=0
    # keeps max() from raising ValueError on an (unlikely) empty move list.
    h_align = max((len(move) for move in chain(repo_universals, repo_teachables)), default=0) + 2
    header = prepare_header(h_align, repo_tms, repo_tutors, repo_universals)

    with open(SOURCE_LEARNSETS_JSON, "r") as source_fp:
        all_learnables = json.load(source_fp)

    content = prepare_output(all_learnables, repo_teachables, header)
    with open("./src/data/pokemon/teachable_learnsets.h", "w") as teachables_fp:
        teachables_fp.write(content)


if __name__ == "__main__":
    main()
|
||||
@ -1,214 +0,0 @@
|
||||
import glob
|
||||
import re
|
||||
import json
|
||||
import os
|
||||
|
||||
# before all else, abort if the config is off
# (this legacy script is opt-in via P_LEARNSET_HELper_TEACHABLE -- exactly one
# definition must exist and it must literally be "TRUE", otherwise exit 0)
with open("./include/config/pokemon.h", "r") as file:
    learnset_config = re.findall(r"#define P_LEARNSET_HELPER_TEACHABLE *([^ ]*)", file.read())
    if len(learnset_config) != 1:
        # zero or multiple definitions: treat the config as off
        quit()
    if learnset_config[0] != "TRUE":
        quit()
|
||||
|
||||
def parse_mon_name(name):
    # Convert a CamelCase species name to UPPER_SNAKE_CASE: insert "_" before
    # every run of capitals except a leading one, then uppercase everything.
    # e.g. "MrMime" -> "MR_MIME", "HoOh" -> "HO_OH".
    snaked = re.sub(r'(?!^)([A-Z]+)', r'_\1', name)
    return snaked.upper()
|
||||
|
||||
# Accumulators for the repo scan below: moves offered by TMs/HMs and by
# overworld move tutors, in first-seen order and without duplicates.
tm_moves = []
tutor_moves = []

# scan incs
incs_to_check = glob.glob('./data/scripts/*.inc') # all .incs in the script folder
incs_to_check += glob.glob('./data/maps/*/scripts.inc') # all map scripts

# nothing to scan -> bail out (NB: these are .inc scripts, not jsons,
# despite what the original comment said)
if len(incs_to_check) == 0:
    quit()

for file in incs_to_check:
    with open(file, 'r') as f2:
        raw = f2.read()
        # Only scripts that invoke the tutor special are mined for
        # "setvar VAR_0x8005, MOVE_*" assignments (the offered tutor move).
        if 'special ChooseMonForMoveTutor' in raw:
            for x in re.findall(r'setvar VAR_0x8005, (MOVE_.*)', raw):
                if not x in tutor_moves:
                    tutor_moves.append(x)

# scan TMs and HMs: every F(<Name>) macro argument becomes MOVE_<Name>
with open("./include/constants/tms_hms.h", 'r') as file:
    for x in re.findall(r'F\((.*)\)', file.read()):
        if not 'MOVE_' + x in tm_moves:
            tm_moves.append('MOVE_' + x)

# look up universal moves to exclude them
universal_moves = []
with open("./src/pokemon.c", "r") as file:
    # findall returns tuples of all capture groups; [0] takes the first (and,
    # presumably, only) sUniversalMoves match, then each group is comma-split
    # into individual MOVE constants with blanks discarded.
    for x in re.findall(r"static const u16 sUniversalMoves\[\] =(.|\n)*?{((.|\n)*?)};", file.read())[0]:
        x = x.replace("\n", "")
        for y in x.split(","):
            y = y.strip()
            if y == "":
                continue
            universal_moves.append(y)
|
||||
|
||||
# get compatibility from jsons
|
||||
def construct_compatibility_dict(force_custom_check):
|
||||
dict_out = {}
|
||||
for pth in glob.glob('./tools/learnset_helpers/porymoves_files/*.json'):
|
||||
f = open(pth, 'r')
|
||||
data = json.load(f)
|
||||
for mon in data.keys():
|
||||
if not mon in dict_out:
|
||||
dict_out[mon] = []
|
||||
for move in data[mon]['LevelMoves']:
|
||||
if not move['Move'] in dict_out[mon]:
|
||||
dict_out[mon].append(move['Move'])
|
||||
#for move in data[mon]['PreEvoMoves']:
|
||||
# if not move in dict_out[mon]:
|
||||
# dict_out[mon].append(move)
|
||||
for move in data[mon]['TMMoves']:
|
||||
if not move in dict_out[mon]:
|
||||
dict_out[mon].append(move)
|
||||
for move in data[mon]['EggMoves']:
|
||||
if not move in dict_out[mon]:
|
||||
dict_out[mon].append(move)
|
||||
for move in data[mon]['TutorMoves']:
|
||||
if not move in dict_out[mon]:
|
||||
dict_out[mon].append(move)
|
||||
|
||||
# if the file was not previously generated, check if there is custom data there that needs to be preserved
|
||||
with open("./src/data/pokemon/teachable_learnsets.h", 'r') as file:
|
||||
raw = file.read()
|
||||
if not "// DO NOT MODIFY THIS FILE!" in raw and force_custom_check == True:
|
||||
custom_teachable_compatibilities = {}
|
||||
for entry in re.findall(r"static const u16 s(.*)TeachableLearnset\[\] = {\n((.|\n)*?)\n};", raw):
|
||||
monname = parse_mon_name(entry[0])
|
||||
if monname == "NONE":
|
||||
continue
|
||||
compatibility = entry[1].split("\n")
|
||||
if not monname in custom_teachable_compatibilities:
|
||||
custom_teachable_compatibilities[monname] = []
|
||||
if not monname in dict_out:
|
||||
# this mon is unknown, so all data needs to be preserved
|
||||
for move in compatibility:
|
||||
move = move.replace(",", "").strip()
|
||||
if move == "" or move == "MOVE_UNAVAILABLE":
|
||||
continue
|
||||
custom_teachable_compatibilities[monname].append(move)
|
||||
else:
|
||||
# this mon is known, so check if the moves in the old teachable_learnsets.h are not in the jsons
|
||||
for move in compatibility:
|
||||
move = move.replace(",", "").strip()
|
||||
if move == "" or move == "MOVE_UNAVAILABLE":
|
||||
continue
|
||||
if not move in dict_out[monname]:
|
||||
custom_teachable_compatibilities[monname].append(move)
|
||||
# actually store the data in custom.json
|
||||
if os.path.exists("./tools/learnset_helpers/porymoves_files/custom.json"):
|
||||
f2 = open("./tools/learnset_helpers/porymoves_files/custom.json", "r")
|
||||
custom_json = json.load(f2)
|
||||
f2.close()
|
||||
else:
|
||||
custom_json = {}
|
||||
for x in custom_teachable_compatibilities:
|
||||
if len(custom_teachable_compatibilities[x]) == 0:
|
||||
continue
|
||||
if not x in custom_json:
|
||||
custom_json[x] = {"LevelMoves": [], "PreEvoMoves": [], "TMMoves": [], "EggMoves": [], "TutorMoves": []}
|
||||
for move in custom_teachable_compatibilities[x]:
|
||||
custom_json[x]["TutorMoves"].append(move)
|
||||
f2 = open("./tools/learnset_helpers/porymoves_files/custom.json", "w")
|
||||
f2.write(json.dumps(custom_json, indent=2))
|
||||
f2.close()
|
||||
print("FIRST RUN: Updated custom.json with teachable_learnsets.h's data")
|
||||
# rerun the process
|
||||
dict_out = construct_compatibility_dict(False)
|
||||
return dict_out
|
||||
|
||||
compatibility_dict = construct_compatibility_dict(True)

# actually prepare the file
with open("./src/data/pokemon/teachable_learnsets.h", 'r') as file:
    out = file.read()

# Rewrite every s<Name>TeachableLearnset array in place.
list_of_mons = re.findall(r'static const u16 s(.*)TeachableLearnset', out)
for mon in list_of_mons:
    mon_parsed = parse_mon_name(mon)
    tm_learnset = []
    tutor_learnset = []
    # NONE and MEW are special-cased and left untouched.
    if mon_parsed == "NONE" or mon_parsed == "MEW":
        continue
    if not mon_parsed in compatibility_dict:
        print("Unable to find %s in json" % mon)
        continue
    # TM/HM moves this mon can learn, excluding near-universal moves.
    for move in tm_moves:
        if move in universal_moves:
            continue
        if move in tm_learnset:
            continue
        if move in compatibility_dict[mon_parsed]:
            tm_learnset.append(move)
            continue
    # Tutor moves, same filtering.
    for move in tutor_moves:
        if move in universal_moves:
            continue
        if move in tutor_learnset:
            continue
        if move in compatibility_dict[mon_parsed]:
            tutor_learnset.append(move)
            continue
    tm_learnset.sort()
    tutor_learnset.sort()
    tm_learnset += tutor_learnset
    repl = "static const u16 s%sTeachableLearnset[] = {\n    " % mon
    if len(tm_learnset) > 0:
        repl += ",\n    ".join(tm_learnset) + ",\n    "
    repl += "MOVE_UNAVAILABLE,\n};"
    newout = re.sub(r'static const u16 s%sTeachableLearnset\[\] = {[\s\S]*?};' % mon, repl, out)
    if newout != out:
        out = newout
        print("Updated %s" % mon)

# add/update header
header = "//\n// DO NOT MODIFY THIS FILE! It is auto-generated from tools/learnset_helpers/teachable.py\n//\n\n"
longest_move_name = 0
for move in tm_moves + tutor_moves:
    if len(move) > longest_move_name:
        longest_move_name = len(move)
longest_move_name += 2 # + 2 for a hyphen and a space

universal_title = "Near-universal moves found in sUniversalMoves:"
tmhm_title = "TM/HM moves found in \"include/constants/tms_hms.h\":"
tutor_title = "Tutor moves found in map scripts:"

# Widen the column so the section titles also fit.
if longest_move_name < len(universal_title):
    longest_move_name = len(universal_title)
if longest_move_name < len(tmhm_title):
    longest_move_name = len(tmhm_title)
if longest_move_name < len(tutor_title):
    longest_move_name = len(tutor_title)

# NOTE: parameter name shadows the builtin `str`; appends one padded
# banner line to the module-level `header`.
def header_print(str):
    global header
    header += "// " + str + " " * (longest_move_name - len(str)) + " //\n"

header += "// " + longest_move_name * "*" + " //\n"
header_print(tmhm_title)
for move in tm_moves:
    header_print("- " + move)
header += "// " + longest_move_name * "*" + " //\n"
header_print(tutor_title)
tutor_moves.sort() # alphabetically sort tutor moves for easier referencing
for move in tutor_moves:
    header_print("- " + move)
header += "// " + longest_move_name * "*" + " //\n"
header_print(universal_title)
universal_moves.sort() # alphabetically sort near-universal moves for easier referencing
for move in universal_moves:
    header_print("- " + move)
header += "// " + longest_move_name * "*" + " //\n\n"

# Prepend the banner on first generation, otherwise replace the existing one.
if not "// DO NOT MODIFY THIS FILE!" in out:
    out = header + out
else:
    out = re.sub(r"\/\/\n\/\/ DO NOT MODIFY THIS FILE!(.|\n)*\* \/\/\n\n", header, out)

with open("./src/data/pokemon/teachable_learnsets.h", 'w') as file:
    file.write(out)
|
||||
Loading…
x
Reference in New Issue
Block a user