增加Msys2使用mingw64编译指南

This commit is contained in:
ExMingYan 2025-04-28 16:09:05 +08:00
parent 57b61cff7a
commit e151fa83df
16 changed files with 68 additions and 51 deletions

View File

@ -22,7 +22,7 @@
[WSL 安装说明](docs/install/windows/WSL_ZH.md)
[Msys2 安装说明](docs/install/windows/MSYS2.md)
[Msys2 安装说明](docs/install/windows/MSYS2_ZH.md)
[Cygwin 安装说明](docs/install/windows/CYGWIN.md)

View File

@ -11,7 +11,7 @@ import png
SPRITESHEETS = [('gen1.png', 15, 11, 1)]
output_dir = 'sprites'
index_to_name = {}
with open('names.txt', 'r') as f:
with open('names.txt', 'r', encoding="utf-8") as f:
for line in f:
index, name = line.split(' ')[:2]
name = name.strip()
@ -72,7 +72,7 @@ def apply_palette(palette_file, input_file, output_file): # Apply one file's pa
inp = png.Reader(input_file)
w, h, rows, _ = inp.read()
src_palette = tuple(c[:3] for c in inp.palette())
with open(output_file, 'wb') as f:
with open(output_file, 'wb', encoding="utf-8") as f:
new_rows = []
for row in rows:
new_rows.append([closest_color(src_palette[c], target_palette) if c else 0 for c in row])

View File

@ -8,7 +8,7 @@ blank_regex = re.compile(r'\(?_+\)?')
# Converts a series of message lines to a better format
def convert_messages(infile, outfile='emotions.txt'):
with open(infile, 'r') as f_in, open(outfile, 'w') as f_out:
with open(infile, 'r', encoding="utf-8") as f_in, open(outfile, 'w', encoding="utf-8") as f_out:
for line in f_in:
line = line.rstrip('\n')
if line and line[0] == '-':
@ -35,11 +35,11 @@ def prepare_string(s):
# Exports up to n messages in C format to outfile
def export_messages(infile, outfile, n=None, indent=0, start=0):
with open(infile, 'r') as f_in:
with open(infile, 'r', encoding="utf-8") as f_in:
lines = f_in.readlines()
if n is not None:
lines = lines[:n]
with open(outfile, 'w') as f_out:
with open(outfile, 'w', encoding="utf-8") as f_out:
codelines = [' '*indent + f'static const u8 sCondMsg{start+i:02d}[] = _("{prepare_string(s)}");' for i, s in enumerate(lines)]
f_out.write('\n'.join(codelines))
print(f'{len(lines)} lines written')

View File

@ -39,7 +39,7 @@ def apply_palette(palette_file, input_file, output_file): # Apply one file's pa
w, h, rows, info = inp.read()
src_palette = tuple(c[:3] for c in inp.palette())
new_rows = [[closest_color(src_palette[c][:3], target_palette) if c else 0 for c in row] for row in rows]
with open(output_file, 'wb') as f:
with open(output_file, 'wb', encoding="utf-8") as f:
w = png.Writer(width=w, height=h, bitdepth=4, palette=target_palette)
w.write(f, new_rows)

View File

@ -12,7 +12,7 @@ def extract_palette(path):
r.read()
root, _ = os.path.splitext(path)
out_path = root + '.pal'
with open(out_path, 'w', newline='\r\n') as f:
with open(out_path, 'w', newline='\r\n', encoding="utf-8") as f:
f.write(PAL_PRELUDE)
colors = r.palette()
if len(colors) < 16:

View File

@ -0,0 +1,17 @@
# Msys2编译指南
## 安装并配置Msys2
1. 在 Msys2 的 [GitHub](https://github.com/msys2/msys2-installer/releases) 发布页面下载 .exe 安装程序,下载完成后打开并完成安装
2. 安装完成后,进入 Msys2 的安装目录,运行 mingw64.exe
3. 执行下面的命令以安装需要的依赖
```bash
pacman -S mingw-w64-x86_64-arm-none-eabi-toolchain mingw-w64-x86_64-toolchain mingw-w64-x86_64-libpng mingw-w64-x86_64-python make
```
## 编译
使用 cd 命令进入存放源码的目录,输入 make,等待编译完成即可。

View File

@ -8,7 +8,7 @@ if not os.path.exists("Makefile"):
# Read contest_opponents.h
for file in glob.glob('./src/data/contest_opponents.h'):
with open(file, 'r') as f:
with open(file, 'r', encoding="utf-8") as f:
source_content = f.read()
# Extract party info from contest_opponents.h
@ -21,7 +21,7 @@ for match in source_pattern.findall(source_content):
# Read contest_opponents.h content
for file in glob.glob('./src/data/contest_opponents.h'):
with open(file, 'r') as f:
with open(file, 'r', encoding="utf-8") as f:
destination_content = f.read()
# Modify contest_opponents.h content
@ -40,6 +40,6 @@ modified_content = destination_pattern.sub(add_filter_data, destination_content)
# Write the modified content back to contest_opponents.h
for file in glob.glob('./src/data/contest_opponents.h'):
with open(file, 'w') as f:
with open(file, 'w', encoding="utf-8") as f:
f.write(modified_content)
print("contest_opponents.h has been updated")

View File

@ -8,7 +8,7 @@ if not os.path.exists("Makefile"):
# Read battle_frontier_trainer_mons.h and extract the party information
for file in glob.glob('./src/data/battle_frontier/battle_frontier_trainer_mons.h'):
with open(file, 'r') as f:
with open(file, 'r', encoding="utf-8") as f:
source_content = f.read()
# Extract party info from battle_frontier_trainer_mons.h
@ -21,7 +21,7 @@ for match in source_pattern.findall(source_content):
# Read battle_frontier_trainers.h content
for file in glob.glob('./src/data/battle_frontier/battle_frontier_trainers.h'):
with open(file, 'r') as f:
with open(file, 'r', encoding="utf-8") as f:
destination_content = f.read()
# Modify battle_frontier_trainers.h content
@ -39,6 +39,6 @@ modified_content = destination_pattern.sub(add_party_data, destination_content)
# Write the modified content back to battle_frontier_trainers.h
for file in glob.glob('./src/data/battle_frontier/battle_frontier_trainers.h'):
with open(file, 'w') as f:
with open(file, 'w', encoding="utf-8") as f:
f.write(modified_content)
print("battle_frontier_trainers.h has been updated")

View File

@ -18,13 +18,13 @@ array_pories = []
# make a list of which script corresponds to which item
for file in incs_to_check:
with open(file, "r") as f2:
with open(file, "r", encoding="utf-8") as f2:
raw = f2.read()
array += re.findall("(.*)::\n[ ]*finditem (.*)\n[ ]*end", raw)
# since this doesn't catch poryscript-generated inc files, do the same for poryscript
for file in pories_to_check:
with open(file, "r") as f2:
with open(file, "r", encoding="utf-8") as f2:
raw = f2.read()
array_pories += re.findall("script ([\w]*)[ \n]*\{[ \n]*finditem\((.*)\)[ \n]*\}", raw)
@ -38,7 +38,7 @@ for x in array:
# apply changes to inc files
for map in glob.glob('./data/maps/*/map.json'):
with open(map, "r") as f2:
with open(map, "r", encoding="utf-8") as f2:
data = json.load(f2)
if not 'object_events' in data:
continue
@ -46,13 +46,13 @@ for map in glob.glob('./data/maps/*/map.json'):
if objevent["script"] in dict:
objevent["trainer_sight_or_berry_tree_id"] = dict[objevent["script"]]
objevent["script"] = "Common_EventScript_FindItem"
with open(map, "w") as f2:
with open(map, "w", encoding="utf-8") as f2:
f2.write(json.dumps(data, indent=2) + "\n")
# do another map search to find out which finditem scripts would somehow be still in use
still_in_use = []
for map in glob.glob('./data/maps/*/map.json'):
with open(map, "r") as f2:
with open(map, "r", encoding="utf-8") as f2:
data = json.load(f2)
if not 'object_events' in data:
continue
@ -66,20 +66,20 @@ for x in list(dict.keys()):
# clean up scripts that are now no longer in use
for file in incs_to_check:
with open(file, "r") as f2:
with open(file, "r", encoding="utf-8") as f2:
raw = f2.read()
for unused in list(dict.keys()):
raw = re.sub("%s::\n[ ]*finditem (.*)\n[ ]*end\n*" % unused, "", raw)
with open(file, "w") as f2:
with open(file, "w", encoding="utf-8") as f2:
f2.write(raw)
# also clean up pory files
for file in pories_to_check:
with open(file, "r") as f2:
with open(file, "r", encoding="utf-8") as f2:
raw = f2.read()
for unused in list(dict.keys()):
raw = re.sub("script %s[ \n]*\{[ \n]*finditem\((.*)\)[ \n]*\}[ \n]*" % unused, "", raw)
with open(file, "w") as f2:
with open(file, "w", encoding="utf-8") as f2:
f2.write(raw)
print("Done!")

View File

@ -8,7 +8,7 @@ def IsCommaMissing(line: str):
return False
return True
input_file = open('./src/data/moves_info.h', 'r')
input_file = open('./src/data/moves_info.h', 'r', encoding="utf-8")
lines = input_file.readlines()
input_file.close()
@ -41,23 +41,23 @@ for line in lines:
moves_info_lines.append(line)
output_file_mi = open('./src/data/moves_info.h', 'w')
output_file_mi = open('./src/data/moves_info.h', 'w', encoding="utf-8")
output_file_mi.writelines(moves_info_lines)
output_file_mi.close()
output_file_bas = open('./include/battle_anim_scripts.h', 'w')
output_file_bas = open('./include/battle_anim_scripts.h', 'w', encoding="utf-8")
output_file_bas.writelines('#ifndef GUARD_BATTLE_ANIM_SCRIPTS_H\n')
output_file_bas.writelines('#define GUARD_BATTLE_ANIM_SCRIPTS_H\n\n')
output_file_bas.writelines(battle_anim_lines)
output_file_bas.writelines('\n#endif // GUARD_BATTLE_ANIM_SCRIPTS_H\n')
output_file_bas.close()
b_anim_scripts_s = open('./data/battle_anim_scripts.s', 'r')
b_anim_scripts_s = open('./data/battle_anim_scripts.s', 'r', encoding="utf-8")
lines = b_anim_scripts_s.read()
b_anim_scripts_s.close()
lines = re.sub(r'(Move_[A-Za-z0-9_]*)([:]+)', r'\1::', lines)
b_anim_scripts_s = open('./data/battle_anim_scripts.s', 'w')
b_anim_scripts_s = open('./data/battle_anim_scripts.s', 'w', encoding="utf-8")
b_anim_scripts_s.write(lines)
b_anim_scripts_s.close()

View File

@ -51,12 +51,12 @@ def battle_frontier_mons(data):
return data
with open('src/data/battle_frontier/battle_frontier_mons.h', 'r') as file:
with open('src/data/battle_frontier/battle_frontier_mons.h', 'r', encoding="utf-8") as file:
data = file.read()
with open('src/data/battle_frontier/battle_frontier_mons.h', 'w') as file:
with open('src/data/battle_frontier/battle_frontier_mons.h', 'w', encoding="utf-8") as file:
file.write(battle_frontier_mons(data))
with open('src/data/battle_frontier/battle_tent.h', 'r') as file:
with open('src/data/battle_frontier/battle_tent.h', 'r', encoding="utf-8") as file:
data = file.read()
with open('src/data/battle_frontier/battle_tent.h', 'w') as file:
with open('src/data/battle_frontier/battle_tent.h', 'w', encoding="utf-8") as file:
file.write(battle_frontier_mons(data))

View File

@ -8,7 +8,7 @@ if not os.path.exists("Makefile"):
# Read item_icon_table.h and extract the icon and palette information
for file in glob.glob('./src/data/item_icon_table.h'):
with open(file, 'r') as f:
with open(file, 'r', encoding="utf-8") as f:
icon_table_content = f.read()
# Extract item icon and palette data from item_icon_table.h
@ -21,7 +21,7 @@ for match in icon_table_pattern.findall(icon_table_content):
# Read items.h content
for file in glob.glob('./src/data/items.h'):
with open(file, 'r') as f:
with open(file, 'r', encoding="utf-8") as f:
items_content = f.read()
# Modify items.h content
@ -40,6 +40,6 @@ modified_items_content = item_pattern.sub(add_icon_data, items_content)
# Write the modified content back to items.h
for file in glob.glob('./src/data/items.h'):
with open(file, 'w') as f:
with open(file, 'w', encoding="utf-8") as f:
f.write(modified_items_content)
print("items.h has been updated")

View File

@ -314,6 +314,6 @@ if __name__ == '__main__':
except:
print(f"usage: python3 {sys.argv[0]} <trainers.h> <trainer_parties.h> <out>")
else:
with open(trainers_in_path, "r") as trainers_in_h, open(parties_in_path, "r") as parties_in_h, open(out_path, "w") as out_party:
with open(trainers_in_path, "r", encoding="utf-8") as trainers_in_h, open(parties_in_path, "r", encoding="utf-8") as parties_in_h, open(out_path, "w", encoding="utf-8") as out_party:
parties = convert_parties(parties_in_path, parties_in_h)
trainers = convert_trainers(trainers_in_path, trainers_in_h, parties, out_party)

View File

@ -325,6 +325,6 @@ if __name__ == '__main__':
except:
print(f"usage: python3 {sys.argv[0]} <trainers.h> <trainer_parties.h> <out>")
else:
with open(trainers_in_path, "r") as trainers_in_h, open(parties_in_path, "r") as parties_in_h, open(out_path, "w") as out_party:
with open(trainers_in_path, "r", encoding="utf-8") as trainers_in_h, open(parties_in_path, "r", encoding="utf-8") as parties_in_h, open(out_path, "w", encoding="utf-8") as out_party:
parties = convert_parties(parties_in_path, parties_in_h)
trainers = convert_trainers(trainers_in_path, trainers_in_h, parties, out_party)

View File

@ -9,7 +9,7 @@ exceptions = [ # the following exceptions are hardcoded to streamline the proces
]
# convert egg_moves.h to the new format
with open("src/data/pokemon/egg_moves.h", "r") as f:
with open("src/data/pokemon/egg_moves.h", "r", encoding="utf-8") as f:
data = f.read()
data = re.sub(r"#define(.|\n)*const u16 gEggMoves\[\] = {", "static const u16 sNoneEggMoveLearnset[] = {\n MOVE_UNAVAILABLE,\n};\n", data) # remove and replace header
@ -29,13 +29,13 @@ data = re.sub(r"\),\n", ",\n MOVE_UNAVAILABLE,\n};\n", data) # add termin
data = re.sub(r" MOVE_", " MOVE_", data) # fix indentation
with open("src/data/pokemon/egg_moves.h", "w") as f:
with open("src/data/pokemon/egg_moves.h", "w", encoding="utf-8") as f:
f.write(data)
# update gBaseStats
for file in glob.glob('./src/data/pokemon/species_info/gen_*_families.h'):
with open(file, "r") as f:
with open(file, "r", encoding="utf-8") as f:
data = f.read()
# go through all Pokemon with teachable learnsets that are also in the list, then assign egg moves to them
@ -47,5 +47,5 @@ for file in glob.glob('./src/data/pokemon/species_info/gen_*_families.h'):
if len(macrocheck) > 0:
data = re.sub(r"\.teachableLearnset = s" + mon + r"TeachableLearnset," + macrocheck[0] + r"\\\\", ".teachableLearnset = s%sTeachableLearnset,%s\\\\\n .eggMoveLearnset = s%sEggMoveLearnset,%s\\\\" % (mon, macrocheck[0], mon, " " * (len(macrocheck[0]) + 4)), data)
with open(file, "w") as f:
with open(file, "w", encoding="utf-8") as f:
f.write(data)

View File

@ -4,7 +4,7 @@ import json
import os
# before all else, abort if the config is off
with open("./include/config/pokemon.h", "r") as file:
with open("./include/config/pokemon.h", "r", encoding="utf-8") as file:
learnset_config = re.findall(r"#define P_LEARNSET_HELPER_TEACHABLE *([^ ]*)", file.read())
if len(learnset_config) != 1:
quit()
@ -25,7 +25,7 @@ if len(incs_to_check) == 0: # disabled if no jsons present
quit()
for file in incs_to_check:
with open(file, 'r') as f2:
with open(file, 'r', encoding="utf-8") as f2:
raw = f2.read()
if 'special ChooseMonForMoveTutor' in raw:
for x in re.findall(r'setvar VAR_0x8005, (MOVE_.*)', raw):
@ -33,14 +33,14 @@ for file in incs_to_check:
tutor_moves.append(x)
# scan TMs and HMs
with open("./include/constants/tms_hms.h", 'r') as file:
with open("./include/constants/tms_hms.h", 'r', encoding="utf-8") as file:
for x in re.findall(r'F\((.*)\)', file.read()):
if not 'MOVE_' + x in tm_moves:
tm_moves.append('MOVE_' + x)
# look up universal moves to exclude them
universal_moves = []
with open("./src/pokemon.c", "r") as file:
with open("./src/pokemon.c", "r", encoding="utf-8") as file:
for x in re.findall(r"static const u16 sUniversalMoves\[\] =(.|\n)*?{((.|\n)*?)};", file.read())[0]:
x = x.replace("\n", "")
for y in x.split(","):
@ -53,7 +53,7 @@ with open("./src/pokemon.c", "r") as file:
def construct_compatibility_dict(force_custom_check):
dict_out = {}
for pth in glob.glob('./tools/learnset_helpers/porymoves_files/*.json'):
f = open(pth, 'r')
f = open(pth, 'r', encoding="utf-8")
data = json.load(f)
for mon in data.keys():
if not mon in dict_out:
@ -75,7 +75,7 @@ def construct_compatibility_dict(force_custom_check):
dict_out[mon].append(move)
# if the file was not previously generated, check if there is custom data there that needs to be preserved
with open("./src/data/pokemon/teachable_learnsets.h", 'r') as file:
with open("./src/data/pokemon/teachable_learnsets.h", 'r', encoding="utf-8") as file:
raw = file.read()
if not "// DO NOT MODIFY THIS FILE!" in raw and force_custom_check == True:
custom_teachable_compatibilities = {}
@ -103,7 +103,7 @@ def construct_compatibility_dict(force_custom_check):
custom_teachable_compatibilities[monname].append(move)
# actually store the data in custom.json
if os.path.exists("./tools/learnset_helpers/porymoves_files/custom.json"):
f2 = open("./tools/learnset_helpers/porymoves_files/custom.json", "r")
f2 = open("./tools/learnset_helpers/porymoves_files/custom.json", "r", encoding="utf-8")
custom_json = json.load(f2)
f2.close()
else:
@ -115,7 +115,7 @@ def construct_compatibility_dict(force_custom_check):
custom_json[x] = {"LevelMoves": [], "PreEvoMoves": [], "TMMoves": [], "EggMoves": [], "TutorMoves": []}
for move in custom_teachable_compatibilities[x]:
custom_json[x]["TutorMoves"].append(move)
f2 = open("./tools/learnset_helpers/porymoves_files/custom.json", "w")
f2 = open("./tools/learnset_helpers/porymoves_files/custom.json", "w", encoding="utf-8")
f2.write(json.dumps(custom_json, indent=2))
f2.close()
print("FIRST RUN: Updated custom.json with teachable_learnsets.h's data")
@ -126,7 +126,7 @@ def construct_compatibility_dict(force_custom_check):
compatibility_dict = construct_compatibility_dict(True)
# actually prepare the file
with open("./src/data/pokemon/teachable_learnsets.h", 'r') as file:
with open("./src/data/pokemon/teachable_learnsets.h", 'r', encoding="utf-8") as file:
out = file.read()
list_of_mons = re.findall(r'static const u16 s(.*)TeachableLearnset', out)
for mon in list_of_mons:
@ -210,5 +210,5 @@ if not "// DO NOT MODIFY THIS FILE!" in out:
else:
out = re.sub(r"\/\/\n\/\/ DO NOT MODIFY THIS FILE!(.|\n)*\* \/\/\n\n", header, out)
with open("./src/data/pokemon/teachable_learnsets.h", 'w') as file:
with open("./src/data/pokemon/teachable_learnsets.h", 'w', encoding="utf-8", newline="\n") as file:
file.write(out)