#!/usr/bin/env python3

"""Core options text extractor

The purpose of this script is to set up & provide functions for automatic generation of 'libretro_core_options_intl.h'
from 'libretro_core_options.h' using translations from Crowdin.

Both v1 and v2 structs are supported. It is, however, recommended to convert v1 files to v2 using the included
'v1_to_v2_converter.py'.

Usage:
python3 path/to/core_opt_translation.py "path/to/where/libretro_core_options.h & libretro_core_options_intl.h/are"

This script will:
1.) create keywords for & extract the texts from 'libretro_core_options.h' and save them into 'intl/_us/core_options.h'
2.) do the same for any translations already present in 'libretro_core_options_intl.h', saving those in their respective folders
"""
import core_option_regex as cor
import re
import os
import sys
import json
import urllib.request as req
import shutil

# for uploading translations to Crowdin, the Crowdin 'language id' is required
LANG_CODE_TO_ID = {'_ar': 'ar',
                   '_ast': 'ast',
                   '_chs': 'zh-CN',
                   '_cht': 'zh-TW',
                   '_cs': 'cs',
                   '_cy': 'cy',
                   '_da': 'da',
                   '_de': 'de',
                   '_el': 'el',
                   '_eo': 'eo',
                   '_es': 'es-ES',
                   '_fa': 'fa',
                   '_fi': 'fi',
                   '_fr': 'fr',
                   '_gl': 'gl',
                   '_he': 'he',
                   '_hu': 'hu',
                   '_id': 'id',
                   '_it': 'it',
                   '_ja': 'ja',
                   '_ko': 'ko',
                   '_nl': 'nl',
                   '_pl': 'pl',
                   '_pt_br': 'pt-BR',
                   '_pt_pt': 'pt-PT',
                   '_ru': 'ru',
                   '_sk': 'sk',
                   '_sv': 'sv-SE',
                   '_tr': 'tr',
                   '_uk': 'uk',
                   '_vn': 'vi'}
LANG_CODE_TO_R_LANG = {'_ar': 'RETRO_LANGUAGE_ARABIC',
                       '_ast': 'RETRO_LANGUAGE_ASTURIAN',
                       '_chs': 'RETRO_LANGUAGE_CHINESE_SIMPLIFIED',
                       '_cht': 'RETRO_LANGUAGE_CHINESE_TRADITIONAL',
                       '_cs': 'RETRO_LANGUAGE_CZECH',
                       '_cy': 'RETRO_LANGUAGE_WELSH',
                       '_da': 'RETRO_LANGUAGE_DANISH',
                       '_de': 'RETRO_LANGUAGE_GERMAN',
                       '_el': 'RETRO_LANGUAGE_GREEK',
                       '_eo': 'RETRO_LANGUAGE_ESPERANTO',
                       '_es': 'RETRO_LANGUAGE_SPANISH',
                       '_fa': 'RETRO_LANGUAGE_PERSIAN',
                       '_fi': 'RETRO_LANGUAGE_FINNISH',
                       '_fr': 'RETRO_LANGUAGE_FRENCH',
                       '_gl': 'RETRO_LANGUAGE_GALICIAN',
                       '_he': 'RETRO_LANGUAGE_HEBREW',
                       '_hu': 'RETRO_LANGUAGE_HUNGARIAN',
                       '_id': 'RETRO_LANGUAGE_INDONESIAN',
                       '_it': 'RETRO_LANGUAGE_ITALIAN',
                       '_ja': 'RETRO_LANGUAGE_JAPANESE',
                       '_ko': 'RETRO_LANGUAGE_KOREAN',
                       '_nl': 'RETRO_LANGUAGE_DUTCH',
                       '_pl': 'RETRO_LANGUAGE_POLISH',
                       '_pt_br': 'RETRO_LANGUAGE_PORTUGUESE_BRAZIL',
                       '_pt_pt': 'RETRO_LANGUAGE_PORTUGUESE_PORTUGAL',
                       '_ru': 'RETRO_LANGUAGE_RUSSIAN',
                       '_sk': 'RETRO_LANGUAGE_SLOVAK',
                       '_sv': 'RETRO_LANGUAGE_SWEDISH',
                       '_tr': 'RETRO_LANGUAGE_TURKISH',
                       '_uk': 'RETRO_LANGUAGE_UKRAINIAN',
                       '_us': 'RETRO_LANGUAGE_ENGLISH',
                       '_vn': 'RETRO_LANGUAGE_VIETNAMESE'}

# these are handled by RetroArch directly - no need to include them in core translations
ON_OFFS = {'"enabled"', '"disabled"', '"true"', '"false"', '"on"', '"off"'}


def remove_special_chars(text: str, char_set=0) -> str:
    """Removes special characters from a text.

    :param text: String to be cleaned.
    :param char_set: 0 -> remove all ASCII special chars except for '_' & 'space';
                     1 -> remove invalid chars from file names
    :return: Clean text.
    """
    command_chars = [chr(unicode) for unicode in tuple(range(0, 32)) + (127,)]
    special_chars = ([chr(unicode) for unicode in tuple(range(33, 48)) + tuple(range(58, 65)) + tuple(range(91, 95))
                      + (96,) + tuple(range(123, 127))],
                     ('\\', '/', ':', '*', '?', '"', '<', '>', '|'))
    res = text
    for cm in command_chars:
        res = res.replace(cm, '_')
    for sp in special_chars[char_set]:
        res = res.replace(sp, '_')
    while res.startswith('_'):
        res = res[1:]
    while res.endswith('_'):
        res = res[:-1]
    return res
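
# Illustrative behaviour of the two char_set modes (hand-computed from the ranges
# above; not part of the original script):
#   remove_special_chars('Frame Rate (Hz)!')   -> 'Frame Rate _Hz'   (char_set=0)
#   remove_special_chars('a/b: test?*', 1)     -> 'a_b_ test'        (char_set=1)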


def clean_file_name(file_name: str) -> str:
    """Removes characters which could make file_name invalid on some operating systems.

    :param file_name: File name to be cleaned.
    :return: The cleaned file name.
    """
    file_name = remove_special_chars(file_name, 1)
    file_name = re.sub(r'__+', '_', file_name.replace(' ', '_'))
    return file_name
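
# Quick sketch (hypothetical input, following the two steps above):
#   clean_file_name('My Core: Options?.h') -> 'My_Core_Options_.h'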


def get_struct_type_name(decl: str) -> tuple:
    """Returns relevant parts of the struct declaration:
    type, name of the struct and the language appendix, if present.

    :param decl: The struct declaration matched by cor.p_type_name.
    :return: Tuple, e.g.: ('retro_core_option_definition', 'option_defs_us', '_us')
    """
    struct_match = cor.p_type_name.search(decl)
    if struct_match:
        if struct_match.group(3):
            struct_type_name = struct_match.group(1, 2, 3)
            return struct_type_name
        elif struct_match.group(4):
            struct_type_name = struct_match.group(1, 2, 4)
            return struct_type_name
        else:
            struct_type_name = struct_match.group(1, 2)
            return struct_type_name
    else:
        raise ValueError(f'No or incomplete struct declaration: {decl}!\n'
                         'Please make sure all structs are complete, including the type and name declaration.')
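
# Example mirroring the docstring (the exact capture groups come from cor.p_type_name
# in core_option_regex.py):
#   get_struct_type_name('struct retro_core_option_definition option_defs_us[] = {')
#       -> ('retro_core_option_definition', 'option_defs_us', '_us')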


def is_viable_non_dupe(text: str, comparison) -> bool:
    """text must be longer than 2 ('""'), not 'NULL' and not in comparison.

    :param text: String to be tested.
    :param comparison: Dictionary or set to search for text in.
    :return: bool
    """
    return 2 < len(text) and text != 'NULL' and text not in comparison


def is_viable_value(text: str) -> bool:
    """text must be longer than 2 ('""'), not 'NULL' and text.lower() not in
    {'"enabled"', '"disabled"', '"true"', '"false"', '"on"', '"off"'}.

    :param text: String to be tested.
    :return: bool
    """
    return 2 < len(text) and text != 'NULL' and text.lower() not in ON_OFFS
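
# How the two predicates differ (arguments keep their C quotes, as captured by the
# regexes; results hand-checked against the conditions above):
#   is_viable_non_dupe('"Hello"', set())  -> True
#   is_viable_non_dupe('NULL', set())     -> False
#   is_viable_value('"enabled"')          -> False   # already handled by RetroArch
#   is_viable_value('"4:3"')              -> True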


def create_non_dupe(base_name: str, opt_num: int, comparison) -> str:
    """Ensures base_name is not already in comparison; if it is, a unique variant is generated.

    :param base_name: Name to check/make unique.
    :param opt_num: Number of the option base_name belongs to, used in making it unique.
    :param comparison: Dictionary or set to search for base_name in.
    :return: Unique name.
    """
    h = base_name
    if h in comparison:
        n = 0
        h = h + '_O' + str(opt_num)
        h_end = len(h)
        while h in comparison:
            h = h[:h_end] + '_' + str(n)
            n += 1
    return h
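
# Collision handling sketch (hypothetical macro names):
#   create_non_dupe('CORE_ASPECT_LABEL', 4, set())                  -> 'CORE_ASPECT_LABEL'
#   create_non_dupe('CORE_ASPECT_LABEL', 4, {'CORE_ASPECT_LABEL'})  -> 'CORE_ASPECT_LABEL_O4'
#   create_non_dupe('CORE_ASPECT_LABEL', 4,
#                   {'CORE_ASPECT_LABEL', 'CORE_ASPECT_LABEL_O4'})  -> 'CORE_ASPECT_LABEL_O4_0'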


def get_texts(text: str) -> dict:
    """Extracts the strings which are to be translated (or which are the translations)
    from text and creates macro names for them.

    :param text: The string to be parsed.
    :return: Dictionary of the form { '_<lang>': { 'macro': 'string', ... }, ... }.
    """
    # all structs: group(0) full struct, group(1) beginning, group(2) content
    structs = cor.p_struct.finditer(text)
    hash_n_string = {}
    just_string = {}
    for struct in structs:
        struct_declaration = struct.group(1)
        struct_type_name = get_struct_type_name(struct_declaration)
        if 3 > len(struct_type_name):
            lang = '_us'
        else:
            lang = struct_type_name[2]
        if lang not in just_string:
            hash_n_string[lang] = {}
            just_string[lang] = set()

        is_v2 = False
        pre_name = ''
        p = cor.p_info
        if 'retro_core_option_v2_definition' == struct_type_name[0]:
            is_v2 = True
        elif 'retro_core_option_v2_category' == struct_type_name[0]:
            pre_name = 'CATEGORY_'
            p = cor.p_info_cat

        struct_content = struct.group(2)
        # 0: full option; 1: key; 2: description; 3: additional info; 4: key/value pairs
        struct_options = cor.p_option.finditer(struct_content)
        for opt, option in enumerate(struct_options):
            # group 1: key
            if option.group(1):
                opt_name = pre_name + option.group(1)
                # no special chars allowed in key
                opt_name = remove_special_chars(opt_name).upper().replace(' ', '_')
            else:
                raise ValueError(f'No option name (key) found in struct {struct_type_name[1]} option {opt}!')

            # group 2: description0
            if option.group(2):
                desc0 = option.group(2)
                if is_viable_non_dupe(desc0, just_string[lang]):
                    just_string[lang].add(desc0)
                    m_h = create_non_dupe(re.sub(r'__+', '_', f'{opt_name}_LABEL'), opt, hash_n_string[lang])
                    hash_n_string[lang][m_h] = desc0
            else:
                raise ValueError(f'No label found in struct {struct_type_name[1]} option {option.group(1)}!')

            # group 3: desc1, info0, info1, category
            if option.group(3):
                infos = option.group(3)
                option_info = p.finditer(infos)
                if is_v2:
                    desc1 = next(option_info).group(1)
                    if is_viable_non_dupe(desc1, just_string[lang]):
                        just_string[lang].add(desc1)
                        m_h = create_non_dupe(re.sub(r'__+', '_', f'{opt_name}_LABEL_CAT'), opt, hash_n_string[lang])
                        hash_n_string[lang][m_h] = desc1
                    last = None
                    m_h = None
                    for j, info in enumerate(option_info):
                        last = info.group(1)
                        if is_viable_non_dupe(last, just_string[lang]):
                            just_string[lang].add(last)
                            m_h = create_non_dupe(re.sub(r'__+', '_', f'{opt_name}_INFO_{j}'), opt,
                                                  hash_n_string[lang])
                            hash_n_string[lang][m_h] = last
                    if last in just_string[lang]:  # category key should not be translated
                        hash_n_string[lang].pop(m_h)
                        just_string[lang].remove(last)
                else:
                    for j, info in enumerate(option_info):
                        gr1 = info.group(1)
                        if is_viable_non_dupe(gr1, just_string[lang]):
                            just_string[lang].add(gr1)
                            m_h = create_non_dupe(re.sub(r'__+', '_', f'{opt_name}_INFO_{j}'), opt,
                                                  hash_n_string[lang])
                            hash_n_string[lang][m_h] = gr1
            else:
                raise ValueError(f'Too few arguments in struct {struct_type_name[1]} option {option.group(1)}!')

            # group 4: key/value pairs
            if option.group(4):
                for j, kv_set in enumerate(cor.p_key_value.finditer(option.group(4))):
                    set_key, set_value = kv_set.group(1, 2)
                    if not is_viable_value(set_value):
                        if not is_viable_value(set_key):
                            continue
                        set_value = set_key
                    # re.fullmatch(r'(?:[+-][0-9]+)+', value[1:-1])
                    if set_value not in just_string[lang] and not re.sub(r'[+-]', '', set_value[1:-1]).isdigit():
                        clean_key = set_key.encode('ascii', errors='ignore').decode('unicode-escape')[1:-1]
                        clean_key = remove_special_chars(clean_key).upper().replace(' ', '_')
                        m_h = create_non_dupe(re.sub(r'__+', '_', f"OPTION_VAL_{clean_key}"), opt, hash_n_string[lang])
                        hash_n_string[lang][m_h] = set_value
                        just_string[lang].add(set_value)
    return hash_n_string
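
# Sketch of the returned mapping for a hypothetical v2 option "mycore_aspect"
# (macro names are illustrative; the exact captures come from core_option_regex.py):
#   {
#       '_us': {
#           'MYCORE_ASPECT_LABEL': '"Aspect Ratio"',
#           'MYCORE_ASPECT_INFO_0': '"Choose the display aspect ratio."',
#           'OPTION_VAL_4_3': '"4:3"',
#       },
#   }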


def create_msg_hash(intl_dir_path: str, core_name: str, keyword_string_dict: dict) -> dict:
    """Creates '<core_name>.h' files in 'intl/_<lang>/' containing the macro name & string combinations.

    :param intl_dir_path: Path to the intl directory.
    :param core_name: Name of the core, used for naming the files.
    :param keyword_string_dict: Dictionary of the form { '_<lang>': { 'macro': 'string', ... }, ... }.
    :return: Dictionary of the form { '_<lang>': 'path/to/file (./intl/_<lang>/<core_name>.h)', ... }.
    """
    files = {}
    for localisation in keyword_string_dict:
        path = os.path.join(intl_dir_path, localisation)  # intl/_<lang>
        files[localisation] = os.path.join(path, core_name + '.h')  # intl/_<lang>/<core_name>.h
        if not os.path.exists(path):
            os.makedirs(path)
        with open(files[localisation], 'w', encoding='utf-8') as crowdin_file:
            out_text = ''
            for keyword in keyword_string_dict[localisation]:
                out_text = f'{out_text}{keyword} {keyword_string_dict[localisation][keyword]}\n'
            crowdin_file.write(out_text)
    return files
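
# Each intl/_<lang>/<core_name>.h written here is a flat 'MACRO "string"' list, e.g.
# (hypothetical content; the stored values already carry their C quotes):
#   MYCORE_ASPECT_LABEL "Aspect Ratio"
#   OPTION_VAL_4_3 "4:3"
# The returned dict then maps '_us' -> 'intl/_us/core_options.h', and so on.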


def h2json(file_paths: dict) -> dict:
    """Converts .h files pointed to by file_paths into .jsons.

    :param file_paths: Dictionary of the form { '_<lang>': 'path/to/file (./intl/_<lang>/<core_name>.h)', ... }.
    :return: Dictionary of the form { '_<lang>': 'path/to/file (./intl/_<lang>/<core_name>.json)', ... }.
    """
    jsons = {}
    for file_lang in file_paths:
        jsons[file_lang] = file_paths[file_lang][:-2] + '.json'

        p = cor.p_masked

        with open(file_paths[file_lang], 'r+', encoding='utf-8') as h_file:
            text = h_file.read()
            result = p.finditer(text)
            messages = {}
            for msg in result:
                key, val = msg.group(1, 2)
                if key not in messages:
                    if key and val:
                        # unescape & remove "\n"
                        messages[key] = re.sub(r'"\s*(?:(?:/\*(?:.|[\r\n])*?\*/|//.*[\r\n]+)\s*)*"',
                                               '\\\n', val[1:-1].replace('\\\"', '"'))
                else:
                    print(f"DUPLICATE KEY in {file_paths[file_lang]}: {key}")
            with open(jsons[file_lang], 'w', encoding='utf-8') as json_file:
                json.dump(messages, json_file, indent=2)

    return jsons
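
# Resulting intl/_<lang>/<core_name>.json (sketch with hypothetical keys); the
# surrounding C quotes are stripped so Crowdin sees plain strings:
#   {
#     "MYCORE_ASPECT_LABEL": "Aspect Ratio",
#     "OPTION_VAL_4_3": "4:3"
#   }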


def json2h(intl_dir_path: str, json_file_path: str, core_name: str) -> None:
    """Converts the .json file in json_file_path into an .h file ready to be included in C code.

    :param intl_dir_path: Path to the intl directory.
    :param json_file_path: Base path of the translation .json.
    :param core_name: Name of the core, required for naming the files.
    :return: None
    """
    h_filename = os.path.join(json_file_path, core_name + '.h')
    json_filename = os.path.join(json_file_path, core_name + '.json')
    file_lang = os.path.basename(json_file_path).upper()

    if os.path.basename(json_file_path).lower() == '_us':
        print(' skipped')
        return

    p = cor.p_masked

    def update(s_messages, s_template, s_source_messages):
        translation = ''
        template_messages = p.finditer(s_template)
        for tp_msg in template_messages:
            old_key = tp_msg.group(1)
            if old_key in s_messages and s_messages[old_key] != s_source_messages[old_key]:
                tl_msg_val = s_messages[old_key]
                tl_msg_val = tl_msg_val.replace('"', '\\\"').replace('\n', '')  # escape
                translation = ''.join((translation, '#define ', old_key, file_lang, f' "{tl_msg_val}"\n'))
            else:  # Remove English duplicates and non-translatable strings
                translation = ''.join((translation, '#define ', old_key, file_lang, ' NULL\n'))
        return translation

    with open(os.path.join(intl_dir_path, '_us', core_name + '.h'), 'r', encoding='utf-8') as template_file:
        template = template_file.read()
    with open(os.path.join(intl_dir_path, '_us', core_name + '.json'), 'r+', encoding='utf-8') as source_json_file:
        source_messages = json.load(source_json_file)
    with open(json_filename, 'r+', encoding='utf-8') as json_file:
        messages = json.load(json_file)
    new_translation = update(messages, template, source_messages)
    with open(h_filename, 'w', encoding='utf-8') as h_file:
        h_file.seek(0)
        h_file.write(new_translation)
        h_file.truncate()
    return
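
# The per-language header written here holds one #define per macro with the language
# code appended; untranslated or English-identical strings become NULL so the C side
# falls back to the _us text. Hypothetical '_fr' output:
#   #define MYCORE_ASPECT_LABEL_FR "Rapport d'aspect"
#   #define OPTION_VAL_4_3_FR NULL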


def get_crowdin_client(dir_path: str) -> str:
    """Makes sure the Crowdin CLI client is present. If it isn't, it is fetched & extracted.

    :param dir_path: Directory in which 'crowdin-cli.jar' is expected/stored.
    :return: The path to 'crowdin-cli.jar'.
    """
    jar_name = 'crowdin-cli.jar'
    jar_path = os.path.join(dir_path, jar_name)

    if not os.path.isfile(jar_path):
        print('Downloading crowdin-cli.jar')
        crowdin_cli_file = os.path.join(dir_path, 'crowdin-cli.zip')
        crowdin_cli_url = 'https://downloads.crowdin.com/cli/v3/crowdin-cli.zip'
        req.urlretrieve(crowdin_cli_url, crowdin_cli_file)
        import zipfile
        with zipfile.ZipFile(crowdin_cli_file, 'r') as zip_ref:
            jar_dir = zip_ref.namelist()[0]
            for file in zip_ref.namelist():
                if file.endswith(jar_name):
                    jar_file = file
                    break
            zip_ref.extract(jar_file)
        os.rename(jar_file, jar_path)
        os.remove(crowdin_cli_file)
        shutil.rmtree(jar_dir)
    return jar_path
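
# Usage sketch (not called from this script's __main__; presumably used by the
# companion Crowdin scripts): get_crowdin_client(DIR_PATH) downloads crowdin-cli.zip
# on first use, leaves 'crowdin-cli.jar' next to this file and returns its path;
# running the jar afterwards requires a Java runtime.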


def create_intl_file(intl_file_path: str, intl_dir_path: str, text: str, core_name: str, file_path: str) -> None:
    """Creates 'libretro_core_options_intl.h' from Crowdin translations.

    :param intl_file_path: Path to 'libretro_core_options_intl.h'.
    :param intl_dir_path: Path to the intl directory.
    :param text: Content of the 'libretro_core_options.h' being translated.
    :param core_name: Name of the core. Needed to identify the files to pull the translations from.
    :param file_path: Path to the '<core name>_us.h' file, containing the original English texts.
    :return: None
    """
    msg_dict = {}
    lang_up = ''

    def replace_pair(pair_match):
        """Replaces a key-value pair of an option with the macros corresponding to the language.

        :param pair_match: The re match object representing the key-value pair block.
        :return: Replacement string.
        """
        offset = pair_match.start(0)
        if pair_match.group(1):  # key
            if pair_match.group(2) in msg_dict:  # value
                val = msg_dict[pair_match.group(2)] + lang_up
            elif pair_match.group(1) in msg_dict:  # use key if value not viable (e.g. NULL)
                val = msg_dict[pair_match.group(1)] + lang_up
            else:
                return pair_match.group(0)
        else:
            return pair_match.group(0)
        res = pair_match.group(0)[:pair_match.start(2) - offset] + val \
            + pair_match.group(0)[pair_match.end(2) - offset:]
        return res

    def replace_info(info_match):
        """Replaces the 'additional strings' of an option with the macros corresponding to the language.

        :param info_match: The re match object representing the 'additional strings' block.
        :return: Replacement string.
        """
        offset = info_match.start(0)
        if info_match.group(1) in msg_dict:
            res = info_match.group(0)[:info_match.start(1) - offset] + \
                  msg_dict[info_match.group(1)] + lang_up + \
                  info_match.group(0)[info_match.end(1) - offset:]
            return res
        else:
            return info_match.group(0)

    def replace_option(option_match):
        """Replaces strings within an option
        '{ "opt_key", "label", "additional strings", ..., { {"key", "value"}, ... }, ... }'
        within a struct with the macros corresponding to the language:
        '{ "opt_key", MACRO_LABEL, MACRO_STRINGS, ..., { {"key", MACRO_VALUE}, ... }, ... }'

        :param option_match: The re match object representing the option.
        :return: Replacement string.
        """
        # label
        offset = option_match.start(0)
        if option_match.group(2):
            res = option_match.group(0)[:option_match.start(2) - offset] + msg_dict[option_match.group(2)] + lang_up
        else:
            return option_match.group(0)
        # additional block
        if option_match.group(3):
            res = res + option_match.group(0)[option_match.end(2) - offset:option_match.start(3) - offset]
            new_info = p.sub(replace_info, option_match.group(3))
            res = res + new_info
        else:
            return res + option_match.group(0)[option_match.end(2) - offset:]
        # key-value pairs
        if option_match.group(4):
            res = res + option_match.group(0)[option_match.end(3) - offset:option_match.start(4) - offset]
            new_pairs = cor.p_key_value.sub(replace_pair, option_match.group(4))
            res = res + new_pairs + option_match.group(0)[option_match.end(4) - offset:]
        else:
            res = res + option_match.group(0)[option_match.end(3) - offset:]

        return res

    with open(file_path, 'r+', encoding='utf-8') as template:  # intl/_us/<core_name>.h
        masked_msgs = cor.p_masked.finditer(template.read())
        for msg in masked_msgs:
            msg_dict[msg.group(2)] = msg.group(1)

    with open(intl_file_path, 'r', encoding='utf-8') as intl:  # libretro_core_options_intl.h
        in_text = intl.read()
        intl_start = re.search(re.escape('/*\n'
                                         ' ********************************\n'
                                         ' * Core Option Definitions\n'
                                         ' ********************************\n'
                                         '*/\n'), in_text)
        if intl_start:
            out_txt = in_text[:intl_start.end(0)]
        else:
            intl_start = re.search(re.escape('#ifdef __cplusplus\n'
                                             'extern "C" {\n'
                                             '#endif\n'), in_text)
            out_txt = in_text[:intl_start.end(0)]

    for folder in os.listdir(intl_dir_path):  # intl/_*
        if os.path.isdir(os.path.join(intl_dir_path, folder)) and folder.startswith('_') \
                and folder != '_us' and folder != '__pycache__':
            translation_path = os.path.join(intl_dir_path, folder, core_name + '.h')  # <core_name>_<lang>.h
            # all structs: group(0) full struct, group(1) beginning, group(2) content
            struct_groups = cor.p_struct.finditer(text)
            lang_up = folder.upper()
            lang_low = folder.lower()
            out_txt = out_txt + f'/* {LANG_CODE_TO_R_LANG[lang_low]} */\n\n'  # /* RETRO_LANGUAGE_NAME */
            with open(translation_path, 'r+', encoding='utf-8') as f_in:  # <core name>.h
                out_txt = out_txt + f_in.read() + '\n'
            for construct in struct_groups:
                declaration = construct.group(1)
                struct_type_name = get_struct_type_name(declaration)
                if 3 > len(struct_type_name):  # no language specifier
                    new_decl = re.sub(re.escape(struct_type_name[1]), struct_type_name[1] + lang_low, declaration)
                else:
                    new_decl = re.sub(re.escape(struct_type_name[2]), lang_low, declaration)
                    if '_us' != struct_type_name[2]:
                        continue

                p = cor.p_info
                if 'retro_core_option_v2_category' == struct_type_name[0]:
                    p = cor.p_info_cat
                offset_construct = construct.start(0)
                start = construct.end(1) - offset_construct
                end = construct.start(2) - offset_construct
                out_txt = out_txt + new_decl + construct.group(0)[start:end]

                content = construct.group(2)
                new_content = cor.p_option.sub(replace_option, content)

                start = construct.end(2) - offset_construct
                out_txt = out_txt + new_content + construct.group(0)[start:] + '\n'

                if 'retro_core_option_v2_definition' == struct_type_name[0]:
                    out_txt = out_txt + f'struct retro_core_options_v2 options{lang_low}' \
                                        ' = {\n' \
                                        f'   option_cats{lang_low},\n' \
                                        f'   option_defs{lang_low}\n' \
                                        '};\n\n'
            # shutil.rmtree(JOINER.join((intl_dir_path, folder)))

    with open(intl_file_path, 'w', encoding='utf-8') as intl:
        intl.write(out_txt + '\n#ifdef __cplusplus\n'
                             '}\n#endif\n'
                             '\n#endif')
    return
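
# For each translated language this appends, roughly, a block of this shape to
# libretro_core_options_intl.h (sketch for a hypothetical 'intl/_fr' folder):
#   /* RETRO_LANGUAGE_FRENCH */
#   ...contents of intl/_fr/<core_name>.h (the #define lines)...
#   struct retro_core_option_v2_definition option_defs_fr[] = {
#      { "mycore_aspect", MYCORE_ASPECT_LABEL_FR, ..., { { "4:3", OPTION_VAL_4_3_FR }, ... }, ... },
#      ...
#   };
#   struct retro_core_options_v2 options_fr = {
#      option_cats_fr,
#      option_defs_fr
#   };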


# -------------------- MAIN -------------------- #

if __name__ == '__main__':
    #
    try:
        if os.path.isfile(sys.argv[1]):
            _temp = os.path.dirname(sys.argv[1])
        else:
            _temp = sys.argv[1]
        while _temp.endswith('/') or _temp.endswith('\\'):
            _temp = _temp[:-1]
        TARGET_DIR_PATH = _temp
    except IndexError:
        TARGET_DIR_PATH = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
        print("No path provided, assuming parent directory:\n" + TARGET_DIR_PATH)

    DIR_PATH = os.path.dirname(os.path.realpath(__file__))
    H_FILE_PATH = os.path.join(TARGET_DIR_PATH, 'libretro_core_options.h')
    INTL_FILE_PATH = os.path.join(TARGET_DIR_PATH, 'libretro_core_options_intl.h')

    _core_name = 'core_options'
    try:
        print('Getting texts from libretro_core_options.h')
        with open(H_FILE_PATH, 'r+', encoding='utf-8') as _h_file:
            _main_text = _h_file.read()
        _hash_n_str = get_texts(_main_text)
        _files = create_msg_hash(DIR_PATH, _core_name, _hash_n_str)
        _source_jsons = h2json(_files)
    except Exception as e:
        print(e)

    print('Getting texts from libretro_core_options_intl.h')
    with open(INTL_FILE_PATH, 'r+', encoding='utf-8') as _intl_file:
        _intl_text = _intl_file.read()
    _hash_n_str_intl = get_texts(_intl_text)
    _intl_files = create_msg_hash(DIR_PATH, _core_name, _hash_n_str_intl)
    _intl_jsons = h2json(_intl_files)

    print('\nAll done!')