Files
Shiip-of-Hakinian-Espanol/scripts/add_lus_loc.py
nickpons666 77268dbf57
Some checks failed
generate-builds / generate-soh-otr (push) Has been cancelled
generate-builds / build-macos (push) Has been cancelled
generate-builds / build-linux (push) Has been cancelled
generate-builds / build-windows (push) Has been cancelled
Convert all remaining hardcoded text to LUS_LOC
- Replace 5 hardcoded text strings with LUS_LOC() calls:
  * ResolutionEditor.cpp: 'Click to resolve' -> BUTTON_CLICK_TO_RESOLVE
  * ResolutionEditor.cpp: ' ' (space) -> TEXT_SPACE
  * SohMenuNetwork.cpp: ':' (colon) -> TEXT_COLON (2 instances)
  * UIWidgets.cpp: '+' (plus) -> TEXT_PLUS
- Add new translation keys to both en_US.json and es_ES.json:
  * BUTTON_CLICK_TO_RESOLVE: 'Click to resolve' / 'Haz clic para resolver'
  * TEXT_SPACE: ' ' / ' '
  * TEXT_COLON: ':' / ':'
  * TEXT_PLUS: '+' / '+'
- All hardcoded UI text now uses localization system
- Compilation successful with only minor format warnings

This completes the immediate task of eliminating hardcoded text strings.
2026-03-28 16:32:42 -06:00

193 lines
7.0 KiB
Python

#!/usr/bin/env python3
import re
import json
import sys
import argparse
from pathlib import Path
# Repository layout anchors, resolved relative to this script's location
# (scripts/ lives one level below the project root).
PROJECT_ROOT = Path(__file__).parent.parent
LANGUAGES_DIR = PROJECT_ROOT / "languages"  # holds en_US.json / es_ES.json
SOH_DIR = PROJECT_ROOT / "soh" / "soh"
LIBULTRASHIP_DIR = PROJECT_ROOT / "libultraship" / "src"
def load_translations():
    """Load the English (en_US) translation catalogue as a dict."""
    catalogue_path = LANGUAGES_DIR / "en_US.json"
    with open(catalogue_path, 'r', encoding='utf-8') as fh:
        return json.load(fh)
def generate_key(text, context_prefix=""):
    """Build a translation key from *text*, optionally namespaced by a prefix.

    Non-alphanumeric characters are stripped, the first four words are
    upper-cased and joined with underscores; empty input yields 'UNKNOWN'.
    """
    # Keep only letters, digits and whitespace before tokenizing.
    alnum_only = re.sub(r'[^a-zA-Z0-9\s]', '', text)
    tokens = alnum_only.upper().split()[:4]
    base_key = '_'.join(tokens) or 'UNKNOWN'
    if not context_prefix:
        return base_key
    return f"{context_prefix}_{base_key}"
def get_best_prefix(context, text):
    """Return the key prefix that best matches the widget *context*.

    *text* is accepted for interface compatibility but is not consulted.

    Fix: the original tested `'Tooltip' in context or '.Tooltip' in context`
    and `'Text' in context or 'SeparatorText' in context` — the second
    operand of each is implied by the first (substring containment), so the
    redundant tests were removed.  Behavior is unchanged.
    """
    if 'Tooltip' in context:
        return 'TOOLTIP'
    if 'Button' in context or 'Selectable' in context:
        return 'WIDGET'
    if 'Text' in context:  # also covers 'SeparatorText'
        return 'TEXT'
    if 'Label' in context or 'MenuItem' in context:
        return 'MENU'
    # Fallback for unrecognized contexts.
    return 'TEXT'
def find_or_create_key(text, context, translations):
    """Return an existing translation key for *text*, creating one if needed.

    Looks for a context-prefixed key first, then a bare key; if neither
    exists, the prefixed key is registered.  Mutates *translations* in
    place when a new key is added.

    Fix: the original guarded the creation with
    `if f"{prefix}_{base_key}" not in translations:` — always true at that
    point because the earlier `if full_key in translations: return` already
    handled the in-dict case.  The dead condition was removed; behavior is
    unchanged.
    """
    prefix = get_best_prefix(context, text)
    base_key = generate_key(text)
    full_key = f"{prefix}_{base_key}"
    # Prefer an already-registered prefixed key, then a bare one.
    if full_key in translations:
        return full_key
    if base_key in translations:
        return base_key
    # Neither variant exists: register the new prefixed key.
    translations[full_key] = text
    return full_key
def process_file(filepath, translations, dry_run=False):
    """Process one C++ file, wrapping hardcoded UI strings in LUS_LOC().

    Returns a (changed, replacement_count) tuple.  When dry_run is True
    the file is scanned but never written back.
    """
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            content = f.read()
    except Exception as e:
        print(f" ⚠ Error leyendo {filepath}: {e}")
        return False, 0
    original = content
    replacements = 0
    # Search patterns (the (?!LUS_LOC) lookahead skips already-translated
    # calls).  Each entry is (regex, context label used to pick a prefix).
    patterns = [
        # Tooltips: .Tooltip("text") — longer minimum length (5) than the rest
        (r'(\.Tooltip\()(["\'])(?!LUS_LOC)([^"\']{5,})(["\'])(\))', 'Tooltip'),
        # Selectable: ImGui::Selectable("text")
        (r'(ImGui::Selectable\()(["\'])(?!LUS_LOC)([^"\']{3,})(["\'])(\))', 'Selectable'),
        # Button: ImGui::Button("text")
        (r'(ImGui::Button\()(["\'])(?!LUS_LOC)([^"\']{3,})(["\'])(\))', 'Button'),
        # Text: ImGui::Text("text")
        (r'(ImGui::Text\()(["\'])(?!LUS_LOC)([^"\']{3,})(["\'])(\))', 'Text'),
        # SeparatorText: ImGui::SeparatorText("text")
        (r'(ImGui::SeparatorText\()(["\'])(?!LUS_LOC)([^"\']{3,})(["\'])(\))', 'SeparatorText'),
        # MenuItem: ImGui::MenuItem("text") — deliberately has no closing-')'
        # group (only 4 groups), so the original trailing parenthesis and any
        # further arguments are left untouched by the substitution.
        (r'(ImGui::MenuItem\()(["\'])(?!LUS_LOC)([^"\']{3,})(["\'])', 'MenuItem'),
    ]
    for pattern, context in patterns:
        def replacer(match):
            # `nonlocal` so every substitution bumps the outer tally.
            nonlocal replacements
            prefix = match.group(1)
            text = match.group(3)
            # The MenuItem pattern has only 4 groups: suffix stays empty there.
            suffix = match.group(5) if len(match.groups()) >= 5 else ''
            key = find_or_create_key(text, context, translations)
            replacements += 1
            return f'{prefix}LUS_LOC("{key}").c_str(){suffix}'
        content = re.sub(pattern, replacer, content)
    if content != original and not dry_run:
        try:
            with open(filepath, 'w', encoding='utf-8') as f:
                f.write(content)
            return True, replacements
        except Exception as e:
            print(f" ⚠ Error escribiendo {filepath}: {e}")
            return False, 0
    # Dry-run or no change: report whether anything was (or would be) replaced.
    return replacements > 0, replacements
def process_directory(search_dir, translations, dry_run=False):
    """Run process_file over every .cpp file below *search_dir*.

    Prints one line per modified file and returns a
    (modified_file_count, total_replacement_count) tuple.
    """
    modified_files = 0
    total_replacements = 0
    mode = "[PREVIEW]" if dry_run else ""
    for cpp_file in search_dir.rglob("*.cpp"):
        changed, count = process_file(cpp_file, translations, dry_run)
        if not changed:
            continue
        modified_files += 1
        total_replacements += count
        print(f" {mode} {cpp_file.relative_to(PROJECT_ROOT)}: {count} reemplazo(s)")
    return modified_files, total_replacements
def sync_languages_json(from_lang="en_US.json", to_lang="es_ES.json"):
    """Copy keys missing from *to_lang* out of *from_lang*.

    Missing entries are filled with the source-language text as a
    temporary placeholder.  Returns the number of keys added.
    """
    source_path = LANGUAGES_DIR / from_lang
    target_path = LANGUAGES_DIR / to_lang
    with open(source_path, 'r', encoding='utf-8') as fh:
        source_data = json.load(fh)
    with open(target_path, 'r', encoding='utf-8') as fh:
        target_data = json.load(fh)
    missing_keys = set(source_data) - set(target_data)
    for key in missing_keys:
        # Placeholder: reuse the source text until it is translated.
        target_data[key] = source_data[key]
    added = len(missing_keys)
    if added > 0:
        with open(target_path, 'w', encoding='utf-8') as fh:
            json.dump(target_data, fh, indent=4, ensure_ascii=False)
        print(f"\n✓ Sincronización: Añadidas {added} claves faltantes a {to_lang}")
    else:
        print(f"\n✓ Sincronización: {to_lang} está actualizado")
    return added
def main():
    """CLI entry point: scan sources, update the catalogue, report totals."""
    parser = argparse.ArgumentParser(description='Añade LUS_LOC() a archivos C++')
    parser.add_argument('--dry-run', action='store_true', help='Vista previa sin modificar archivos')
    parser.add_argument('--dir', type=str, default='soh', help='Directorio a procesar: soh, libultraship, o ambos')
    parser.add_argument('--sync', action='store_true', help='Sincronizar archivos JSON después')
    args = parser.parse_args()

    translations = load_translations()
    print(f"📚 Cargadas {len(translations)} traducciones existentes\n")
    if args.dry_run:
        print("🔍 MODO PREVIEW (sin modificar archivos)\n")

    # Resolve which source trees to scan ('ambos' selects both).
    targets = []
    if args.dir in ('soh', 'ambos'):
        targets.append(SOH_DIR)
    if args.dir in ('libultraship', 'ambos'):
        targets.append(LIBULTRASHIP_DIR)

    total_modified = 0
    total_replacements = 0
    for search_dir in targets:
        if not search_dir.exists():
            continue
        print(f"📂 Procesando: {search_dir.relative_to(PROJECT_ROOT)}")
        modified, replacements = process_directory(search_dir, translations, args.dry_run)
        total_modified += modified
        total_replacements += replacements

    # Persist any newly created keys back to the English catalogue.
    if not args.dry_run and total_replacements > 0:
        with open(LANGUAGES_DIR / "en_US.json", 'w', encoding='utf-8') as fh:
            json.dump(translations, fh, indent=4, ensure_ascii=False)
        print(f"\n✓ Actualizado en_US.json con {len(translations)} claves totales")

    print(f"\n{'='*60}")
    print(f"📊 Resultados: {total_modified} archivo(s), {total_replacements} reemplazo(s)")
    print(f"{'='*60}")

    # Optionally back-fill the Spanish catalogue afterwards.
    if args.sync and not args.dry_run:
        sync_languages_json()


if __name__ == "__main__":
    main()