import itertools
import os
import re
from collections import defaultdict
from functools import cache
from hashlib import md5

from dev_tools.keywords.base import TextMap, UI_LANGUAGES, replace_templates, text_to_variable
from module.base.code_generator import CodeGenerator
from module.config.utils import deep_get, read_file
from module.logger import logger


def blessing_name(name: str) -> str:
    """
    Convert a blessing name into a valid Python variable name,
    prefixing an underscore when the name starts with a digit.
    """
    name = text_to_variable(name)
    name = re.sub(r'^\d', lambda match: f"_{match.group(0)}", name)
    return name
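
# A minimal usage sketch (hypothetical names; assumes text_to_variable maps
# spaces and punctuation to underscores):
#   blessing_name('Now and Forever')   # -> 'Now_and_Forever'
#   blessing_name('128 Blessings')     # -> '_128_Blessings', a valid identifier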


class KeywordExtract:
    def __init__(self):
        self.text_map: dict[str, TextMap] = {lang: TextMap(lang) for lang in UI_LANGUAGES}
        # self.text_map['cn'] = TextMap('chs')
        self.keywords_id: list[int] = []

    def find_keyword(self, keyword, lang) -> tuple[int, str]:
        """
        Args:
            keyword: Text string or text id to search for
            lang: Language to search in

        Returns:
            text id (hash in TextMap)
            text
        """
        text_map = self.text_map[lang]
        return text_map.find(keyword)
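
    # Illustrative call (the hash value is hypothetical):
    #   KeywordExtract().find_keyword('模拟宇宙', lang='cn')  # -> (123456789, '模拟宇宙')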

    def load_keywords(self, keywords: list[str | int], lang='cn'):
        text_map = self.text_map[lang]
        keywords_id = [text_map.find(keyword) for keyword in keywords]
        # Keep only keywords that were found and have non-empty text
        self.keywords_id = [keyword[0] for keyword in keywords_id if keyword[0] != 0 and keyword[1].strip()]

    def clear_keywords(self):
        self.keywords_id = []

    def write_keywords(
            self,
            keyword_class,
            output_file: str = '',
            text_convert=text_to_variable,
            generator: CodeGenerator = None,
            extra_attrs: dict[str, dict] = None
    ):
        """
        Args:
            keyword_class: Name of the keyword class to import and instantiate
            output_file: File to write; if empty, nothing is written and loaded keywords are kept
            text_convert: Function that converts keyword text into a variable name
            generator: Reuse an existing code generator
            extra_attrs: Extra attributes to write into each keyword
        """
        if generator is None:
            gen = CodeGenerator()
            gen.Import(f"""
            from .classes import {keyword_class}
            """)
            gen.CommentAutoGenerage('dev_tools.keyword_extract')
        else:
            gen = generator

        last_id = getattr(gen, 'last_id', 0)
        if extra_attrs:
            keyword_num = len(self.keywords_id)
            for attr_key, attr_value in extra_attrs.items():
                if len(attr_value) != keyword_num:
                    print(f"Extra attribute {attr_key} does not match the size of keywords")
                    return
        for index, keyword in enumerate(self.keywords_id):
            _, name = self.find_keyword(keyword, lang='en')
            name = text_convert(replace_templates(name))
            with gen.Object(key=name, object_class=keyword_class):
                gen.ObjectAttr(key='id', value=index + last_id + 1)
                gen.ObjectAttr(key='name', value=name)
                for lang in UI_LANGUAGES:
                    gen.ObjectAttr(key=lang, value=replace_templates(self.find_keyword(keyword, lang=lang)[1]))
                if extra_attrs:
                    for attr_key, attr_value in extra_attrs.items():
                        gen.ObjectAttr(key=attr_key, value=attr_value[keyword])
                gen.last_id = index + last_id + 1

        if output_file:
            print(f'Write {output_file}')
            gen.write(output_file)
            self.clear_keywords()
        return gen
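
    # Output sketch (assumed shape of the CodeGenerator output; attribute order
    # may differ):
    #   Keyword_Name = DungeonNav(id=1, name='Keyword_Name', cn='...', cht='...',
    #                             en='...', jp='...', es='...')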

    def load_quests(self, quests, lang='cn'):
        """
        Load a set of quest keywords.

        Args:
            quests: Iterable collection of quest ids
            lang:
        """
        quest_data = read_file(os.path.join(TextMap.DATA_FOLDER, 'ExcelOutput', 'QuestData.json'))
        quest_data = {
            str(deep_get(data, 'QuestID')): data
            for data in quest_data
        }
        quests_hash = [quest_data[str(quest_id)]["QuestTitle"]["Hash"] for quest_id in quests]
        quest_keywords = list(dict.fromkeys([self.text_map[lang].find(quest_hash)[1] for quest_hash in quests_hash]))
        self.load_keywords(quest_keywords, lang)

    def write_daily_quest_keywords(self):
        text_convert = text_to_variable
        keyword_class = 'DailyQuest'
        gen = CodeGenerator()
        gen.Import(f"""
        from .classes import {keyword_class}
        """)
        gen.CommentAutoGenerage('dev_tools.keyword_extract')

        # Obsolete quest names, skipped when writing keywords; "->" marks the current name
        old_quest = [
            "Go_on_assignment_1_time",  # -> Dispatch_1_assignments
            "Complete_Simulated_Universe_1_times",  # same
            "Complete_1_stage_in_Simulated_Universe_Any_world",
            # -> Complete_Divergent_Universe_or_Simulated_Universe_1_times
            "Complete_Calyx_Crimson_1_time",  # -> Clear_Calyx_Crimson_1_times
            "Enter_combat_by_attacking_enemy_Weakness_and_win_3_times",
            # -> Enter_combat_by_attacking_enemie_Weakness_and_win_1_times
            "Use_Technique_2_times",  # -> Use_Technique_1_times
            "Destroy_3_destructible_objects",  # -> Destroy_1_destructible_objects
            "Obtain_victory_in_combat_with_Support_Characters_1_time",
            # -> Obtain_victory_in_combat_with_Support_Characters_1_times
            "Level_up_any_character_1_time",  # -> Level_up_any_character_1_times
            "Level_up_any_Light_Cone_1_time",  # -> Level_up_any_Light_Cone_1_times
            "Synthesize_Consumable_1_time",  # -> Use_the_Omni_Synthesizer_1_times
            "Synthesize_material_1_time",  # -> Use_the_Omni_Synthesizer_1_times
            "Take_1_photo",  # -> Take_photos_1_times
            "Level_up_any_Relic_1_time",  # -> Level_up_any_Relic_1_times
        ]

        correct_times = {
            # "Dispatch_1_assignments": 1,
            # "Complete_Divergent_Universe_or_Simulated_Universe_1_times": 1,
            # "Clear_Calyx_Crimson_1_times": 1,
            "Enter_combat_by_attacking_enemie_Weakness_and_win_1_times": 3,
            "Use_Technique_1_times": 2,
            "Destroy_1_destructible_objects": 3,
            # "Obtain_victory_in_combat_with_Support_Characters_1_times": 1,
            # "Level_up_any_character_1_times": 1,
            # "Level_up_any_Light_Cone_1_times": 1,
            # "Use_the_Omni_Synthesizer_1_times": 1,
            # "Take_photos_1_times": 1,
            # "Level_up_any_Relic_1_times": 1,
            "Consume_1_Trailblaze_Power": 120
        }

        def replace_templates_quest(text: str, correct_time=1) -> str:
            text = replace_templates(text)
            text = text.replace('1', f'{correct_time}')
            return text
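
        # Illustrative (assuming replace_templates leaves plain text unchanged):
        #   replace_templates_quest('Consume 1 Trailblaze Power', 120)
        #   -> 'Consume 120 Trailblaze Power'
        # Every '1' in the text is replaced, which is safe here because the
        # quest texts carry the count as their only '1'.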

        last_id = getattr(gen, 'last_id', 0)
        for index, keyword in enumerate(self.keywords_id):
            _, old_name = self.find_keyword(keyword, lang='en')
            old_name = text_convert(replace_templates(old_name))
            if old_name in old_quest:
                continue
            name = old_name.replace('1', str(correct_times.setdefault(old_name, 1)))

            with gen.Object(key=name, object_class=keyword_class):
                gen.ObjectAttr(key='id', value=index + last_id + 1)
                gen.ObjectAttr(key='name', value=name)
                for lang in UI_LANGUAGES:
                    gen.ObjectAttr(key=lang, value=replace_templates_quest(self.find_keyword(keyword, lang=lang)[1],
                                                                           correct_times.setdefault(old_name, 1)))
                gen.last_id = index + last_id + 1

        output_file = './tasks/daily/keywords/daily_quest.py'
        print(f'Write {output_file}')
        gen.write(output_file)
        self.clear_keywords()
        return gen

    def generate_daily_quests(self):
        daily_quest = read_file(os.path.join(TextMap.DATA_FOLDER, 'ExcelOutput', 'DailyQuest.json'))
        self.load_quests([str(deep_get(data, 'DailyID')) for data in daily_quest])
        self.write_daily_quest_keywords()

    def generate_shadow_with_characters(self):
        # Damage type -> damage hash
        damage_info = dict()
        for data in read_file(os.path.join(
                TextMap.DATA_FOLDER, 'ExcelOutput',
                'DamageType.json'
        )):
            type_name = deep_get(data, 'ID', 0)
            damage_info[type_name] = deep_get(data, 'DamageTypeName.Hash')
        # Character id -> character hash & damage type
        character_info = dict()
        for data in read_file(os.path.join(
                TextMap.DATA_FOLDER, 'ExcelOutput',
                'AvatarConfig.json'
        )):
            voice = deep_get(data, 'AvatarVOTag', default='')
            if voice == 'test':
                continue
            name_hash = deep_get(data, 'AvatarName.Hash')
            damage_type = deep_get(data, 'DamageType')
            character_info[data['AvatarID']] = (
                name_hash, damage_info[damage_type])
        # Item id -> character id
        promotion_info = defaultdict(list)

        def merge_same(data: list[dict], keyword) -> list:
            # Group rows that share the same value of `keyword` into one dict,
            # keyed by their order of appearance: '0', '1', ...
            mp = defaultdict(dict)
            for d in data:
                length = len(mp[d[keyword]])
                mp[d[keyword]][str(length)] = d
            return list(mp.values())
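
        # Illustrative (hypothetical rows):
        #   merge_same([{'ID': 5, 'Lv': 1}, {'ID': 5, 'Lv': 2}], keyword='ID')
        #   -> [{'0': {'ID': 5, 'Lv': 1}, '1': {'ID': 5, 'Lv': 2}}]
        # which is why deep_get(data, '0.AvatarID') style lookups work below.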

        for data in merge_same(read_file(os.path.join(
                TextMap.DATA_FOLDER, 'ExcelOutput',
                'AvatarPromotionConfig.json'
        )), keyword='AvatarID'):
            character_id = deep_get(data, '0.AvatarID')
            item_id = deep_get(data, '2.PromotionCostList')[-1]['ItemID']
            try:
                promotion_info[item_id].append(character_info[character_id])
            except KeyError:
                pass
        # Shadow hash -> item id
        shadow_info = dict()
        for data in merge_same(read_file(os.path.join(
                TextMap.DATA_FOLDER, 'ExcelOutput',
                'MappingInfo.json'
        )), keyword='ID'):
            farm_type = deep_get(data, '0.FarmType')
            if farm_type != 'ELEMENT':
                continue
            shadow_hash = deep_get(data, '0.Name.Hash')
            item_id = deep_get(data, '5.DisplayItemList')[-1]['ItemID']
            shadow_info[shadow_hash] = promotion_info[item_id]
        prefix_dict = {
            'cn': '角色晋阶材料:',
            'cht': '角色晉階材料:',
            'jp': 'キャラクター昇格素材:',
            'en': 'Ascension: ',
            'es': 'Ascension: '
        }
        keyword_class = 'DungeonDetailed'
        output_file = './tasks/dungeon/keywords/dungeon_detailed.py'
        gen = CodeGenerator()
        gen.Import(f"""
        from .classes import {keyword_class}
        """)
        gen.CommentAutoGenerage('dev_tools.keyword_extract')
        for index, (keyword, characters) in enumerate(shadow_info.items()):
            _, name = self.find_keyword(keyword, lang='en')
            name = text_to_variable(name).replace('Shape_of_', '')
            with gen.Object(key=name, object_class=keyword_class):
                gen.ObjectAttr(key='id', value=index + 1)
                gen.ObjectAttr(key='name', value=name)
                for lang in UI_LANGUAGES:
                    character_names = [
                        replace_templates(self.find_keyword(c[0], lang)[1])
                        for c in characters
                    ]
                    character_names = list(dict.fromkeys(character_names))
                    character_names = ' / '.join(character_names)
                    damage_type = self.find_keyword(characters[0][1], lang)[1]
                    if lang in {'en', 'es'}:
                        value = f'{prefix_dict[lang]}{damage_type} ({character_names})'
                    else:
                        value = f'{prefix_dict[lang]}{damage_type}({character_names})'
                    gen.ObjectAttr(key=lang, value=value)
        print(f'Write {output_file}')
        gen.write(output_file)
        self.clear_keywords()

    def generate_forgotten_hall_stages(self):
        keyword_class = "ForgottenHallStage"
        output_file = './tasks/forgotten_hall/keywords/stage.py'
        gen = CodeGenerator()
        gen.Import(f"""
        from .classes import {keyword_class}
        """)
        gen.CommentAutoGenerage('dev_tools.keyword_extract')
        for stage_id in range(1, 16):
            id_str = str(stage_id).rjust(2, '0')
            with gen.Object(key=f"Stage_{stage_id}", object_class=keyword_class):
                gen.ObjectAttr(key='id', value=stage_id)
                gen.ObjectAttr(key='name', value=id_str)
                for lang in UI_LANGUAGES:
                    gen.ObjectAttr(key=lang, value=id_str)

        print(f'Write {output_file}')
        gen.write(output_file)
        self.clear_keywords()

    def generate_assignments(self):
        from dev_tools.keywords.assignment import GenerateAssignment
        GenerateAssignment()()

    def generate_map_planes(self):
        from dev_tools.keywords.map_world import GenerateMapWorld
        GenerateMapWorld()()
        from dev_tools.keywords.map_plane import GenerateMapPlane
        GenerateMapPlane()()

    def generate_character_keywords(self):
        from dev_tools.keywords.character import GenerateCharacterList, GenerateCharacterHeight, GenerateCombatType
        GenerateCharacterList()()
        GenerateCharacterHeight()()
        GenerateCombatType()()

    def generate_battle_pass_quests(self):
        battle_pass_quests = read_file(os.path.join(TextMap.DATA_FOLDER, 'ExcelOutput', 'BattlePassConfig.json'))
        latest_quests = list(battle_pass_quests)[-1]
        week_quest_list = deep_get(latest_quests, "WeekQuestList")
        week_order1 = deep_get(latest_quests, "WeekOrder1")
        week_chain_quest_list = deep_get(latest_quests, "WeekChainQuestList")
        quests = week_quest_list + week_order1 + week_chain_quest_list
        self.load_quests(quests)
        self.write_keywords(keyword_class='BattlePassQuest', output_file='./tasks/battle_pass/keywords/quest.py')

    def generate_rogue_buff(self):
        # paths
        aeons = read_file(os.path.join(TextMap.DATA_FOLDER, 'ExcelOutput', 'RogueAeonDisplay.json'))
        aeons_hash = [deep_get(aeon, 'RogueAeonPathName2.Hash') for aeon in aeons]
        self.keywords_id = aeons_hash
        self.write_keywords(keyword_class='RoguePath', output_file='./tasks/rogue/keywords/path.py')

        # blessings
        blessings_info = read_file(os.path.join(TextMap.DATA_FOLDER, 'ExcelOutput', 'RogueBuff.json'))
        blessings_name_map = read_file(os.path.join(TextMap.DATA_FOLDER, 'ExcelOutput', 'RogueMazeBuff.json'))
        blessings_name_map = {
            deep_get(data, 'ID'): data
            for data in blessings_name_map
        }
        blessings_id = [deep_get(blessing, 'MazeBuffID') for blessing in blessings_info
                        if not deep_get(blessing, 'AeonID')][1:]
        resonances_id = [deep_get(blessing, 'MazeBuffID') for blessing in blessings_info
                         if deep_get(blessing, 'AeonID')]
        blessings_info = {
            str(deep_get(data, 'MazeBuffID')): data
            for data in blessings_info if deep_get(data, 'MazeBuffLevel') == 1
        }

        # ignore endless buffs
        endless_buffs = read_file(os.path.join(TextMap.DATA_FOLDER, 'ExcelOutput', 'RogueEndlessMegaBuffDesc.json'))
        endless_buff_ids = [int(deep_get(data, 'MazeBuffID')) for data in endless_buffs]
        blessings_id = [id_ for id_ in blessings_id if id_ not in endless_buff_ids]

        def get_blessing_infos(id_list, with_enhancement: bool):
            blessings_hash = [deep_get(blessings_name_map, f"{blessing_id}.BuffName.Hash")
                              for blessing_id in id_list]
            blessings_path_id = {blessing_hash: int(deep_get(blessings_info, f'{blessing_id}.RogueBuffType')) - 119
                                 # 119 is the magic offset that makes the buff type match the path ids above
                                 for blessing_hash, blessing_id in zip(blessings_hash, id_list)}
            blessings_category = {blessing_hash: deep_get(blessings_info, f'{blessing_id}.RogueBuffCategory')
                                  for blessing_hash, blessing_id in zip(blessings_hash, id_list)}
            category_map = {
                "Common": 1,
                "Rare": 2,
                "Legendary": 3,
            }
            blessings_rarity = {blessing_hash: category_map[blessing_category]
                                for blessing_hash, blessing_category in blessings_category.items()}
            enhancement = {blessing_hash: "" for blessing_hash in blessings_hash}
            if with_enhancement:
                return blessings_hash, {'path_id': blessings_path_id, 'rarity': blessings_rarity,
                                        'enhancement': enhancement}
            else:
                return blessings_hash, {'path_id': blessings_path_id, 'rarity': blessings_rarity}
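
        # Return shape sketch (hypothetical hash h):
        #   get_blessing_infos([buff_id], with_enhancement=True)
        #   -> ([h], {'path_id': {h: ...}, 'rarity': {h: ...}, 'enhancement': {h: ''}})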

        hash_list, extra_attrs = get_blessing_infos(blessings_id, with_enhancement=True)
        self.keywords_id = hash_list
        self.write_keywords(keyword_class='RogueBlessing', output_file='./tasks/rogue/keywords/blessing.py',
                            text_convert=blessing_name, extra_attrs=extra_attrs)

        hash_list, extra_attrs = get_blessing_infos(resonances_id, with_enhancement=False)
        self.keywords_id = hash_list
        self.write_keywords(keyword_class='RogueResonance', output_file='./tasks/rogue/keywords/resonance.py',
                            text_convert=blessing_name, extra_attrs=extra_attrs)

    def generate_rogue_events(self):
        # An event contains several options
        event_title_file = os.path.join(
            TextMap.DATA_FOLDER, 'ExcelOutput',
            'RogueTalkNameConfig.json'
        )
        event_title_ids = {
            str(deep_get(data, 'TalkNameID')): deep_get(data, 'Name.Hash')
            for data in read_file(event_title_file)
        }
        event_title_texts = defaultdict(list)
        for title_id, title_hash in event_title_ids.items():
            if title_hash not in self.text_map['en']:
                continue
            _, title_text = self.find_keyword(title_hash, lang='en')
            event_title_texts[text_to_variable(title_text)].append(title_id)
        option_file = os.path.join(
            TextMap.DATA_FOLDER, 'ExcelOutput',
            'RogueDialogueOptionDisplay.json'
        )
        option_ids = {
            str(deep_get(data, 'OptionDisplayID')): deep_get(data, 'OptionTitle.Hash')
            for data in read_file(option_file)
        }
        # Key: event name hash, value: list of option id/hash
        options_grouped = dict()
        # Key: option md5, value: option text hash in StarRailData
        option_md5s = dict()

        @cache
        def get_option_md5(option_hash):
            # Fingerprint an option by hashing its text in every UI language,
            # so options that read the same in all languages are treated as one
            m = md5()
            for lang in UI_LANGUAGES:
                option_text = self.find_keyword(option_hash, lang=lang)[1]
                m.update(option_text.encode())
            return m.hexdigest()

        # Drop invalid or duplicate options
        def clean_options(options):
            visited = set()
            for i in options:
                option_hash = option_ids[str(i)]
                if option_hash not in self.text_map['en']:
                    continue
                option_md5 = get_option_md5(option_hash)
                if option_md5 in visited:
                    continue
                if option_md5 not in option_md5s:
                    option_md5s[option_md5] = option_hash
                visited.add(option_md5)
                yield option_md5s[option_md5]
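
        # Net effect: option ids whose texts are identical in every UI language
        # collapse into one canonical hash, and options missing an English entry
        # are dropped.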

        for group_title_ids in event_title_texts.values():
            group_option_ids = []
            for title_id in group_title_ids:
                # Special case for Nildis (尼尔迪斯牌)
                # Missing option: Give up
                if title_id == '13501':
                    group_option_ids.append(13506)
                option_id = title_id
                # title ids in Swarm Disaster (寰宇蝗灾) have a "1" prefix
                if option_id not in option_ids:
                    option_id = title_id[1:]
                # Some titles may not have corresponding options
                if option_id not in option_ids:
                    continue
                group_option_ids += list(itertools.takewhile(
                    lambda x: str(x) in option_ids,
                    itertools.count(int(option_id))
                ))
            if group_option_ids:
                title_hash = event_title_ids[group_title_ids[0]]
                options_grouped[title_hash] = group_option_ids

        for title_hash, options in options_grouped.items():
            options_grouped[title_hash] = list(clean_options(options))
        for title_hash in list(options_grouped.keys()):
            if len(options_grouped[title_hash]) == 0:
                options_grouped.pop(title_hash)
        option_dup_count = defaultdict(int)
        for option_hash in option_md5s.values():
            if option_hash not in self.text_map['en']:
                continue
            _, option_text = self.find_keyword(option_hash, lang='en')
            option_dup_count[text_to_variable(option_text)] += 1

        def option_text_convert(option_md5, md5_prefix_len=4):
            def wrapper(option_text):
                option_var = text_to_variable(option_text)
                if option_dup_count[option_var] > 1:
                    option_var = f'{option_var}_{option_md5[:md5_prefix_len]}'
                return option_var

            return wrapper
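
        # Illustrative: if two distinct options both read 'Leave' in English,
        # they become e.g. 'Leave_1a2b' and 'Leave_9f3e' (hypothetical md5
        # prefixes) instead of generating colliding variable names.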

        option_gen = None
        option_hash_to_keyword_id = dict()  # option hash -> option keyword id
        for i, (option_md5, option_hash) in enumerate(option_md5s.items(), start=1):
            self.load_keywords([option_hash])
            option_gen = self.write_keywords(
                keyword_class='RogueEventOption',
                text_convert=option_text_convert(option_md5),
                generator=option_gen
            )
            option_hash_to_keyword_id[option_hash] = i
        output_file = './tasks/rogue/keywords/event_option.py'
        print(f'Write {output_file}')
        option_gen.write(output_file)

        # title hash -> option keyword id
        title_to_option_keyword_id = {
            title_hash: sorted(
                option_hash_to_keyword_id[x] for x in option_hashes
            ) for title_hash, option_hashes in options_grouped.items()
        }
        self.load_keywords(options_grouped.keys())
        self.write_keywords(
            keyword_class='RogueEventTitle',
            output_file='./tasks/rogue/keywords/event_title.py',
            extra_attrs={'option_ids': title_to_option_keyword_id}
        )
        try:
            from tasks.rogue.event.event import OcrRogueEventOption
        except AttributeError as e:
            logger.error(e)
            logger.critical(
                f'Importing OcrRogueEventOption fails, probably due to changes in {output_file}')
        try:
            from tasks.rogue.event.preset import STRATEGIES
        except AttributeError as e:
            logger.error(e)
            logger.critical(
                f'Importing preset strategies fails, probably due to changes in {output_file}')

    def iter_without_duplication(self, file: list, keys):
        # Yield hashes whose cn text has not been seen yet
        visited = set()
        for data in file:
            hash_ = deep_get(data, keys=keys)
            _, name = self.find_keyword(hash_, lang='cn')
            if name in visited:
                continue
            visited.add(name)
            yield hash_
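
    # Typical use (see generate() below): deduplicate rows of a display table
    # that share the same cn name before loading them as keywords.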

    def generate(self):
        self.load_keywords(['饰品提取', '差分宇宙', '模拟宇宙',
                            '拟造花萼(金)', '拟造花萼(赤)', '凝滞虚影', '侵蚀隧洞', '历战余响',
                            '最近更新', '忘却之庭', '虚构叙事', '末日幻影'])
        self.write_keywords(keyword_class='DungeonNav', output_file='./tasks/dungeon/keywords/nav.py')
        self.load_keywords(['行动摘要', '生存索引', '每日实训', '模拟宇宙', '逐光捡金', '战术训练'])
        self.write_keywords(keyword_class='DungeonTab', output_file='./tasks/dungeon/keywords/tab.py')
        self.load_keywords(['前往', '领取', '进行中', '已领取', '本日活跃度已满'])
        self.write_keywords(keyword_class='DailyQuestState', output_file='./tasks/daily/keywords/daily_quest_state.py')
        self.load_keywords(['领取', '追踪'])
        self.write_keywords(keyword_class='BattlePassQuestState',
                            output_file='./tasks/battle_pass/keywords/quest_state.py')
        self.generate_map_planes()
        self.generate_character_keywords()
        from dev_tools.keywords.dungeon_list import GenerateDungeonList
        GenerateDungeonList()()
        self.load_keywords(['进入', '传送', '追踪'])
        self.write_keywords(keyword_class='DungeonEntrance', output_file='./tasks/dungeon/keywords/dungeon_entrance.py')
        self.generate_shadow_with_characters()
        self.load_keywords(['奖励', '任务', ])
        self.write_keywords(keyword_class='BattlePassTab', output_file='./tasks/battle_pass/keywords/tab.py')
        self.load_keywords(['本周任务', '本期任务'])
        self.write_keywords(keyword_class='BattlePassMissionTab',
                            output_file='./tasks/battle_pass/keywords/mission_tab.py')
        # self.generate_assignments()
        self.generate_forgotten_hall_stages()
        self.generate_daily_quests()
        self.generate_battle_pass_quests()
        self.load_keywords(['养成材料', '光锥', '遗器', '其他材料', '消耗品', '任务', '贵重物', '材料置换'])
        self.write_keywords(keyword_class='ItemTab', text_convert=lambda name: name.replace(' ', ''),
                            output_file='./tasks/item/keywords/tab.py')
        from dev_tools.keywords.item import generate_items
        generate_items()
        self.generate_rogue_buff()
        self.load_keywords(['已强化'])
        self.write_keywords(keyword_class='RogueEnhancement', output_file='./tasks/rogue/keywords/enhancement.py')
        self.load_keywords(list(self.iter_without_duplication(
            read_file(os.path.join(TextMap.DATA_FOLDER, 'ExcelOutput', 'RogueMiracleDisplay.json')),
            'MiracleName.Hash')))
        self.write_keywords(keyword_class='RogueCurio', output_file='./tasks/rogue/keywords/curio.py')
        self.load_keywords(list(self.iter_without_duplication(
            read_file(os.path.join(TextMap.DATA_FOLDER, 'ExcelOutput', 'RogueBonus.json')), 'BonusTitle.Hash')))
        self.write_keywords(keyword_class='RogueBonus', output_file='./tasks/rogue/keywords/bonus.py')
        self.generate_rogue_events()


if __name__ == '__main__':
    TextMap.DATA_FOLDER = '../DanhengServer-Resources'
    KeywordExtract().generate()