from copy import deepcopy

from cached_property import cached_property

from deploy.Windows.utils import DEPLOY_TEMPLATE, poor_yaml_read, poor_yaml_write
from module.base.timer import timer
from module.config.convert import *
from module.config.server import VALID_SERVER
from module.config.utils import *

CONFIG_IMPORT = '''
import datetime

# This file was automatically generated by module/config/config_updater.py.
# Don't modify it manually.


class GeneratedConfig:
    """
    Auto generated configuration
    """
'''.strip().split('\n')


DICT_GUI_TO_INGAME = {
    'zh-CN': 'cn',
    'en-US': 'en',
    'ja-JP': 'jp',
    'zh-TW': 'cht',
}


def gui_lang_to_ingame_lang(lang: str) -> str:
    return DICT_GUI_TO_INGAME.get(lang, 'en')
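# Illustrative examples (not part of the original file):
#   gui_lang_to_ingame_lang('zh-TW')  -> 'cht'
#   gui_lang_to_ingame_lang('fr-FR')  -> 'en'   (unknown GUI languages fall back to English)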


def get_generator():
    from module.base.code_generator import CodeGenerator
    return CodeGenerator()


class ConfigGenerator:
    @cached_property
    def argument(self):
        """
        Load argument.yaml and standardise its structure.

        <group>:
            <argument>:
                type: checkbox|select|textarea|input
                value:
                option (Optional): Options, if the argument has any options.
                validate (Optional): datetime
        """
        data = {}
        raw = read_file(filepath_argument('argument'))

        def option_add(keys, options):
            options = deep_get(raw, keys=keys, default=[]) + options
            deep_set(raw, keys=keys, value=options)

        # Insert packages
        option_add(keys='Emulator.PackageName.option', options=list(VALID_SERVER.keys()))

        # Insert dungeons
        from tasks.dungeon.keywords import DungeonList
        option_add(
            keys='Dungeon.Name.option',
            options=[dungeon.name for dungeon in DungeonList.instances.values() if dungeon.is_daily_dungeon])
        # Double events
        option_add(
            keys='Dungeon.NameAtDoubleCalyx.option',
            options=[dungeon.name for dungeon in DungeonList.instances.values() if dungeon.is_Calyx])
        option_add(
            keys='Dungeon.NameAtDoubleRelic.option',
            options=[dungeon.name for dungeon in DungeonList.instances.values() if dungeon.is_Cavern_of_Corrosion])
        # Dungeon daily
        option_add(
            keys='DungeonDaily.CalyxGolden.option',
            options=[dungeon.name for dungeon in DungeonList.instances.values() if dungeon.is_Calyx_Golden])
        option_add(
            keys='DungeonDaily.CalyxCrimson.option',
            options=[dungeon.name for dungeon in DungeonList.instances.values() if dungeon.is_Calyx_Crimson])
        option_add(
            keys='DungeonDaily.StagnantShadow.option',
            options=[dungeon.name for dungeon in DungeonList.instances.values() if dungeon.is_Stagnant_Shadow])
        option_add(
            keys='DungeonDaily.CavernOfCorrosion.option',
            options=[dungeon.name for dungeon in DungeonList.instances.values() if dungeon.is_Cavern_of_Corrosion])

        # Insert characters
        from tasks.character.keywords import CharacterList
        unsupported_characters = []
        characters = [character.name for character in CharacterList.instances.values()
                      if character.name not in unsupported_characters]
        option_add(keys='DungeonSupport.Character.option', options=characters)

        # Insert daily quests
        from tasks.daily.keywords import DailyQuest
        for quest in DailyQuest.instances.values():
            quest: DailyQuest
            deep_set(raw, keys=['AchievableQuest', quest.name], value={
                'type': 'state',
                'value': 'achievable',
                'option': ['achievable', 'not_set', 'not_supported'],
                'option_bold': ['achievable'],
                'option_light': ['not_supported'],
            })

        # Load
        for path, value in deep_iter(raw, depth=2):
            arg = {
                'type': 'input',
                'value': '',
                # option
            }
            if not isinstance(value, dict):
                value = {'value': value}
            arg['type'] = data_to_type(value, arg=path[1])
            if arg['type'] == 'stored':
                value['value'] = {}
                arg['display'] = 'hide'  # Hide `stored` by default
            if isinstance(value['value'], datetime):
                arg['type'] = 'datetime'
                arg['validate'] = 'datetime'
            # Manual definition has the highest priority
            arg.update(value)
            deep_set(data, keys=path, value=arg)

        return data
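
    # Illustrative sketch (assumed shape, not from the original file): after standardisation,
    # each entry of `argument` looks roughly like
    #     {'<group>': {'<argument>': {'type': 'checkbox', 'value': False, 'option': [...]}}}
    # with `type` inferred by data_to_type() and `option`/`validate` only present when defined.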

    @cached_property
    def task(self):
        """
        <task_group>:
            <task>:
                <group>:
        """
        return read_file(filepath_argument('task'))

    @cached_property
    def default(self):
        """
        <task>:
            <group>:
                <argument>: value
        """
        return read_file(filepath_argument('default'))

    @cached_property
    def override(self):
        """
        <task>:
            <group>:
                <argument>: value
        """
        return read_file(filepath_argument('override'))

    @cached_property
    def gui(self):
        """
        <i18n_group>:
            <i18n_key>: value, value is None
        """
        return read_file(filepath_argument('gui'))

    @cached_property
    @timer
    def args(self):
        """
        Merge definitions into standardised json.

            task.yaml ---+
        argument.yaml ---+-----> args.json
        override.yaml ---+
         default.yaml ---+
        """
        # Construct args
        data = {}
        for path, groups in deep_iter(self.task, depth=3):
            if 'tasks' not in path:
                continue
            task = path[2]
            # Add storage to all tasks
            # groups.append('Storage')
            for group in groups:
                if group not in self.argument:
                    print(f'`{task}.{group}` is not related to any argument group')
                    continue
                deep_set(data, keys=[task, group], value=deepcopy(self.argument[group]))

        def check_override(path, value):
            # Check existence
            old = deep_get(data, keys=path, default=None)
            if old is None:
                print(f'`{".".join(path)}` is not an existing argument')
                return False
            # Check type
            # But allow `Interval` to be different
            old_value = old.get('value', None) if isinstance(old, dict) else old
            value = value.get('value', None) if isinstance(value, dict) else value
            if type(value) != type(old_value) \
                    and old_value is not None \
                    and path[2] not in ['SuccessInterval', 'FailureInterval']:
                print(
                    f'`{value}` ({type(value)}) and `{".".join(path)}` ({type(old_value)}) are in different types')
                return False
            # Check option
            if isinstance(old, dict) and 'option' in old:
                if value not in old['option']:
                    print(f'`{value}` is not an option of argument `{".".join(path)}`')
                    return False
            return True
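
        # check_override() guards the two passes below: a default or override entry is only
        # applied if the argument exists in the constructed args, has a compatible value type,
        # and (when the argument has options) the new value is one of them.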

        # Set defaults
        for p, v in deep_iter(self.default, depth=3):
            if not check_override(p, v):
                continue
            deep_set(data, keys=p + ['value'], value=v)
        # Override non-modifiable arguments
        for p, v in deep_iter(self.override, depth=3):
            if not check_override(p, v):
                continue
            if isinstance(v, dict):
                typ = v.get('type')
                if typ == 'state':
                    pass
                elif typ == 'lock':
                    deep_default(v, keys='display', value="disabled")
                elif deep_get(v, keys='value') is not None:
                    deep_default(v, keys='display', value='hide')
                for arg_k, arg_v in v.items():
                    deep_set(data, keys=p + [arg_k], value=arg_v)
            else:
                deep_set(data, keys=p + ['value'], value=v)
                deep_set(data, keys=p + ['display'], value='hide')
        # Set command
        for path, groups in deep_iter(self.task, depth=3):
            if 'tasks' not in path:
                continue
            task = path[2]
            if deep_get(data, keys=f'{task}.Scheduler.Command'):
                deep_set(data, keys=f'{task}.Scheduler.Command.value', value=task)
                deep_set(data, keys=f'{task}.Scheduler.Command.display', value='hide')

        return data
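
    # Illustrative sketch (assumed shape): the resulting args.json is keyed as
    #     <task>.<group>.<argument> -> {"type": ..., "value": ..., ...}
    # i.e. the argument definition from argument.yaml merged with default.yaml and override.yaml.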

    @timer
    def generate_code(self):
        """
        Generate python code.

        args.json ---> config_generated.py
        """
        visited_group = set()
        visited_path = set()
        lines = CONFIG_IMPORT
        for path, data in deep_iter(self.argument, depth=2):
            group, arg = path
            if group not in visited_group:
                lines.append('')
                lines.append(f'    # Group `{group}`')
                visited_group.add(group)

            option = ''
            if 'option' in data and data['option']:
                option = '  # ' + ', '.join([str(opt) for opt in data['option']])
            path = '.'.join(path)
            lines.append(f'    {path_to_arg(path)} = {repr(parse_value(data["value"], data=data))}{option}')
            visited_path.add(path)

        with open(filepath_code(), 'w', encoding='utf-8', newline='') as f:
            for text in lines:
                f.write(text + '\n')
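
    # Illustrative example (assumed, not from the original file): a generated attribute in
    # config_generated.py looks roughly like
    #     Scheduler_Enable = False
    # with a trailing `# option1, option2` comment when the argument defines options.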

    @timer
    def generate_stored(self):
        import module.config.stored.classes as classes
        gen = get_generator()
        gen.add('from module.config.stored.classes import (')
        with gen.tab():
            for cls in sorted([name for name in dir(classes) if name.startswith('Stored')]):
                gen.add(cls + ',')
        gen.add(')')
        gen.Empty()
        gen.Empty()
        gen.Empty()
        gen.CommentAutoGenerage('module/config/config_updater.py')

        with gen.Class('StoredGenerated'):
            for path, data in deep_iter(self.args, depth=3):
                cls = data.get('stored')
                if cls:
                    gen.add(f'{path[-1]} = {cls}("{".".join(path)}")')

        gen.write('module/config/stored/stored_generated.py')
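
    # Each attribute generated into StoredGenerated follows the pattern
    #     <argument_name> = <StoredClass>("<task>.<group>.<argument_name>")
    # one line per argument in args.json whose definition carries a `stored` class name.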

    @timer
    def generate_i18n(self, lang):
        """
        Load old translations and generate a new translation file.

                      args.json ---+-----> i18n/<lang>.json
        (old) i18n/<lang>.json ---+
        """
        new = {}
        old = read_file(filepath_i18n(lang))

        def deep_load(keys, default=True, words=('name', 'help')):
            for word in words:
                k = keys + [str(word)]
                d = ".".join(k) if default else str(word)
                v = deep_get(old, keys=k, default=d)
                deep_set(new, keys=k, value=v)
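
        # For untranslated keys deep_load() keeps a visible placeholder: the dotted key path
        # when default=True, or the bare word when default=False, so gaps in i18n/<lang>.json
        # are easy to spot.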

        # Menu
        for path, data in deep_iter(self.task, depth=3):
            if 'tasks' not in path:
                continue
            task_group, _, task = path
            deep_load(['Menu', task_group])
            deep_load(['Task', task])
        # Arguments
        visited_group = set()
        for path, data in deep_iter(self.argument, depth=2):
            if path[0] not in visited_group:
                deep_load([path[0], '_info'])
                visited_group.add(path[0])
            deep_load(path)
            if 'option' in data:
                deep_load(path, words=data['option'], default=False)

        # Package names
        # for package, server in VALID_PACKAGE.items():
        #     path = ['Emulator', 'PackageName', package]
        #     if deep_get(new, keys=path) == package:
        #         deep_set(new, keys=path, value=server.upper())
        # for package, server_and_channel in VALID_CHANNEL_PACKAGE.items():
        #     server, channel = server_and_channel
        #     name = deep_get(new, keys=['Emulator', 'PackageName', to_package(server)])
        #     if lang == SERVER_TO_LANG[server]:
        #         value = f'{name} {channel}渠道服 {package}'
        #     else:
        #         value = f'{name} {package}'
        #     deep_set(new, keys=['Emulator', 'PackageName', package], value=value)
        # Game server names
        # for server, _list in VALID_SERVER_LIST.items():
        #     for index in range(len(_list)):
        #         path = ['Emulator', 'ServerName', f'{server}-{index}']
        #         prefix = server.split('_')[0].upper()
        #         prefix = '国服' if prefix == 'CN' else prefix
        #         deep_set(new, keys=path, value=f'[{prefix}] {_list[index]}')

        # Dungeon names
        if lang not in ['zh-CN', 'zh-TW', 'en-US']:
            ingame_lang = gui_lang_to_ingame_lang(lang)
            from tasks.dungeon.keywords import DungeonList
            dailies = deep_get(self.argument, keys='Dungeon.Name.option')
            for dungeon in DungeonList.instances.values():
                if dungeon.name in dailies:
                    value = dungeon.__getattribute__(ingame_lang)
                    deep_set(new, keys=['Dungeon', 'Name', dungeon.name], value=value)

        # Copy dungeon i18n to double events
        def update_dungeon_names(keys):
            for dungeon in deep_get(new, keys=keys).values():
                if '_' in dungeon:
                    value = deep_get(new, keys=['Dungeon', 'Name', dungeon])
                    if value:
                        deep_set(new, keys=f'{keys}.{dungeon}', value=value)

        update_dungeon_names('Dungeon.NameAtDoubleCalyx')
        update_dungeon_names('Dungeon.NameAtDoubleRelic')
        update_dungeon_names('DungeonDaily.CalyxGolden')
        update_dungeon_names('DungeonDaily.CalyxCrimson')
        update_dungeon_names('DungeonDaily.StagnantShadow')
        update_dungeon_names('DungeonDaily.CavernOfCorrosion')

        # Character names
        from tasks.character.keywords import CharacterList
        ingame_lang = gui_lang_to_ingame_lang(lang)
        characters = deep_get(self.argument, keys='DungeonSupport.Character.option')
        for character in CharacterList.instances.values():
            if character.name in characters:
                value = character.__getattribute__(ingame_lang)
                if "Trailblazer" in value:
                    continue
                deep_set(new, keys=['DungeonSupport', 'Character', character.name], value=value)

        # Daily quests
        from tasks.daily.keywords import DailyQuest
        for quest in DailyQuest.instances.values():
            value = quest.__getattribute__(ingame_lang)
            deep_set(new, keys=['AchievableQuest', quest.name, 'name'], value=value)
            # deep_set(new, keys=['DailyQuest', quest.name, 'help'], value='')
            copy_from = 'Complete_1_Daily_Mission'
            if quest.name != copy_from:
                for option in deep_get(self.args, keys=['DailyQuest', 'AchievableQuest', copy_from, 'option']):
                    value = deep_get(new, keys=['AchievableQuest', copy_from, option])
                    deep_set(new, keys=['AchievableQuest', quest.name, option], value=value)

        # GUI i18n
        for path, _ in deep_iter(self.gui, depth=2):
            group, key = path
            deep_load(keys=['Gui', group], words=(key,))

        # zh-TW
        dic_repl = {
            '設置': '設定',
            '支持': '支援',
            '啓': '啟',
            '异': '異',
            '服務器': '伺服器',
            '文件': '檔案',
            '自定義': '自訂',
        }
        if lang == 'zh-TW':
            for path, value in deep_iter(new, depth=3):
                for before, after in dic_repl.items():
                    value = value.replace(before, after)
                deep_set(new, keys=path, value=value)

        write_file(filepath_i18n(lang), new)

    @cached_property
    def menu(self):
        """
        Generate menu definitions.

        task.yaml --> menu.json
        """
        data = {}
        for task_group in self.task.keys():
            value = deep_get(self.task, keys=[task_group, 'menu'])
            if value not in ['collapse', 'list']:
                value = 'collapse'
            deep_set(data, keys=[task_group, 'menu'], value=value)
            value = deep_get(self.task, keys=[task_group, 'page'])
            if value not in ['setting', 'tool']:
                value = 'setting'
            deep_set(data, keys=[task_group, 'page'], value=value)
            tasks = deep_get(self.task, keys=[task_group, 'tasks'], default={})
            tasks = list(tasks.keys())
            deep_set(data, keys=[task_group, 'tasks'], value=tasks)

        # Simulated universe is WIP, the task won't show in the GUI but can still be bound,
        # e.g. `RogueUI('src', task='Rogue')`
        # Comment this out during development
        data.pop('Rogue')

        return data
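
    # Illustrative sketch (assumed shape): menu.json maps each task group to its GUI layout,
    #     {"<task_group>": {"menu": "collapse", "page": "setting", "tasks": ["<task>", ...]}}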

    @cached_property
    def stored(self):
        import module.config.stored.classes as classes
        data = {}
        for path, value in deep_iter(self.args, depth=3):
            if value.get('type') != 'stored':
                continue
            name = path[-1]
            stored = value.get('stored')
            stored_class = getattr(classes, stored)
            row = {
                'name': name,
                'path': '.'.join(path),
                'i18n': f'{path[1]}.{path[2]}.name',
                'stored': stored,
                'attrs': stored_class('')._attrs,
                'order': value.get('order', 0),
                'color': value.get('color', '#777777'),
            }
            data[name] = row

        # Sort by `order` ascending, with `order` == 0 last
        data = sorted(data.items(), key=lambda kv: (kv[1]['order'] == 0, kv[1]['order']))
        data = {k: v for k, v in data}
        return data
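
    # Each row of the resulting stored.json carries the keys
    #     name, path, i18n, stored, attrs, order, color
    # where `path` is the dotted <task>.<group>.<argument> key and `attrs` are the default
    # attributes of the corresponding Stored* class.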

    @staticmethod
    def generate_deploy_template():
        template = poor_yaml_read(DEPLOY_TEMPLATE)
        cn = {
            'Repository': 'cn',
            'PypiMirror': 'https://pypi.tuna.tsinghua.edu.cn/simple',
            'Language': 'zh-CN',
        }
        aidlux = {
            'GitExecutable': '/usr/bin/git',
            'PythonExecutable': '/usr/bin/python',
            'RequirementsFile': './deploy/AidLux/0.92/requirements.txt',
            'AdbExecutable': '/usr/bin/adb',
        }

        docker = {
            'GitExecutable': '/usr/bin/git',
            'PythonExecutable': '/usr/local/bin/python',
            'RequirementsFile': './deploy/docker/requirements.txt',
            'AdbExecutable': '/usr/bin/adb',
        }

        def update(suffix, *args):
            file = f'./config/deploy.{suffix}.yaml'
            new = deepcopy(template)
            for dic in args:
                new.update(dic)
            poor_yaml_write(data=new, file=file)

        update('template')
        update('template-cn', cn)
        # update('template-AidLux', aidlux)
        # update('template-AidLux-cn', aidlux, cn)
        # update('template-docker', docker)
        # update('template-docker-cn', docker, cn)

        tpl = {
            'Repository': '{{repository}}',
            'GitExecutable': '{{gitExecutable}}',
            'PythonExecutable': '{{pythonExecutable}}',
            'AdbExecutable': '{{adbExecutable}}',
            'Language': '{{language}}',
            'Theme': '{{theme}}',
        }

        def update(file, *args):
            new = deepcopy(template)
            for dic in args:
                new.update(dic)
            poor_yaml_write(data=new, file=file)

        update('./webapp/packages/main/public/deploy.yaml.tpl', tpl)
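
    # Note: the two `update` helpers above intentionally shadow each other. The first one
    # writes ./config/deploy.<suffix>.yaml variants; the second fills a template with
    # {{placeholder}} values, presumably substituted later by the webapp frontend.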

    def insert_assignment(self):
        from tasks.assignment.keywords import AssignmentEntry
        assignments = [entry.name for entry in AssignmentEntry.instances.values()]
        for i in range(4):
            deep_set(self.argument, keys=f'Assignment.Name_{i + 1}.option', value=assignments)
            deep_set(self.args, keys=f'Assignment.Assignment.Name_{i + 1}.option', value=assignments)

    @timer
    def generate(self):
        _ = self.args
        _ = self.menu
        _ = self.stored
        # _ = self.event
        self.insert_assignment()
        # self.insert_server()
        write_file(filepath_args(), self.args)
        write_file(filepath_args('menu'), self.menu)
        write_file(filepath_args('stored'), self.stored)
        self.generate_code()
        self.generate_stored()
        for lang in LANGUAGES:
            self.generate_i18n(lang)
        self.generate_deploy_template()
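
    # Note: the throwaway `_ = self.args` style accesses in generate() force the cached
    # properties to be built before insert_assignment() mutates them in place and before
    # they are written to disk.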


class ConfigUpdater:
    # source, target, (optional) convert_func
    redirection = [
        ('Dungeon.Dungeon.Support', 'Dungeon.DungeonSupport.Use'),
        ('Dungeon.Dungeon.SupportCharacter', 'Dungeon.DungeonSupport.Character'),
        ('Dungeon.Dungeon.Name', 'Dungeon.Dungeon.Name', convert_daily),
        ('Dungeon.Dungeon.NameAtDoubleCalyx', 'Dungeon.Dungeon.NameAtDoubleCalyx', convert_daily),
        ('Dungeon.DungeonDaily.CalyxCrimson', 'Dungeon.DungeonDaily.CalyxCrimson', convert_daily),
    ]
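
    # Each redirection entry reads the value at the old (source) key, optionally converts it
    # with convert_func, and writes it to the new (target) key, so renamed or re-valued
    # settings survive a config upgrade (see config_redirect below).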

    @cached_property
    def args(self):
        return read_file(filepath_args())

    def config_update(self, old, is_template=False):
        """
        Args:
            old (dict):
            is_template (bool):

        Returns:
            dict:
        """
        new = {}

        def deep_load(keys):
            data = deep_get(self.args, keys=keys, default={})
            value = deep_get(old, keys=keys, default=data['value'])
            typ = data['type']
            display = data.get('display')
            if is_template or value is None or value == '' or typ == 'lock' or (display == 'hide' and typ != 'stored'):
                value = data['value']
            value = parse_value(value, data=data)
            deep_set(new, keys=keys, value=value)
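
        # deep_load() keeps the user's old value when it is usable, and falls back to the
        # template default for templates, empty/None values, locked arguments, and hidden
        # non-stored arguments.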

        for path, _ in deep_iter(self.args, depth=3):
            deep_load(path)

        if not is_template:
            new = self.config_redirect(old, new)
        new = self.update_state(new)

        return new

    def config_redirect(self, old, new):
        """
        Convert old settings to the new ones.

        Args:
            old (dict):
            new (dict):

        Returns:
            dict:
        """
        for row in self.redirection:
            if len(row) == 2:
                source, target = row
                update_func = None
            elif len(row) == 3:
                source, target, update_func = row
            else:
                continue

            if isinstance(source, tuple):
                value = []
                error = False
                for attribute in source:
                    tmp = deep_get(old, keys=attribute)
                    if tmp is None:
                        error = True
                        continue
                    value.append(tmp)
                if error:
                    continue
            else:
                value = deep_get(old, keys=source)
                if value is None:
                    continue

            if update_func is not None:
                value = update_func(value)

            if isinstance(target, tuple):
                for k, v in zip(target, value):
                    # Allow updating the same key
                    if (deep_get(old, keys=k) is None) or (source == target):
                        deep_set(new, keys=k, value=v)
            elif (deep_get(old, keys=target) is None) or (source == target):
                deep_set(new, keys=target, value=value)

        return new

    @staticmethod
    def update_state(data):
        def set_daily(quest, value):
            if value is True:
                value = 'achievable'
            if value is False:
                value = 'not_set'
            deep_set(data, keys=['DailyQuest', 'AchievableQuest', quest], value=value)

        set_daily('Complete_1_Daily_Mission', 'not_supported')
        # Dungeon
        dungeon = deep_get(data, keys='Dungeon.Scheduler.Enable')
        set_daily('Clear_Calyx_Golden_1_times',
                  dungeon and deep_get(data, 'Dungeon.DungeonDaily.CalyxGolden') != 'do_not_achieve')
        set_daily('Complete_Calyx_Crimson_1_time',
                  dungeon and deep_get(data, 'Dungeon.DungeonDaily.CalyxCrimson') != 'do_not_achieve')
        set_daily('Clear_Stagnant_Shadow_1_times',
                  dungeon and deep_get(data, 'Dungeon.DungeonDaily.StagnantShadow') != 'do_not_achieve')
        set_daily('Clear_Cavern_of_Corrosion_1_times',
                  dungeon and deep_get(data, 'Dungeon.DungeonDaily.CavernOfCorrosion') != 'do_not_achieve')
        # Combat requirements
        set_daily('In_a_single_battle_inflict_3_Weakness_Break_of_different_Types', 'not_supported')
        set_daily('Inflict_Weakness_Break_5_times', 'not_supported')
        set_daily('Defeat_a_total_of_20_enemies', 'not_supported')
        set_daily('Enter_combat_by_attacking_enemy_Weakness_and_win_3_times', 'not_supported')
        set_daily('Use_Technique_2_times', 'achievable')
        # Other game systems
        set_daily('Go_on_assignment_1_time', deep_get(data, 'Assignment.Scheduler.Enable'))
        set_daily('Take_1_photo', 'achievable')
        set_daily('Destroy_3_destructible_objects', 'not_supported')
        set_daily('Complete_Forgotten_Hall_1_time', 'achievable')
        set_daily('Complete_Echo_of_War_1_times', 'not_supported')
        set_daily('Complete_1_stage_in_Simulated_Universe_Any_world', 'not_supported')
        set_daily('Obtain_victory_in_combat_with_support_characters_1_time',
                  dungeon and deep_get(data, 'Dungeon.DungeonSupport.Use') in ['when_daily', 'always_use'])
        set_daily('Use_an_Ultimate_to_deal_the_final_blow_1_time', 'not_supported')
        # Build
        set_daily('Level_up_any_character_1_time', 'not_supported')
        set_daily('Level_up_any_Light_Cone_1_time', 'not_supported')
        set_daily('Level_up_any_Relic_1_time', 'not_supported')
        # Items
        set_daily('Salvage_any_Relic', 'achievable')
        set_daily('Synthesize_Consumable_1_time', 'achievable')
        set_daily('Synthesize_material_1_time', 'achievable')
        set_daily('Use_Consumables_1_time', 'achievable')
        return data

    def read_file(self, config_name, is_template=False):
        """
        Read and update config file.

        Args:
            config_name (str): ./config/{file}.json
            is_template (bool):

        Returns:
            dict:
        """
        old = read_file(filepath_config(config_name))
        new = self.config_update(old, is_template=is_template)
        # The updated config is not written back to file here, although that doesn't matter.
        # Commented out for performance reasons.
        # self.write_file(config_name, new)
        return new

    @staticmethod
    def write_file(config_name, data, mod_name='alas'):
        """
        Write config file.

        Args:
            config_name (str): ./config/{file}.json
            data (dict):
            mod_name (str):
        """
        write_file(filepath_config(config_name, mod_name), data)

    @timer
    def update_file(self, config_name, is_template=False):
        """
        Read, update and write config file.

        Args:
            config_name (str): ./config/{file}.json
            is_template (bool):

        Returns:
            dict:
        """
        data = self.read_file(config_name, is_template=is_template)
        self.write_file(config_name, data)
        return data


if __name__ == '__main__':
    """
    Process the whole config generation.

                 task.yaml -+----------------> menu.json
             argument.yaml -+-> args.json ---> config_generated.py
             override.yaml -+       |
                  gui.yaml --------\|
                                    ||
    (old) i18n/<lang>.json --------\\========> i18n/<lang>.json
       (old) template.json ---------\========> template.json
    """
    # Ensure running in Alas root folder
    import os

    os.chdir(os.path.join(os.path.dirname(__file__), '../../'))

    ConfigGenerator().generate()
    ConfigUpdater().update_file('template', is_template=True)
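
# Typical usage (assumed from the __main__ guard above): run this file directly with the
# project's Python environment, e.g. `python module/config/config_updater.py`. The os.chdir()
# call makes it safe to launch from any working directory.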