mirror of
https://github.com/PaiGramTeam/HonkaiStarRailWikiDataParser.git
synced 2025-01-30 18:38:36 +00:00
🐛 fix: relic set image
This commit is contained in:
parent
de0d049d60
commit
393316df10
@ -3,6 +3,7 @@ import asyncio
|
||||
from res_func.avatar import fix_avatar_config, fetch_text_map
|
||||
from res_func.light_cone import fix_light_cone_config
|
||||
from res_func.relic import fetch_relic_config
|
||||
from res_func.relic_res import fix_set_image
|
||||
from res_func.honkai_gg.avatar import get_all_avatars
|
||||
|
||||
|
||||
@ -11,6 +12,7 @@ async def main():
|
||||
await fix_avatar_config(text_map_data)
|
||||
await fix_light_cone_config()
|
||||
await fetch_relic_config(text_map_data)
|
||||
await fix_set_image()
|
||||
await get_all_avatars()
|
||||
|
||||
|
||||
|
@ -18,9 +18,11 @@ async def fetch_relics(data: Children):
|
||||
for content in data.list:
|
||||
relic = Relic(
|
||||
id=content.content_id,
|
||||
bbs_id=content.content_id,
|
||||
name=content.title,
|
||||
icon=content.icon,
|
||||
affect="",
|
||||
image_list=[],
|
||||
)
|
||||
all_relics.append(relic)
|
||||
|
||||
@ -55,6 +57,6 @@ async def read_relics(path: Path):
|
||||
all_relics.clear()
|
||||
async with aiofiles.open(path, "r", encoding="utf-8") as f:
|
||||
data = ujson.loads(await f.read())
|
||||
for monster in data:
|
||||
m = Relic(**monster)
|
||||
for relic in data:
|
||||
m = Relic(**relic)
|
||||
all_relics.append(m)
|
||||
|
@ -1,13 +1,19 @@
|
||||
# 遗器套装
|
||||
from typing import List
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class Relic(BaseModel):
    """A relic (遗器) set entry parsed from the wiki.

    Holds the set's identifiers, display assets and effect text as
    scraped from the wiki / station pages.
    """

    id: int  # relic-set ID
    bbs_id: int  # wiki (BBS) content ID
    name: str  # set name
    icon: str  # set icon URL
    affect: str  # set-effect description text
    image_list: List[str]  # auxiliary images belonging to the set
|
||||
|
@ -33,6 +33,11 @@ class RelicAffixAll(BaseModel):
|
||||
sub_affix: Dict[str, SingleRelicAffix]
|
||||
""" 副词条 """
|
||||
|
||||
@property
|
||||
def rarity(self) -> int:
|
||||
rarity_map = {9: 3, 12: 4, 16: 5}
|
||||
return rarity_map.get(self.max_level, 2)
|
||||
|
||||
@root_validator(pre=True)
|
||||
def transform_dicts(cls, values):
|
||||
for data in ["main_affix", "sub_affix"]:
|
||||
|
34
res_func/relic_res.py
Normal file
34
res_func/relic_res.py
Normal file
@ -0,0 +1,34 @@
|
||||
from bs4 import BeautifulSoup
|
||||
from pathlib import Path
|
||||
from typing import Dict, Tuple, List
|
||||
|
||||
from func.fetch_relics import all_relics, read_relics, dump_relics
|
||||
from res_func.client import client
|
||||
from res_func.url import relic_url, base_station_url
|
||||
|
||||
# Local JSON file that stores the parsed relic-set data.
relics_path = Path("data") / "relics.json"


async def fix_set_image():
    """Patch relic-set icons and sub-images from the station site.

    Loads the local relic data, scrapes the relic listing page, maps each
    set id to (icon, other images), writes the fixed data back to disk.
    Sets whose scraped image count is unexpected, or that have no match
    on the page, are reported and left untouched.
    """
    print("开始修复遗器套装图片")
    await read_relics(relics_path)
    resp = await client.get(relic_url)
    soup = BeautifulSoup(resp.text, "lxml")
    anchors = soup.find_all("a", {"class": "aff5a ae026 a2a45"})
    data_map: Dict[int, Tuple[str, List[str]]] = {}
    for anchor in anchors:
        # The set id is the last path component of the card's link.
        set_id = int(anchor.get("href").split("/")[-1])
        urls = [
            f"{base_station_url}{img.get('src')}"
            for img in anchor.find_all("img")
        ]
        # A valid card carries the set icon plus 2 or 4 piece images.
        if len(urls) in {3, 5}:
            data_map[set_id] = (urls[0], urls[1:])
        else:
            print(f"套装 {set_id} 图片数量异常")
    for relic in all_relics:
        entry = data_map.get(relic.id)
        if entry is None:
            print(f"套装 {relic.id} 没有找到对应的图片")
        else:
            relic.icon, relic.image_list = entry
    await dump_relics(relics_path)
    print("遗器套装图片修复完毕")
|
@ -12,6 +12,7 @@ relic_set_config = f"{base_data_url}ExcelOutput/RelicSetConfig.json"
|
||||
base_station_url = "https://starrailstation.com"
|
||||
avatar_url = f"{base_station_url}/cn/characters"
|
||||
light_cone_url = f"{base_station_url}/cn/equipment"
|
||||
relic_url = f"{base_station_url}/cn/relics"
|
||||
base_honkai_url = "https://honkai.gg"
|
||||
avatar_honkai_url = f"{base_honkai_url}/cn/characters"
|
||||
avatar_icon_honkai_url = f"{base_honkai_url}/images/spriteoutput/skillicons"
|
||||
|
Loading…
Reference in New Issue
Block a user