Mirror of https://github.com/PaiGramTeam/HonkaiStarRailWikiDataParser.git, synced 2025-01-30 18:38:36 +00:00
refactor: remove almost bbs wiki
This commit is contained in:
parent
b275cc7b96
commit
1e01b141f5
@@ -11,10 +11,10 @@ async def main():
    text_map_data = await fetch_text_map()
    await fix_avatar_config(text_map_data)
    await fix_light_cone_config()
    await fetch_relic_config(text_map_data)
    await fetch_relic_config()
    await fix_set_image()
    await get_all_avatars()


if __name__ == '__main__':
if __name__ == "__main__":
    asyncio.run(main())
@@ -1,3 +0,0 @@
# wiki 解析

通过米游社官方 wiki 解析部分数据
@@ -1,19 +1,32 @@
from httpx import AsyncClient
import asyncio

from httpx import AsyncClient, TimeoutException

headers = {
    'authority': 'api-static.mihoyo.com',
    'accept': 'application/json, text/plain, */*',
    'accept-language': 'zh-CN,zh;q=0.9,zh-Hans;q=0.8,und;q=0.7,en;q=0.6,zh-Hant;q=0.5,ja;q=0.4',
    'dnt': '1',
    'origin': 'https://bbs.mihoyo.com',
    'referer': 'https://bbs.mihoyo.com/',
    'sec-ch-ua': '"Chromium";v="110", "Not A(Brand";v="24"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"Windows"',
    'sec-fetch-dest': 'empty',
    'sec-fetch-mode': 'cors',
    'sec-fetch-site': 'same-site',
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
    "authority": "api-static.mihoyo.com",
    "accept": "application/json, text/plain, */*",
    "accept-language": "zh-CN,zh;q=0.9,zh-Hans;q=0.8,und;q=0.7,en;q=0.6,zh-Hant;q=0.5,ja;q=0.4",
    "dnt": "1",
    "origin": "https://bbs.mihoyo.com",
    "referer": "https://bbs.mihoyo.com/",
    "sec-ch-ua": '"Chromium";v="110", "Not A(Brand";v="24"',
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": '"Windows"',
    "sec-fetch-dest": "empty",
    "sec-fetch-mode": "cors",
    "sec-fetch-site": "same-site",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36",
}
client = AsyncClient(headers=headers, timeout=120.0)


def retry(func):
    async def wrapper(*args, **kwargs):
        for i in range(3):
            try:
                return await func(*args, **kwargs)
            except TimeoutException:
                print(f"重试 {func.__name__} {i + 1} 次")
                await asyncio.sleep(1)

    return wrapper
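A quick usage sketch of the retry helper above (fetch_example is hypothetical, for illustration only): the decorated coroutine is retried up to three times when httpx raises TimeoutException, with a one-second pause between attempts, and implicitly returns None if every attempt times out, so callers may want to check for that.

@retry
async def fetch_example() -> dict:
    # client is the module-level AsyncClient defined above; a TimeoutException
    # raised here is caught by the wrapper, logged, and retried.
    resp = await client.get("https://api-static.mihoyo.com/common/blackboard/sr_wiki/v1/home/content/list")
    return resp.json()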
104 func/data.py Normal file
@@ -0,0 +1,104 @@
from pathlib import Path
from typing import List, Dict

import aiofiles
import ujson

from models.avatar import YattaAvatar
from models.light_cone import YattaLightCone
from models.material import YattaMaterial
from models.relic import YattaRelic

data_path = Path("data")
data_path.mkdir(exist_ok=True)

avatars_path = data_path / "avatars.json"
all_avatars: List[YattaAvatar] = []
all_avatars_map: Dict[int, YattaAvatar] = {}
all_avatars_name: Dict[str, YattaAvatar] = {}

light_cones_path = data_path / "light_cones.json"
all_light_cones: List[YattaLightCone] = []
all_light_cones_map: Dict[int, YattaLightCone] = {}
all_light_cones_name: Dict[str, YattaLightCone] = {}

materials_path = data_path / "materials.json"
all_materials: List[YattaMaterial] = []
all_materials_map: Dict[int, YattaMaterial] = {}
all_materials_name: Dict[str, YattaMaterial] = {}

relics_path = data_path / "relics.json"
all_relics: List[YattaRelic] = []


async def dump_avatars():
    data = [avatar.dict() for avatar in all_avatars]
    data.sort(key=lambda x: x["id"])
    async with aiofiles.open(avatars_path, "w", encoding="utf-8") as f:
        await f.write(ujson.dumps(data, indent=4, ensure_ascii=False))


async def read_avatars():
    all_avatars.clear()
    all_avatars_map.clear()
    all_avatars_name.clear()
    async with aiofiles.open(avatars_path, "r", encoding="utf-8") as f:
        data = ujson.loads(await f.read())
        for avatar in data:
            m = YattaAvatar(**avatar)
            all_avatars.append(m)
            all_avatars_map[m.id] = m
            all_avatars_name[m.name] = m


async def dump_light_cones():
    data = [light_cone.dict() for light_cone in all_light_cones]
    data.sort(key=lambda x: x["id"])
    async with aiofiles.open(light_cones_path, "w", encoding="utf-8") as f:
        await f.write(ujson.dumps(data, indent=4, ensure_ascii=False))


async def read_light_cones():
    async with aiofiles.open(light_cones_path, "r", encoding="utf-8") as f:
        data = ujson.loads(await f.read())
        all_light_cones.clear()
        all_light_cones_map.clear()
        all_light_cones_name.clear()
        for light_cone in data:
            m = YattaLightCone(**light_cone)
            all_light_cones.append(m)
            all_light_cones_map[m.id] = m
            all_light_cones_name[m.name] = m


async def dump_materials():
    data = [material.dict() for material in all_materials]
    data.sort(key=lambda x: x["id"])
    async with aiofiles.open(materials_path, "w", encoding="utf-8") as f:
        await f.write(ujson.dumps(data, indent=4, ensure_ascii=False))


async def read_materials():
    async with aiofiles.open(materials_path, "r", encoding="utf-8") as f:
        data = ujson.loads(await f.read())
        for material in data:
            m = YattaMaterial(**material)
            all_materials.append(m)
            all_materials_map[m.id] = m
            all_materials_name[m.name] = m


async def dump_relics():
    data = [relic.dict() for relic in all_relics]
    data.sort(key=lambda x: x["id"])
    async with aiofiles.open(relics_path, "w", encoding="utf-8") as f:
        await f.write(ujson.dumps(data, indent=4, ensure_ascii=False))


async def read_relics():
    all_relics.clear()
    async with aiofiles.open(relics_path, "r", encoding="utf-8") as f:
        data = ujson.loads(await f.read())
        for relic in data:
            m = YattaRelic(**relic)
            all_relics.append(m)
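A minimal sketch of how the new func/data module is meant to be used by callers (the demo coroutine is hypothetical; the module-level lists are normally filled by the fetch steps before dumping):

import asyncio

from func.data import all_avatars, all_avatars_map, dump_avatars, read_avatars


async def demo():
    await dump_avatars()   # writes data/avatars.json, sorted by id
    await read_avatars()   # clears and refills all_avatars and its id/name maps
    print(len(all_avatars), len(all_avatars_map))


asyncio.run(demo())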
@@ -5,10 +5,10 @@ from func.url import list_url
from models.wiki import Children


async def get_list(channel_id: str = '17') -> List[Children]:
async def get_list(channel_id: str = "17") -> List[Children]:
    params = {
        'app_sn': 'sr_wiki',
        'channel_id': channel_id,
        "app_sn": "sr_wiki",
        "channel_id": channel_id,
    }
    resp = await client.get(list_url, params=params)
    children = resp.json()["data"]["list"][0]["children"]
@@ -1,33 +1,13 @@
import asyncio
from pathlib import Path
from typing import List, Dict

import aiofiles
import ujson
from httpx import TimeoutException
from pydantic import ValidationError

from func.client import client
from func.client import client, retry
from func.data import all_avatars, all_avatars_map, all_avatars_name, dump_avatars
from models.avatar import YattaAvatar
from models.wiki import Content, Children
from res_func.url import avatar_yatta_url

all_avatars: List[YattaAvatar] = []
all_avatars_map: Dict[int, YattaAvatar] = {}
all_avatars_name: Dict[str, YattaAvatar] = {}


def retry(func):
    async def wrapper(*args, **kwargs):
        for i in range(3):
            try:
                return await func(*args, **kwargs)
            except TimeoutException:
                print(f"重试 {func.__name__} {i + 1} 次")
                await asyncio.sleep(1)

    return wrapper


def fix_avatar_eidolons(values: Dict) -> Dict:
    if values.get("eidolons") is None:
@@ -81,23 +61,4 @@ async def fetch_avatars(child: Children):
        if not avatar.icon.startswith("http"):
            avatar.icon = ""
    print("获取角色数据完成")


async def dump_avatars(path: Path):
    data = [avatar.dict() for avatar in all_avatars]
    data.sort(key=lambda x: x["id"])
    async with aiofiles.open(path, "w", encoding="utf-8") as f:
        await f.write(ujson.dumps(data, indent=4, ensure_ascii=False))


async def read_avatars(path: Path):
    all_avatars.clear()
    all_avatars_map.clear()
    all_avatars_name.clear()
    async with aiofiles.open(path, "r", encoding="utf-8") as f:
        data = ujson.loads(await f.read())
        for avatar in data:
            m = YattaAvatar(**avatar)
            all_avatars.append(m)
            all_avatars_map[m.id] = m
            all_avatars_name[m.name] = m
    await dump_avatars()
@@ -1,119 +1,38 @@
import asyncio
import re
from pathlib import Path
from typing import List, Dict
from typing import List

import aiofiles
import ujson
from bs4 import BeautifulSoup
from pydantic import ValidationError

from func.client import client
from func.fetch_materials import all_materials_map, all_materials_name
from func.url import info_url
from models.enums import Quality, Destiny
from models.light_cone import LightCone, LightConePromote, LightConeItem
from models.wiki import Children

all_light_cones: List[LightCone] = []
all_light_cones_name: Dict[str, LightCone] = {}
from func.client import client, retry
from func.data import all_light_cones, all_light_cones_name, dump_light_cones
from models.light_cone import YattaLightCone
from res_func.url import light_cone_yatta_url


async def fetch_light_cones(data: Children):
    for content in data.list:
        m_destiny = Destiny(re.findall(r'命途/(.*?)\\', content.ext)[0])
        m_quality = Quality(re.findall(r'星级/(.*?)\\', content.ext)[0])
        light_cone = LightCone(
            id=content.content_id,
            name=content.title,
            desc=content.summary,
            icon=content.icon,
            big_pic="",
            quality=m_quality,
            destiny=m_destiny,
            promote=[],
        )
        all_light_cones.append(light_cone)
        all_light_cones_name[light_cone.name] = light_cone
@retry
async def get_single_light_cone(url: str) -> None:
    req = await client.get(url)
    try:
        light_cone = YattaLightCone(**(req.json()["data"]))
    except Exception as e:
        print(f"{url} 获取光锥数据失败")
        raise e
    all_light_cones.append(light_cone)
    all_light_cones_name[light_cone.name] = light_cone


def parse_promote(light_cone: LightCone, soup: BeautifulSoup) -> None:
    """解析光锥突破数据"""
    mater = soup.find("div", {"data-part": "material"})
    trs = mater.find_all("tr")
    required_levels = [20, 30, 40, 50, 60, 70]
    max_level = [30, 40, 50, 60, 70, 80]
    for i in range(0, min(len(trs), 6)):
        promote = LightConePromote(
            required_level=required_levels[i],
            max_level=max_level[i],
            items=[],
        )
        materials = trs[i].find_all("li", {"class": "obc-tmpl__material-item"})
        for material in materials:
            try:
                mid = int(re.findall(r"content/(\d+)/detail", material.find("a").get("href"))[0])
            except AttributeError:
                continue
            name = material.find("p", {"class": "obc-tmpl__material-name"}).text
            item = all_materials_map.get(mid)
            if not item:
                item = all_materials_name.get(name)
            try:
                count = int(material.find("span", {"class": "obc-tmpl__material-num"}).text)
            except (AttributeError, ValueError):
                count = 1
            if name == "信用点":
                promote.coin = count
            elif item:
                promote.items.append(
                    LightConeItem(
                        item=item,
                        count=count,
                    )
                )
            else:
                print(f"unknown material: {mid}: {name}")
        light_cone.promote.append(promote)
@retry
async def get_all_light_cones() -> List[str]:
    req = await client.get(light_cone_yatta_url)
    return list(req.json()["data"]["items"].keys())


async def fetch_info(light_cone: LightCone):
    print(f"Fetch light_cone info: {light_cone.id}: {light_cone.name}")
    params = {
        'app_sn': 'sr_wiki',
        'content_id': str(light_cone.id),
    }
    resp = await client.get(info_url, params=params)
    data = resp.json()["data"]["content"]["contents"][0]["text"]
    soup = BeautifulSoup(data, "lxml")
    table = soup.find("table", {"class": "obc-tml-light-table--pc"})
    tr = table.find_all("tr")[-1]
    td = tr.find_all("td")[-1]
    light_cone.desc = td.get_text().strip()
    pic_td = soup.find("td", {"class": "obc-tmpl-character__avatar"})
    light_cone.big_pic = pic_td.find("img").get("src")
    parse_promote(light_cone, soup)


async def fetch_light_cones_infos():
    tasks = []
    for light_cone in all_light_cones:
        tasks.append(fetch_info(light_cone))
    await asyncio.gather(*tasks)


async def dump_light_cones(path: Path):
    data = [light_cone.dict() for light_cone in all_light_cones]
    data.sort(key=lambda x: x["id"])
    async with aiofiles.open(path, "w", encoding="utf-8") as f:
        await f.write(ujson.dumps(data, indent=4, ensure_ascii=False))


async def read_light_cones(path: Path):
    async with aiofiles.open(path, "r", encoding="utf-8") as f:
        data = ujson.loads(await f.read())
        all_light_cones.clear()
        all_light_cones_name.clear()
        for light_cone in data:
            m = LightCone(**light_cone)
            all_light_cones.append(m)
            all_light_cones_name[m.name] = m
async def fetch_light_cones():
    print("获取光锥数据")
    light_cones = await get_all_light_cones()
    for light_cone_id in light_cones:
        try:
            await get_single_light_cone(f"{light_cone_yatta_url}/{light_cone_id}")
        except ValidationError:
            print(f"{light_cone_yatta_url}/{light_cone_id} 获取光锥数据失败,数据格式异常")
    print("获取光锥数据完成")
    await dump_light_cones()
@@ -1,93 +1,50 @@
import asyncio
import re
from pathlib import Path
from typing import List, Dict

import aiofiles
import ujson
from bs4 import BeautifulSoup
from pydantic import ValidationError

from func.client import client
from func.url import info_url
from models.enums import Quality, MaterialType
from models.material import Material
from models.wiki import Children

star_map = {
    1: Quality.One,
    2: Quality.Two,
    3: Quality.Three,
    4: Quality.Four,
    5: Quality.Five,
}
all_materials: List[Material] = []
all_materials_map: Dict[int, Material] = {}
all_materials_name: Dict[str, Material] = {}
from func.client import client, retry
from func.data import (
    all_materials,
    all_materials_map,
    all_materials_name,
    dump_materials,
)
from models.material import YattaMaterial
from res_func.url import material_yatta_url


async def fetch_materials(data: Children, default: str = "其他材料"):
    for content in data.list:
        quality: Quality = Quality.Three
        if result := re.search(r"(\d+)星", content.ext):
            quality: Quality = star_map[int(result[1])]
def fix_material(values: Dict) -> Dict:
    if values.get("source") is None:
        values["source"] = []
    return values


@retry
async def get_single_material(url: str) -> None:
    req = await client.get(url)
    try:
        material = YattaMaterial(**fix_material(req.json()["data"]))
    except Exception as e:
        print(f"{url} 获取材料数据失败")
        raise e
    all_materials.append(material)
    all_materials_map[material.id] = material
    all_materials_name[material.name] = material


@retry
async def get_all_materials() -> List[str]:
    req = await client.get(material_yatta_url)
    return list(req.json()["data"]["items"].keys())


async def fetch_materials():
    print("获取材料数据")
    materials = await get_all_materials()
    for material_id in materials:
        try:
            m_type = re.findall(r'用途/(.*?)\\', content.ext)[0]
        except IndexError:
            m_type = default
        if m_type == "角色晋阶":
            m_type = "角色晋阶材料"
        m_type = MaterialType(m_type)
        material = Material(
            id=content.content_id,
            name=content.title,
            desc=content.summary,
            icon=content.icon,
            quality=quality,
            type=m_type,
        )
        all_materials.append(material)
        all_materials_map[material.id] = material
        all_materials_name[material.name] = material


async def fetch_info(material: Material):
    print(f"Fetch material info: {material.id}: {material.name}")
    params = {
        'app_sn': 'sr_wiki',
        'content_id': str(material.id),
    }
    resp = await client.get(info_url, params=params)
    data = resp.json()["data"]["content"]["contents"][0]["text"]
    soup = BeautifulSoup(data, "lxml")
    table = soup.find("table", {"class": "material-table--pc"})
    if result := re.search(r"(\d+)星", table.text):
        material.quality = star_map[int(result[1])]
    ps = table.find_all("p", {"style": "white-space: pre-wrap;"})
    text = ""
    for p in ps:
        text += f"{p.get_text()}\n"
    material.desc = text.strip()


async def fetch_materials_infos():
    tasks = []
    for material in all_materials:
        tasks.append(fetch_info(material))
    await asyncio.gather(*tasks)


async def dump_materials(path: Path):
    data = [material.dict() for material in all_materials]
    data.sort(key=lambda x: x["id"])
    async with aiofiles.open(path, "w", encoding="utf-8") as f:
        await f.write(ujson.dumps(data, indent=4, ensure_ascii=False))


async def read_materials(path: Path):
    async with aiofiles.open(path, "r", encoding="utf-8") as f:
        data = ujson.loads(await f.read())
        for material in data:
            m = Material(**material)
            all_materials.append(m)
            all_materials_map[m.id] = m
            all_materials_name[m.name] = m
            await get_single_material(f"{material_yatta_url}/{material_id}")
        except ValidationError:
            print(f"{material_yatta_url}/{material_id} 获取材料数据失败,数据格式异常")
    print("获取材料数据完成")
    await dump_materials()
@@ -1,62 +1,28 @@
import asyncio
from pathlib import Path
from typing import List
from typing import List, Dict

import aiofiles
import ujson
from bs4 import BeautifulSoup
from pydantic import ValidationError

from func.client import client
from func.url import info_url
from models.relic import Relic
from models.wiki import Children

all_relics: List[Relic] = []
from func.client import client, retry
from func.data import all_relics, dump_relics
from models.relic import YattaRelic
from res_func.url import relic_yatta_url


async def fetch_relics(data: Children):
    for content in data.list:
        relic = Relic(
            id=content.content_id,
            bbs_id=content.content_id,
            name=content.title,
            icon=content.icon,
            affect="",
            image_list=[],
        )
        all_relics.append(relic)
@retry
async def get_all_relics() -> List[Dict]:
    req = await client.get(relic_yatta_url)
    return list(req.json()["data"]["items"].values())


async def fetch_info(relic: Relic):
    print(f"Fetch relic info: {relic.id}: {relic.name}")
    params = {
        'app_sn': 'sr_wiki',
        'content_id': str(relic.id),
    }
    resp = await client.get(info_url, params=params)
    data = resp.json()["data"]["content"]["contents"][0]["text"]
    soup = BeautifulSoup(data, "lxml")
    relic.affect = soup.find("div", {"class": "obc-tmpl-relic__story"}).get_text().strip()


async def fetch_relics_infos():
    tasks = []
    for relic in all_relics:
        tasks.append(fetch_info(relic))
    await asyncio.gather(*tasks)


async def dump_relics(path: Path):
    data = [relic.dict() for relic in all_relics]
    data.sort(key=lambda x: x["id"])
    async with aiofiles.open(path, "w", encoding="utf-8") as f:
        await f.write(ujson.dumps(data, indent=4, ensure_ascii=False))


async def read_relics(path: Path):
    all_relics.clear()
    async with aiofiles.open(path, "r", encoding="utf-8") as f:
        data = ujson.loads(await f.read())
        for relic in data:
            m = Relic(**relic)
            all_relics.append(m)
async def fetch_relics():
    print("获取遗器数据")
    relics = await get_all_relics()
    for relic in relics:
        try:
            relic_ = YattaRelic(**relic)
            all_relics.append(relic_)
        except ValidationError as e:
            raise e
            print(f"{relic} 解析遗器数据失败,数据格式异常")
    print("获取遗器数据完成")
    await dump_relics()
@@ -1,3 +1,2 @@
base_url = "https://api-static.mihoyo.com/common/blackboard/sr_wiki/v1"
list_url = f"{base_url}/home/content/list"
info_url = f"{base_url}/content/info"
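For context, these sr_wiki blackboard endpoints are queried with app_sn and channel_id query parameters, as in get_list() earlier in this diff (a small sketch; the function name is illustrative and simply mirrors the code shown above):

from func.client import client
from func.url import list_url


async def get_children(channel_id: str = "17"):
    # Same request get_list() performs against the bbs wiki list endpoint.
    params = {"app_sn": "sr_wiki", "channel_id": channel_id}
    resp = await client.get(list_url, params=params)
    return resp.json()["data"]["list"][0]["children"]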
79 main.py
@@ -1,78 +1,29 @@
import asyncio
from pathlib import Path

from func.fetch_all import get_list
from func.fetch_avatars import fetch_avatars, dump_avatars, read_avatars
from func.fetch_light_cones import fetch_light_cones, fetch_light_cones_infos, dump_light_cones, read_light_cones
from func.fetch_materials import fetch_materials, fetch_materials_infos, dump_materials, read_materials
from func.fetch_relics import fetch_relics, fetch_relics_infos, dump_relics, read_relics
from func.fetch_avatars import fetch_avatars
from func.fetch_light_cones import fetch_light_cones
from func.fetch_materials import fetch_materials
from func.fetch_relics import fetch_relics
from func.fetch_src import move_files

data_path = Path("data")
data_path.mkdir(exist_ok=True)


async def wiki(
    override_materials: bool = True,
    override_avatars: bool = True,
    override_light_cones: bool = True,
    override_relics: bool = True,
):
async def wiki():
    main_data = await get_list()
    if override_materials:
        await fetch_materials(main_data[4])
        await fetch_materials(main_data[5])
        await fetch_materials(main_data[12], "消耗品")
        await fetch_materials(main_data[10], "任务道具")
        await fetch_materials(main_data[13], "贵重物")
        await fetch_materials_infos()
        await dump_materials(data_path / "materials.json")
    else:
        await read_materials(data_path / "materials.json")
    if override_avatars:
        await fetch_avatars(main_data[0])
        await dump_avatars(data_path / "avatars.json")
    else:
        await read_avatars(data_path / "avatars.json")
    if override_light_cones:
        await fetch_light_cones(main_data[1])
        await fetch_light_cones_infos()
        await dump_light_cones(data_path / "light_cones.json")
    else:
        await read_light_cones(data_path / "light_cones.json")
    if override_relics:
        await fetch_relics(main_data[2])
        await fetch_relics_infos()
        await dump_relics(data_path / "relics.json")
    else:
        await read_relics(data_path / "relics.json")
    await fetch_avatars(main_data[0])
    await fetch_light_cones()
    await fetch_materials()
    await fetch_relics()


async def bbs_photos():
    await move_files()


async def main(
    override_materials: bool = True,
    override_avatars: bool = True,
    override_light_cones: bool = True,
    override_relics: bool = True,
    override_bbs_photos: bool = True,
):
    await wiki(override_materials, override_avatars, override_light_cones, override_relics)
    if override_bbs_photos:
        await bbs_photos()
async def main():
    await wiki()
    await bbs_photos()


if __name__ == '__main__':
    override_material_ = True
    override_avatar_ = True
    override_light_cone_ = True
    override_relic_ = True
    override_bbs_photo_ = True
    asyncio.run(main(
        override_material_,
        override_avatar_,
        override_light_cone_,
        override_relic_,
        override_bbs_photo_,
    ))
if __name__ == "__main__":
    asyncio.run(main())
@@ -59,10 +59,10 @@ class YattaAvatar(BaseModel):

    @property
    def destiny(self) -> Destiny:
        """ 命途 """
        """命途"""
        return Destiny(self.types.pathType.name)

    @property
    def element(self) -> Element:
        """ 属性 """
        """属性"""
        return Element(self.types.combatType.name)
@@ -1,51 +1,29 @@
# 光锥
from typing import List

from pydantic import BaseModel

from .enums import Quality, Destiny
from .material import Material

class YattaLightConePath(BaseModel):
    id: str
    name: str


class LightConeItem(BaseModel):
    item: Material
    """物品"""
    count: int
    """数量"""
class YattaLightConeTypes(BaseModel):
    pathType: YattaLightConePath


class LightConePromote(BaseModel):
    required_level: int
    """突破所需等级"""
    promote_level: int = 0
    """突破等级"""
    max_level: int
    """解锁的等级上限"""

    coin: int = 0
    """信用点"""
    items: List[LightConeItem]
    """突破所需材料"""


class LightCone(BaseModel):
class YattaLightCone(BaseModel):
    id: int
    """"光锥ID"""
    name: str
    """名称"""
    desc: str
    description: str
    """描述"""
    icon: str
    icon: str = ""
    """图标"""
    big_pic: str
    big_pic: str = ""
    """大图"""
    quality: Quality
    rank: int
    """稀有度"""
    destiny: Destiny
    types: YattaLightConeTypes
    """命途"""
    promote: List[LightConePromote]
    """晋阶信息"""

    @property
    def rarity(self) -> int:
        return 5 - list(Quality).index(self.quality)
    route: str
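To make the model change concrete, a sketch of instantiating the new YattaLightCone directly (the payload values are invented; field names follow the definitions above, and the real yatta equipment response that get_single_light_cone feeds into this model carries additional keys):

from models.light_cone import YattaLightCone

# Invented example payload, trimmed to the fields declared above.
payload = {
    "id": 20000,
    "name": "示例光锥",
    "description": "示例描述",
    "rank": 4,
    "types": {"pathType": {"id": "Knight", "name": "存护"}},
    "route": "lightcone",
}
light_cone = YattaLightCone(**payload)
print(light_cone.name, light_cone.rank, light_cone.types.pathType.name)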
@@ -1,19 +1,35 @@
# 材料
from typing import List, Optional

from pydantic import BaseModel

from .enums import Quality, MaterialType

class MaterialSource(BaseModel):
    description: str


class Material(BaseModel):
class MaterialMType(BaseModel):
    id: int
    name: str


class YattaMaterial(BaseModel):
    id: int
    """材料ID"""
    name: str
    """名称"""
    desc: str
    """介绍"""
    icon: str
    """图标"""
    quality: Quality
    description: str
    """描述"""
    story: str
    """故事"""
    rank: int
    """稀有度"""
    type: MaterialType
    source: List[MaterialSource]
    """来源"""
    type: Optional[MaterialMType] = None
    """类型"""
    route: str

    @property
    def icon(self) -> str:
        return f"https://api.yatta.top/hsr/assets/UI/item/{self.id}.png"
@@ -1,25 +0,0 @@
# 敌对物种
from pydantic import BaseModel

from .enums import MonsterType, Area


class Monster(BaseModel):
    id: int
    """怪物ID"""
    name: str
    """名称"""
    desc: str
    """介绍"""
    icon: str
    """图标"""
    big_pic: str
    """大图"""
    type: MonsterType
    """种类"""
    area: Area
    """地区"""
    resistance: str
    """抗性"""
    find_area: str
    """发现地点"""
@@ -4,16 +4,13 @@ from typing import List
from pydantic import BaseModel


class Relic(BaseModel):
class YattaRelic(BaseModel):
    id: int
    """遗器套装ID"""
    bbs_id: int
    """WIKI ID"""
    name: str
    """套装名称"""
    icon: str
    """套装图标"""
    affect: str
    """套装效果"""
    image_list: List[str]
    image_list: List[str] = []
    """套装子图"""
    route: str
@@ -16,7 +16,9 @@ timeout = httpx.Timeout(

class HTTPXRequest(AbstractAsyncContextManager):
    def __init__(self, *args, headers=None, **kwargs):
        self._client = httpx.AsyncClient(headers=headers, timeout=timeout, *args, **kwargs)
        self._client = httpx.AsyncClient(
            headers=headers, timeout=timeout, *args, **kwargs
        )

    async def __aenter__(self):
        try:
@@ -27,8 +29,10 @@ class HTTPXRequest(AbstractAsyncContextManager):
            raise exc

    async def __aexit__(
        self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType]
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        await self.initialize()
@@ -12,16 +12,25 @@ __all__ = ("HyperionRequest",)

class HyperionRequest(HTTPXRequest):
    async def get(
        self, url: str, *args, de_json: bool = True, re_json_data: bool = False, **kwargs
        self,
        url: str,
        *args,
        de_json: bool = True,
        re_json_data: bool = False,
        **kwargs,
    ) -> Union[POST_DATA, JSON_DATA, Response]:
        try:
            response = await self._client.get(url=url, *args, **kwargs)
        except httpx.TimeoutException as err:
            raise APIHelperTimedOut from err
        except httpx.HTTPError as exc:
            raise NetworkException(f"Unknown error in HTTP implementation: {repr(exc)}") from exc
            raise NetworkException(
                f"Unknown error in HTTP implementation: {repr(exc)}"
            ) from exc
        if response.is_error:
            raise ResponseException(message=f"response error in status code: {response.status_code}")
            raise ResponseException(
                message=f"response error in status code: {response.status_code}"
            )
        if not de_json:
            return response
        json_data = response.json()
@@ -32,23 +41,34 @@ class HyperionRequest(HTTPXRequest):
            return json_data
        if return_code != 0:
            if message is None:
                raise ResponseException(message=f"response error in return code: {return_code}")
                raise ResponseException(
                    message=f"response error in return code: {return_code}"
                )
            raise ResponseException(response=json_data)
        if not re_json_data and data is not None:
            return data
        return json_data

    async def post(
        self, url: str, *args, de_json: bool = True, re_json_data: bool = False, **kwargs
        self,
        url: str,
        *args,
        de_json: bool = True,
        re_json_data: bool = False,
        **kwargs,
    ) -> Union[POST_DATA, JSON_DATA, Response]:
        try:
            response = await self._client.post(url=url, *args, **kwargs)
        except httpx.TimeoutException as err:
            raise APIHelperTimedOut from err
        except httpx.HTTPError as exc:
            raise NetworkException(f"Unknown error in HTTP implementation: {repr(exc)}") from exc
            raise NetworkException(
                f"Unknown error in HTTP implementation: {repr(exc)}"
            ) from exc
        if response.is_error:
            raise ResponseException(message=f"response error in status code: {response.status_code}")
            raise ResponseException(
                message=f"response error in status code: {response.status_code}"
            )
        if not de_json:
            return response
        json_data = response.json()
@@ -59,7 +79,9 @@ class HyperionRequest(HTTPXRequest):
            return json_data
        if return_code != 0:
            if message is None:
                raise ResponseException(message=f"response error in return code: {return_code}")
                raise ResponseException(
                    message=f"response error in return code: {return_code}"
                )
            raise ResponseException(response=json_data)
        if not re_json_data and data is not None:
            return data
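For reference, a sketch of how a caller typically uses the reformatted get() (the module path and URL here are illustrative assumptions; with de_json=True the method unwraps the miHoYo-style response envelope and returns its data payload, raising ResponseException on a non-zero return code or HTTP error status):

# Import path is an assumption based on __all__ above, not confirmed by this diff.
from modules.apihelper.request.hyperionrequest import HyperionRequest


async def fetch_news():
    request = HyperionRequest(headers={"Referer": "https://bbs.mihoyo.com/"})
    # Keyword arguments such as params are forwarded to the underlying httpx client.
    data = await request.get(
        "https://bbs-api.miyoushe.com/post/wapi/getNewsList",
        params={"gids": 6, "type_id": 1, "page_size": 20},
        de_json=True,
    )
    return data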
@@ -17,9 +17,13 @@ class Hyperion:
    """

    POST_FULL_URL = "https://bbs-api.miyoushe.com/post/wapi/getPostFull"
    POST_FULL_IN_COLLECTION_URL = "https://bbs-api.miyoushe.com/post/wapi/getPostFullInCollection"
    POST_FULL_IN_COLLECTION_URL = (
        "https://bbs-api.miyoushe.com/post/wapi/getPostFullInCollection"
    )
    GET_NEW_LIST_URL = "https://bbs-api.miyoushe.com/post/wapi/getNewsList"
    GET_OFFICIAL_RECOMMENDED_POSTS_URL = "https://bbs-api.miyoushe.com/post/wapi/getOfficialRecommendedPosts"
    GET_OFFICIAL_RECOMMENDED_POSTS_URL = (
        "https://bbs-api.miyoushe.com/post/wapi/getOfficialRecommendedPosts"
    )

    USER_AGENT = (
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
@@ -39,7 +43,9 @@ class Hyperion:
        # https://m.miyoushe.com/ys/#/article/32497914
        :return: post_id
        """
        rgx = re.compile(r"(?:bbs|www\.)?(?:miyoushe|mihoyo)\.com/[^.]+/article/(?P<article_id>\d+)")
        rgx = re.compile(
            r"(?:bbs|www\.)?(?:miyoushe|mihoyo)\.com/[^.]+/article/(?P<article_id>\d+)"
        )
        matches = rgx.search(text)
        if matches is None:
            return -1
@@ -56,7 +62,9 @@ class Hyperion:
        return {"User-Agent": self.USER_AGENT, "Referer": referer}

    @staticmethod
    def get_list_url_params(forum_id: int, is_good: bool = False, is_hot: bool = False, page_size: int = 20) -> dict:
    def get_list_url_params(
        forum_id: int, is_good: bool = False, is_hot: bool = False, page_size: int = 20
    ) -> dict:
        return {
            "forum_id": forum_id,
            "gids": 2,
@@ -68,7 +76,11 @@ class Hyperion:

    @staticmethod
    def get_images_params(
        resize: int = 600, quality: int = 80, auto_orient: int = 0, interlace: int = 1, images_format: str = "jpg"
        resize: int = 600,
        quality: int = 80,
        auto_orient: int = 0,
        interlace: int = 1,
        images_format: str = "jpg",
    ):
        """
        image/resize,s_600/quality,q_80/auto-orient,0/interlace,1/format,jpg
@@ -87,12 +99,22 @@ class Hyperion:

    async def get_official_recommended_posts(self, gids: int) -> JSON_DATA:
        params = {"gids": gids}
        response = await self.client.get(url=self.GET_OFFICIAL_RECOMMENDED_POSTS_URL, params=params)
        response = await self.client.get(
            url=self.GET_OFFICIAL_RECOMMENDED_POSTS_URL, params=params
        )
        return response

    async def get_post_full_in_collection(self, collection_id: int, gids: int = 2, order_type=1) -> JSON_DATA:
        params = {"collection_id": collection_id, "gids": gids, "order_type": order_type}
        response = await self.client.get(url=self.POST_FULL_IN_COLLECTION_URL, params=params)
    async def get_post_full_in_collection(
        self, collection_id: int, gids: int = 2, order_type=1
    ) -> JSON_DATA:
        params = {
            "collection_id": collection_id,
            "gids": gids,
            "order_type": order_type,
        }
        response = await self.client.get(
            url=self.POST_FULL_IN_COLLECTION_URL, params=params
        )
        return response

    async def get_post_info(self, gids: int, post_id: int, read: int = 1) -> PostInfo:
@@ -100,7 +122,9 @@ class Hyperion:
        response = await self.client.get(self.POST_FULL_URL, params=params)
        return PostInfo.paste_data(response)

    async def get_images_by_post_id(self, gids: int, post_id: int) -> List[ArtworkImage]:
    async def get_images_by_post_id(
        self, gids: int, post_id: int
    ) -> List[ArtworkImage]:
        post_info = await self.get_post_info(gids, post_id)
        art_list = []
        task_list = [
@@ -118,15 +142,23 @@ class Hyperion:
        art_list.sort(key=take_page)
        return art_list

    async def download_image(self, art_id: int, url: str, page: int = 0) -> ArtworkImage:
    async def download_image(
        self, art_id: int, url: str, page: int = 0
    ) -> ArtworkImage:
        filename = os.path.basename(url)
        _, file_extension = os.path.splitext(filename)
        is_image = bool(file_extension in ".jpg" or file_extension in ".png")
        response = await self.client.get(
            url, params=self.get_images_params(resize=2000) if is_image else None, de_json=False
            url,
            params=self.get_images_params(resize=2000) if is_image else None,
            de_json=False,
        )
        return ArtworkImage(
            art_id=art_id, page=page, file_name=filename, file_extension=url.split(".")[-1], data=response.content
            art_id=art_id,
            page=page,
            file_name=filename,
            file_extension=url.split(".")[-1],
            data=response.content,
        )

    async def get_new_list(self, gids: int, type_id: int, page_size: int = 20):
@@ -17,7 +17,11 @@ class ResponseException(APIHelperException):
    code: int = 0
    message: str = ""

    def __init__(self, response: Optional[Mapping[str, Any]] = None, message: Optional[str] = None) -> None:
    def __init__(
        self,
        response: Optional[Mapping[str, Any]] = None,
        message: Optional[str] = None,
    ) -> None:
        if response is None:
            self.message = message
            _message = message
@@ -7,7 +7,7 @@ import aiofiles
import ujson
from bs4 import BeautifulSoup, Tag

from func.fetch_avatars import read_avatars, all_avatars_name, dump_avatars, all_avatars_map
from func.data import all_avatars_map, all_avatars_name, read_avatars, dump_avatars
from models.avatar_config import AvatarConfig, AvatarIcon
from .client import client
from .url import avatar_config, text_map, base_station_url, avatar_url
@@ -48,8 +48,14 @@ async def parse_station(datas, name: str, tag: Tag, cid: int):

    third_pic = get_third_pic()
    text = soup.find("div", {"class": "a6678 a4af5"}).get("style")
    four_pic = f'{base_station_url}{text[text.find("(") + 2:text.find(")") - 1]}' if text else ""
    first_pic = f'{base_station_url}{soup.find("img", {"class": "ac39b a6602"}).get("src")}'
    four_pic = (
        f'{base_station_url}{text[text.find("(") + 2:text.find(")") - 1]}'
        if text
        else ""
    )
    first_pic = (
        f'{base_station_url}{soup.find("img", {"class": "ac39b a6602"}).get("src")}'
    )
    datas.append(
        AvatarIcon(
            id=cid,
@@ -67,10 +73,7 @@ async def dump_icons(path: Path, datas: List[AvatarIcon]):


async def fetch_station_ktz(tasks, datas, player_avatars: List[Tag]):
    data_map = {
        "开拓者·毁灭": (8001, 8002),
        "开拓者·存护": (8003, 8004)
    }
    data_map = {"开拓者·毁灭": (8001, 8002), "开拓者·存护": (8003, 8004)}
    idx = 0
    for key, value in data_map.items():
        tasks.append(parse_station(datas, key, player_avatars[idx], value[0]))
@@ -92,7 +95,11 @@ async def fetch_station(configs_map: Dict[str, AvatarConfig]) -> List[AvatarIcon]:
            player_avatars.append(avatar)
            continue
        avatar_model = configs_map.get(name)
        tasks.append(parse_station(datas, name, avatar, avatar_model.AvatarID if avatar_model else None))
        tasks.append(
            parse_station(
                datas, name, avatar, avatar_model.AvatarID if avatar_model else None
            )
        )
    await fetch_station_ktz(tasks, datas, player_avatars)
    await asyncio.gather(*tasks)
    return datas
@@ -110,8 +117,8 @@ async def fix_avatar_config(text_map_data: Dict[str, str]):
    configs_map: Dict[str, AvatarConfig] = {config.name: config for config in configs}
    print(f"读取到原始数据:{list(configs_map.keys())}")
    data_path = Path("data")
    await read_avatars(data_path / "avatars.json")
    await read_avatars()
    await fix_avatar_config_ktz()
    icons = await fetch_station(configs_map)
    await dump_icons(data_path / "avatar_icons.json", icons)
    await dump_avatars(data_path / "avatars.json")
    await dump_avatars()
@@ -6,7 +6,7 @@ import aiofiles
import ujson
from bs4 import BeautifulSoup, Tag

from func.fetch_light_cones import read_light_cones, all_light_cones_name, dump_light_cones
from func.data import all_light_cones_map, read_light_cones, dump_light_cones
from models.light_cone_config import LightConeIcon
from .client import client
from .url import base_station_url, light_cone_url
@@ -18,6 +18,11 @@ async def parse_station(icon: LightConeIcon, tag: Tag):
    first_pic = f'{base_station_url}{soup.find("img", {"class": "standard-icon a6602"}).get("src")}'
    second_pic = f'{base_station_url}{soup.find("img", {"class": "a2b16 mobile-only-elem ab8c3"}).get("src")}'
    icon.icon = [first_pic, second_pic]
    if light_cone_model := all_light_cones_map.get(icon.id):
        light_cone_model.icon = first_pic
        light_cone_model.big_pic = second_pic
    else:
        print(f"yatta 未找到光锥数据 {icon.name} ,修复 图标 失败")


async def dump_icons(path: Path, datas: List[LightConeIcon]):
@@ -42,15 +47,7 @@ async def fetch_station() -> List[LightConeIcon]:
        if "lightcone/" not in url:
            continue
        nid = int(url.split("/")[-1])
        if light_cone_model := all_light_cones_name.get(name):
            light_cone_model.id = nid
        else:
            print(f"wiki 未找到光锥数据 {name} ,修复 id 失败")
        icon = LightConeIcon(
            id=nid,
            name=name,
            icon=[]
        )
        icon = LightConeIcon(id=nid, name=name, icon=[])
        datas.append(icon)
        tasks.append(parse_station(icon, light_cone))
    await asyncio.gather(*tasks)
@@ -59,7 +56,7 @@

async def fix_light_cone_config():
    data_path = Path("data")
    await read_light_cones(data_path / "light_cones.json")
    await read_light_cones()
    icons = await fetch_station()
    await dump_icons(data_path / "light_cone_icons.json", icons)
    await dump_light_cones(data_path / "light_cones.json")
    await dump_light_cones()
@@ -4,11 +4,10 @@ from typing import List, Dict
import aiofiles
import ujson

from func.fetch_relics import read_relics, dump_relics, all_relics
from models.enums import RelicAffix, RelicPosition
from models.relic_affix import RelicAffixAll, SingleRelicAffix
from res_func.client import client
from res_func.url import relic_config, relic_main_affix_config, relic_sub_affix_config, relic_set_config
from res_func.url import relic_config, relic_main_affix_config, relic_sub_affix_config

final_datas: List[RelicAffixAll] = []
final_datas_map: Dict[str, RelicAffixAll] = {}
@@ -78,23 +77,6 @@ async def fetch_sub_affix():
    print("遗器副词条配置获取完毕")


async def fix_set_id(text_map_data: Dict[str, str]):
    print("开始修复遗器套装ID")
    set_req = await client.get(relic_set_config)
    set_data = set_req.json()
    set_data_map: Dict[str, int] = {}
    for key, value in set_data.items():
        set_data_map[text_map_data[str(value["SetName"]["Hash"])]] = value["SetID"]
    await read_relics(Path("data") / "relics.json")
    for relic_set in all_relics:
        if set_id := set_data_map.get(relic_set.name):
            relic_set.id = set_id
        else:
            print(f"套装{relic_set.name}没有找到对应的ID")
    await dump_relics(Path("data") / "relics.json")
    print("遗器套装ID修复完毕")


async def dump_relic_config(path: Path):
    final_data = [data.dict() for data in final_datas]
    final_data.sort(key=lambda x: x["id"])
@@ -102,10 +84,9 @@ async def dump_relic_config(path: Path):
        await f.write(ujson.dumps(final_data, indent=4, ensure_ascii=False))


async def fetch_relic_config(text_map_data: Dict[str, str]):
async def fetch_relic_config():
    await fetch_all_relic()
    await fetch_main_affix()
    await fetch_sub_affix()
    data_path = Path("data")
    await dump_relic_config(data_path / "relic_config.json")
    await fix_set_id(text_map_data)
@@ -1,18 +1,15 @@
from pathlib import Path
from typing import Dict, Tuple, List

from bs4 import BeautifulSoup

from func.fetch_relics import all_relics, read_relics, dump_relics
from func.data import read_relics, all_relics, dump_relics
from res_func.client import client
from res_func.url import relic_url, base_station_url

relics_path = Path("data") / "relics.json"


async def fix_set_image():
    print("开始修复遗器套装图片")
    await read_relics(relics_path)
    await read_relics()
    req = await client.get(relic_url)
    soup = BeautifulSoup(req.text, "lxml")
    divs = soup.find_all("a", {"class": "aff5a"})
@@ -36,5 +33,5 @@ async def fix_set_image():
            relic.image_list = data_map[relic.id][1]
        else:
            print(f"套装 {relic.id} 没有找到对应的图片")
    await dump_relics(relics_path)
    await dump_relics()
    print("遗器套装图片修复完毕")
@@ -16,3 +16,6 @@ relic_url = f"{base_station_url}/cn/relics"
base_yatta_url = "https://api.yatta.top"
avatar_yatta_url = f"{base_yatta_url}/hsr/v2/cn/avatar"
avatar_skill_url = f"{base_yatta_url}/hsr/assets/UI/skill/"
light_cone_yatta_url = f"{base_yatta_url}/hsr/v2/cn/equipment"
material_yatta_url = f"{base_yatta_url}/hsr/v2/cn/item"
relic_yatta_url = f"{base_yatta_url}/hsr/v2/cn/relic"
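A small sketch of how these yatta endpoints are consumed by the new fetchers in this commit: the list endpoint is requested once, and each returned id is appended to the same base URL for the detail request (mirrors get_all_light_cones / get_single_light_cone above; the response shape is taken from that parsing code).

from func.client import client, retry
from res_func.url import light_cone_yatta_url


@retry
async def list_light_cone_ids() -> list:
    # The list endpoint returns {"data": {"items": {"<id>": {...}, ...}}}.
    req = await client.get(light_cone_yatta_url)
    return list(req.json()["data"]["items"].keys())


# The per-item detail request is then composed as:
#     await get_single_light_cone(f"{light_cone_yatta_url}/{light_cone_id}")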
@@ -6,7 +6,8 @@ from typing import List
import aiofiles
import ujson

from func.fetch_avatars import all_avatars
from func.client import retry
from func.data import all_avatars
from models.avatar import YattaAvatar
from res_func.client import client
from res_func.url import avatar_skill_url
@@ -17,18 +18,6 @@ avatars_skills_path = Path("data/skill")
avatars_skills_path.mkdir(exist_ok=True, parents=True)


def retry(func):
    async def wrapper(*args, **kwargs):
        for i in range(3):
            try:
                return await func(*args, **kwargs)
            except Exception:
                print(f"重试 {func.__name__} {i + 1} 次")
                await asyncio.sleep(1)

    return wrapper


@retry
async def get_single_avatar_skill_icon(url: str, real_path: str) -> None:
    req = await client.get(url)
@@ -51,7 +40,9 @@ async def get_single_avatar_skill_icon(url: str, real_path: str) -> None:

async def dump_icons():
    final_data = dict(sorted(avatar_data.items(), key=lambda x: x[0]))
    async with aiofiles.open("data/avatar_eidolon_icons.json", "w", encoding="utf-8") as f:
    async with aiofiles.open(
        "data/avatar_eidolon_icons.json", "w", encoding="utf-8"
    ) as f:
        await f.write(ujson.dumps(final_data, indent=4, ensure_ascii=False))


@@ -78,11 +69,13 @@ async def get_all_avatars_skills_icons(avatars: List[YattaAvatar]):
            tasks.append(
                get_single_avatar_skill_icon(
                    f"{avatar_skill_url}SkillIcon_{avatar.id}_{remote_path[i]}.png",
                    f"{avatar.id}_{local_path[i]}.png"
                    f"{avatar.id}_{local_path[i]}.png",
                )
            )
    await asyncio.gather(*tasks)
    tasks.clear()
    datas = [file.name.split(".")[0] for file in avatars_skills_path.glob("*")]
    async with aiofiles.open(avatars_skills_path / "info.json", "w", encoding="utf-8") as f:
    async with aiofiles.open(
        avatars_skills_path / "info.json", "w", encoding="utf-8"
    ) as f:
        await f.write(json.dumps(datas, indent=4, ensure_ascii=False))