gacha_log: support importing gacha records exported from paimon.moe and 非小酋 (feixiaoqiu)

SiHuaN 2022-10-20 15:48:40 +08:00 committed by GitHub
parent 0e3e6fcbb6
commit 54146a97af
5 changed files with 205 additions and 22 deletions
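For context, this is roughly the shape of the UIGF v2.2 payload that import_gacha_log_data consumes and that the xlsx converter below produces. The field names are taken from the diff itself (data["info"]["uid"], data["list"], and the keys written by qy2_json / export_json); the uigf_payload name and all concrete values are placeholders, not data from any real export:

    # Sketch of a UIGF v2.2 payload; keys mirror the diff below, values are made up.
    uigf_payload = {
        "info": {
            "uid": "100000001",  # the importer compares int(uid) with client.uid when verify_uid is True
            "lang": "zh-cn",
            "export_time": "2022-10-20 15:48:40",
            "export_timestamp": 1666252120.0,
            "export_app": "paimon_moe_to_uigf",
            "export_app_version": 1,
            "uigf_version": "v2.2",
        },
        "list": [
            {
                "gacha_type": 301,  # the converter writes the UIGF gacha type into this field as well
                "item_id": "",
                "count": -1,
                "time": "2022-10-01 12:00:00",
                "name": "甘雨",
                "item_type": "角色",
                "rank_type": 5,
                "id": 1,
                "uigf_gacha_type": 301,
            }
        ],
    }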

modules/apihelper/gacha_log.py

@@ -263,11 +263,13 @@ class GachaLog:
             return False, "导入失败,数据格式错误"
 
     @staticmethod
-    async def import_gacha_log_data(user_id: int, client: Client, data: dict):
+    async def import_gacha_log_data(
+        user_id: int, client: Client, data: dict, verify_uid: bool = True
+    ) -> Tuple[bool, str]:
         new_num = 0
         try:
             uid = data["info"]["uid"]
-            if int(uid) != client.uid:
+            if verify_uid and int(uid) != client.uid:
                 raise GachaLogAccountNotFound
             # 检查导入数据是否合法
             all_items = [GachaItem(**i) for i in data["list"]]
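The new verify_uid flag exists because xlsx exports converted further down carry no trustworthy uid (the converter fills in 0), so the check against the bound account has to be skippable. A minimal usage sketch, assuming the Tuple[bool, str] return annotation above; user_id, client and the uigf_payload dict from the earlier sketch are stand-ins, and GachaLogService is the alias the plugin below imports for this class:

    # Sketch only: UIGF json files carry a real uid and should be verified;
    # data converted from a paimon.moe / 非小酋 xlsx has uid 0, so the check is skipped.
    success, message_text = await GachaLogService.import_gacha_log_data(
        user_id, client, uigf_payload, verify_uid=False
    )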

gacha_log plugin module (class GachaLog(Plugin.Conversation, BasePlugin.Conversation))

@@ -1,8 +1,13 @@
 import json
+from datetime import datetime
+from enum import Enum
 from io import BytesIO
+from os import sep
 
 import genshin
 from genshin.models import BannerType
+from modules.apihelper.gacha_log import GachaLog as GachaLogService
+from openpyxl import load_workbook
 from telegram import Update, User, Message, Document, InlineKeyboardButton, InlineKeyboardMarkup
 from telegram.constants import ChatAction
 from telegram.ext import CallbackContext, CommandHandler, MessageHandler, filters, ConversationHandler
@@ -15,7 +20,6 @@ from core.plugin import Plugin, handler, conversation
 from core.template import TemplateService
 from core.user import UserService
 from core.user.error import UserNotFoundError
-from modules.apihelper.gacha_log import GachaLog as GachaLogService
 from modules.apihelper.hyperion import SignIn
 from utils.bot import get_all_args
 from utils.decorators.admins import bot_admins_rights_check
@@ -55,7 +59,7 @@ class GachaLog(Plugin.Conversation, BasePlugin.Conversation):
         return url
 
     @staticmethod
-    async def _refresh_user_data(user: User, data: dict = None, authkey: str = None) -> str:
+    async def _refresh_user_data(user: User, data: dict = None, authkey: str = None, verify_uid: bool = True) -> str:
         """刷新用户数据
         :param user: 用户
         :param data: 数据
@@ -68,24 +72,172 @@ class GachaLog(Plugin.Conversation, BasePlugin.Conversation):
             if authkey:
                 return await GachaLogService.get_gacha_log_data(user.id, client, authkey)
             if data:
-                return await GachaLogService.import_gacha_log_data(user.id, client, data)
+                return await GachaLogService.import_gacha_log_data(user.id, client, data, verify_uid)
         except UserNotFoundError:
             logger.info(f"未查询到用户({user.full_name} {user.id}) 所绑定的账号信息")
             return "派蒙没有找到您所绑定的账号信息,请先私聊派蒙绑定账号"
+
+    @staticmethod
+    def convert_paimonmoe_to_uigf(data: BytesIO) -> dict:
+        """转换 paimon.moe 或 非小酋 导出 xlsx 数据为 UIGF 格式
+        :param data: paimon.moe 导出的 xlsx 数据
+        :return: UIGF 格式数据
+        """
+        PAIMONMOE_VERSION = 3
+        PM2UIGF_VERSION = 1
+        PM2UIGF_NAME = "paimon_moe_to_uigf"
+        UIGF_VERSION = "v2.2"
+        with open(f"resources{sep}json{sep}zh.json", "r") as load_f:
+            zh_dict = json.load(load_f)
+
+        class XlsxType(Enum):
+            PAIMONMOE = 1
+            FXQ = 2
+
+        class ItemType(Enum):
+            CHARACTER = "角色"
+            WEAPON = "武器"
+
+        class UIGFGachaType(Enum):
+            BEGINNER = 100
+            STANDARD = 200
+            CHARACTER = 301
+            WEAPON = 302
+
+        class Qiyr:
+            def __init__(
+                self, uigf_gacha_type: UIGFGachaType, item_type: ItemType, name: str, time: datetime, p: int, _id: int
+            ) -> None:
+                self.uigf_gacha_type = uigf_gacha_type
+                self.item_type = item_type
+                self.name = name
+                self.time = time
+                self.rank_type = p
+                self.id = _id
+
+            def qy2_json(self):
+                return {
+                    "gacha_type": self.uigf_gacha_type.value,  # 注意!
+                    "item_id": "",
+                    "count": -1,
+                    "time": self.time.strftime("%Y-%m-%d %H:%M:%S"),
+                    "name": self.name,
+                    "item_type": self.item_type.value,
+                    "rank_type": self.rank_type,
+                    "id": self.id,
+                    "uigf_gacha_type": self.uigf_gacha_type.value,
+                }
+
+        def from_paimon_moe(uigf_gacha_type: UIGFGachaType, item_type: str, name: str, time: str, p: int) -> Qiyr:
+            item_type = ItemType.CHARACTER if item_type == "Character" else ItemType.WEAPON
+            name = zh_dict[name]
+            time = datetime.strptime(time, "%Y-%m-%d %H:%M:%S")
+            return Qiyr(uigf_gacha_type, item_type, name, time, p, 0)
+
+        def from_fxq(uigf_gacha_type: UIGFGachaType, item_type: str, name: str, time: str, p: int, _id: int) -> Qiyr:
+            item_type = ItemType.CHARACTER if item_type == "角色" else ItemType.WEAPON
+            time = datetime.strptime(time, "%Y-%m-%d %H:%M:%S")
+            return Qiyr(uigf_gacha_type, item_type, name, time, p, _id)
+
+        class uigf:
+            qiyes: list[Qiyr]
+            uid: int
+            export_time: datetime
+            export_app: str = PM2UIGF_NAME
+            export_app_version: str = PM2UIGF_VERSION
+            uigf_version = UIGF_VERSION
+            lang = "zh-cn"
+
+            def __init__(self, qiyes: list[Qiyr], uid: int, export_time: datetime) -> None:
+                self.uid = uid
+                self.qiyes = qiyes
+                self.qiyes.sort(key=lambda x: x.time)
+                if self.qiyes[0].id == 0:  # 如果是从paimon.moe导入的那么就给id赋值
+                    for index, _ in enumerate(self.qiyes):
+                        self.qiyes[index].id = index + 1
+                self.export_time = export_time
+
+            def export_json(self) -> dict:
+                json_d = {
+                    "info": {
+                        "uid": self.uid,
+                        "lang": self.lang,
+                        "export_time": self.export_time.strftime("%Y-%m-%d %H:%M:%S"),
+                        "export_timestamp": self.export_time.timestamp(),
+                        "export_app": self.export_app,
+                        "export_app_version": self.export_app_version,
+                        "uigf_version": self.uigf_version,
+                    },
+                    "list": [],
+                }
+                for qiye in self.qiyes:
+                    json_d["list"].append(qiye.qy2_json())
+                return json_d
+
+        wb = load_workbook(data)
+        xlsx_type = XlsxType.PAIMONMOE if len(wb.worksheets) == 6 else XlsxType.FXQ  # 判断是paimon.moe还是非小酋导出的
+        paimonmoe_sheets = {
+            UIGFGachaType.BEGINNER: "Beginners' Wish",
+            UIGFGachaType.STANDARD: "Standard",
+            UIGFGachaType.CHARACTER: "Character Event",
+            UIGFGachaType.WEAPON: "Weapon Event",
+        }
+        fxq_sheets = {
+            UIGFGachaType.BEGINNER: "新手祈愿",
+            UIGFGachaType.STANDARD: "常驻祈愿",
+            UIGFGachaType.CHARACTER: "角色活动祈愿",
+            UIGFGachaType.WEAPON: "武器活动祈愿",
+        }
+        qiyes = []
+        if xlsx_type == XlsxType.PAIMONMOE:
+            ws = wb["Information"]
+            if ws["B2"].value != PAIMONMOE_VERSION:
+                raise Exception("PaimonMoe version not supported")
+            export_time = datetime.strptime(ws["B3"].value, "%Y-%m-%d %H:%M:%S")
+            for gacha_type in paimonmoe_sheets:
+                ws = wb[paimonmoe_sheets[gacha_type]]
+                for row in ws.iter_rows(min_row=2, values_only=True):
+                    if row[0] is None:
+                        break
+                    qiyes.append(from_paimon_moe(gacha_type, row[0], row[1], row[2], row[3]))
+        else:
+            export_time = datetime.now()
+            for gacha_type in fxq_sheets:
+                ws = wb[fxq_sheets[gacha_type]]
+                for row in ws.iter_rows(min_row=2, values_only=True):
+                    if row[0] is None:
+                        break
+                    qiyes.append(from_fxq(gacha_type, row[2], row[1], row[0], row[3], row[6]))
+        u = uigf(qiyes, 0, export_time)
+        return u.export_json()
 
     async def import_from_file(self, user: User, message: Message, document: Document = None) -> None:
         if not document:
             document = message.document
-        if not document.file_name.endswith(".json"):
-            await message.reply_text("文件格式错误,请发送符合 UIGF 标准的抽卡记录文件")
+        # TODO: 使用 mimetype 判断文件类型
+        if document.file_name.endswith(".xlsx"):
+            file_type = "xlsx"
+        elif document.file_name.endswith(".json"):
+            file_type = "json"
+        else:
+            await message.reply_text("文件格式错误,请发送符合 UIGF 标准的 json 格式的抽卡记录文件或者 paimon.moe、非小酋导出的 xlsx 格式的抽卡记录文件")
+            return
         if document.file_size > 2 * 1024 * 1024:
             await message.reply_text("文件过大,请发送小于 2 MB 的文件")
         try:
             data = BytesIO()
             await (await document.get_file()).download(out=data)
-            # bytesio to json
-            data = data.getvalue().decode("utf-8")
-            data = json.loads(data)
+            if file_type == "json":
+                # bytesio to json
+                data = data.getvalue().decode("utf-8")
+                data = json.loads(data)
+            else:
+                data = self.convert_paimonmoe_to_uigf(data)
         except UnicodeDecodeError:
             await message.reply_text("文件解析失败,请检查文件编码是否正确或符合 UIGF 标准")
             return
@@ -97,7 +249,7 @@ class GachaLog(Plugin.Conversation, BasePlugin.Conversation):
         reply = await message.reply_text("文件解析成功,正在导入数据")
         await message.reply_chat_action(ChatAction.TYPING)
         try:
-            text = await self._refresh_user_data(user, data=data)
+            text = await self._refresh_user_data(user, data=data, verify_uid=file_type == "json")
         except Exception as exc:  # pylint: disable=W0703
             logger.error(f"文件解析失败:{repr(exc)}")
             text = "文件解析失败,请检查文件是否符合 UIGF 标准"

poetry.lock generated

@@ -159,7 +159,7 @@ python-versions = ">=3.5"
 dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
 docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
 tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
-tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
+tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
 
 [[package]]
 name = "backports.zoneinfo"
@@ -234,7 +234,7 @@ optional = false
 python-versions = ">=3.6.0"
 
 [package.extras]
-unicode-backport = ["unicodedata2"]
+unicode_backport = ["unicodedata2"]
 
 [[package]]
 name = "click"
@@ -313,6 +313,14 @@ url = "https://github.com/mrwan200/EnkaNetwork.py"
 reference = "HEAD"
 resolved_reference = "03fd20e5f838727b7c394c11ea1360cfc3272d69"
 
+[[package]]
+name = "et-xmlfile"
+version = "1.1.0"
+description = "An implementation of lxml.xmlfile for the standard library"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
 [[package]]
 name = "fakeredis"
 version = "1.9.4"
@@ -601,6 +609,17 @@ category = "main"
 optional = false
 python-versions = ">=3.8"
 
+[[package]]
+name = "openpyxl"
+version = "3.0.10"
+description = "A Python library to read/write Excel 2010 xlsx/xlsm files"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+et-xmlfile = "*"
+
 [[package]]
 name = "packaging"
 version = "21.3"
@@ -1022,19 +1041,19 @@ aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
 aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
 asyncio = ["greenlet (!=0.4.17)"]
 asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
-mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
+mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"]
 mssql = ["pyodbc"]
-mssql-pymssql = ["pymssql"]
-mssql-pyodbc = ["pyodbc"]
+mssql_pymssql = ["pymssql"]
+mssql_pyodbc = ["pyodbc"]
 mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
 mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
-mysql-connector = ["mysql-connector-python"]
+mysql_connector = ["mysql-connector-python"]
 oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"]
 postgresql = ["psycopg2 (>=2.7)"]
-postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
-postgresql-psycopg2binary = ["psycopg2-binary"]
-postgresql-psycopg2cffi = ["psycopg2cffi"]
+postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
+postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
+postgresql_psycopg2binary = ["psycopg2-binary"]
+postgresql_psycopg2cffi = ["psycopg2cffi"]
 pymysql = ["pymysql", "pymysql (<1)"]
 sqlcipher = ["sqlcipher3_binary"]
@@ -1266,7 +1285,7 @@ test = ["pytest", "pytest-asyncio", "flaky"]
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.8"
-content-hash = "d91f61fd8b26f32e2a8b4dfd790f9471276f386a4391ddbf141cf394bb074759"
+content-hash = "5068cee1f0c7399e9021e86040bc5ace24eab3428fc43b1b6985732175c7a69a"
 
 [metadata.files]
 aiofiles = [
@@ -1491,6 +1510,10 @@ Deprecated = [
     {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"},
 ]
 "enkanetwork.py" = []
+et-xmlfile = [
+    {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"},
+    {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"},
+]
 fakeredis = [
     {file = "fakeredis-1.9.4-py3-none-any.whl", hash = "sha256:61afe14095aad3e7413a0a6fe63041da1b4bc3e41d5228a33b60bd03fabf22d8"},
     {file = "fakeredis-1.9.4.tar.gz", hash = "sha256:17415645d11994061f5394f3f1c76ba4531f3f8b63f9c55a8fd2120bebcbfae9"},
@@ -1917,6 +1940,10 @@ numpy = [
     {file = "numpy-1.23.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4d52914c88b4930dafb6c48ba5115a96cbab40f45740239d9f4159c4ba779962"},
     {file = "numpy-1.23.4.tar.gz", hash = "sha256:ed2cc92af0efad20198638c69bb0fc2870a58dabfba6eb722c933b48556c686c"},
 ]
+openpyxl = [
+    {file = "openpyxl-3.0.10-py2.py3-none-any.whl", hash = "sha256:0ab6d25d01799f97a9464630abacbb34aafecdcaa0ef3cba6d6b3499867d0355"},
+    {file = "openpyxl-3.0.10.tar.gz", hash = "sha256:e47805627aebcf860edb4edf7987b1309c1b3632f3750538ed962bbcc3bd7449"},
+]
 packaging = [
     {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
     {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},

pyproject.toml

@@ -39,6 +39,7 @@ fastapi = "^0.85.1"
 uvicorn = {extras = ["standard"], version = "^0.18.3"}
 sentry-sdk = "^1.9.10"
 GitPython = "^3.1.29"
+openpyxl = "^3.0.10"
 
 [tool.poetry.extras]
 pyro = ["Pyrogram", "TgCrypto"]

resources/json/zh.json Normal file

File diff suppressed because one or more lines are too long