🎉 原神地图API

This commit is contained in:
KimgiaiiWuyi 2022-10-16 01:11:57 +08:00
parent d4dce1c0c7
commit c1cb938d26
36 changed files with 3355 additions and 0 deletions

5
.gitignore vendored
View File

@ -127,3 +127,8 @@ dmypy.json
# Pyre type checker # Pyre type checker
.pyre/ .pyre/
# 地图资源
fastapi_genshin_map/GetMapImage/map_data
fastapi_genshin_map/GetMapImage/resource_data
fastapi_genshin_map/GetMapImage/genshinmap.log

3
.gitmodules vendored Normal file
View File

@ -0,0 +1,3 @@
[submodule "fastapi_genshin_map/GenshinMap"]
path = fastapi_genshin_map/GetMapImage/GenshinMap
url = https://github.com/MingxuanGame/GenshinMap.git

View File

@ -0,0 +1,16 @@
# EditorConfig is awesome: https://EditorConfig.org
root = true
[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[{*.py,*.pyi}]
indent_size = 4
[*.md]
trim_trailing_whitespace = false

View File

@ -0,0 +1,53 @@
name: Unittest
on:
push:
branches:
- master
pull_request:
jobs:
test:
name: Coverage
strategy:
matrix:
version: ["3.8", "3.9", "3.10"]
os: [ubuntu-latest, windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
env:
OS: ${{ matrix.os }}
PYTHON_VERSION: ${{ matrix.version }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.version }}
architecture: "x64"
- uses: Gr1N/setup-poetry@v7
- id: poetry-cache
run: echo "::set-output name=dir::$(poetry config virtualenvs.path)"
shell: bash
- uses: actions/cache@v2
with:
path: ${{ steps.poetry-cache.outputs.dir }}
key: ${{ runner.os }}-poetry-${{ matrix.version }}-${{ hashFiles('**/poetry.lock') }}
- name: Install dependencies
run: poetry install
shell: bash
- name: Run Pytest
run: |
poetry run pytest -n auto --cov-report xml
- name: Upload report
uses: codecov/codecov-action@v3
with:
env_vars: OS,PYTHON_VERSION
files: ./coverage.xml
flags: unittests

View File

@ -0,0 +1,663 @@
# Created by https://www.toptal.com/developers/gitignore/api/visualstudio,visualstudiocode,jetbrains+all,python
# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudio,visualstudiocode,jetbrains+all,python
### JetBrains+all ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# AWS User-specific
.idea/**/aws.xml
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# SonarLint plugin
.idea/sonarlint/
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### JetBrains+all Patch ###
# Ignore everything but code style settings and run configurations
# that are supposed to be shared within teams.
.idea/*
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
### VisualStudioCode ###
.vscode/*
# Local History for Visual Studio Code
.history/
# Built Visual Studio Code Extensions
*.vsix
### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide
# Support for Project snippet scope
.vscode/*.code-snippets
# Ignore code-workspaces
*.code-workspace
### VisualStudio ###
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
*.dsw
*.dsp
# Visual Studio 6 technical files
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# Visual Studio History (VSHistory) files
.vshistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
# VS Code files for those working on multiple tools
# Local History for Visual Studio Code
# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp
# JetBrains Rider
*.sln.iml
### VisualStudio Patch ###
# Additional files built by Visual Studio
# End of https://www.toptal.com/developers/gitignore/api/visualstudio,visualstudiocode,jetbrains+all,python,data

View File

@ -0,0 +1,22 @@
ci:
autofix_commit_msg: ":rotating_light: auto fix by pre-commit-ci"
autofix_prs: true
autoupdate_branch: master
autoupdate_schedule: monthly
autoupdate_commit_msg: ":arrow_up: auto update by pre-commit-ci"
repos:
- repo: https://github.com/pycqa/isort
rev: 5.10.1
hooks:
- id: isort
- repo: https://github.com/psf/black
rev: 22.8.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 5.0.4
hooks:
- id: flake8

View File

@ -0,0 +1,19 @@
The MIT License (MIT)
Copyright (c) 2022 MingxuanGame
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,65 @@
# GenshinMap
[![License](https://img.shields.io/github/license/MingxuanGame/GenshinMap?style=flat-square)](https://github.com/MingxuanGame/GenshinMap/blob/master/LICENSE)
[![QQ群](https://img.shields.io/badge/QQ%E7%BE%A4-929275476-success?style=flat-square)](https://jq.qq.com/?_wv=1027&k=C7XY04F1)
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?&labelColor=ef8336)](https://pycqa.github.io/isort/)
[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/MingxuanGame/GenshinMap/master.svg)](https://results.pre-commit.ci/latest/github/MingxuanGame/GenshinMap/master)
[![Codecov](https://codecov.io/gh/MingxuanGame/GenshinMap/branch/master/graph/badge.svg?token=SVSXXE6MBQ)](https://app.codecov.io/gh/MingxuanGame/GenshinMap)
GenshinMap 是一个原神米游社大地图 API 的包装,用于简易获取大地图数据
## 快速开始
```python
import asyncio
from genshinmap import utils, models, request
async def main():
map_id = models.MapID.teyvat
# 获取地图数据
maps = await request.get_maps(map_id)
# 获取资源列表
labels = await request.get_labels(map_id)
# 获取坐标
points = await request.get_points(map_id)
# 获取单片地图
map_image = await utils.get_map_by_pos(maps.detail, 1024)
# 获取传送锚点坐标
transmittable = utils.get_points_by_id(3, points)
# 转换坐标
transmittable_converted = utils.convert_pos(
transmittable, maps.detail.origin
)
# 获取地图锚点之一
anchors = (await request.get_anchors(map_id))[0]
# 转换地图锚点偏左坐标
anchors_converted = utils.convert_pos(
anchors.get_children_all_left_point(), maps.detail.origin
)
if __name__ == "__main__":
asyncio.run(main())
```
## 致谢
[观测大地图](https://webstatic.mihoyo.com/app/ys-map-cn/index.html) —— 本项目所包装的地图
[H-K-Y/Genshin_Impact_bot](https://github.com/H-K-Y/Genshin_Impact_bot) —— 大地图数据的处理,本项目的蓝本
## 计划
* [ ] 补全模型相关文档
* [ ] yuanshen.site 支持
* *More...*
## 许可
[MIT](./LICENSE)

View File

@ -0,0 +1,17 @@
"""Public API of the genshinmap package: flat re-exports of models,
request helpers, exceptions and utilities."""
from .models import Maps as Maps  # noqa: F401
from .models import Tree as Tree  # noqa: F401
from .models import MapID as MapID  # noqa: F401
from .models import Point as Point  # noqa: F401
from .models import Slice as Slice  # noqa: F401
from .models import MapInfo as MapInfo  # noqa: F401
from .models import XYPoint as XYPoint  # noqa: F401
from .utils import make_map as make_map  # noqa: F401
from .request import get_maps as get_maps  # noqa: F401
from .exc import StatusError as StatusError  # noqa: F401
from .request import get_labels as get_labels  # noqa: F401
from .request import get_points as get_points  # noqa: F401
from .utils import convert_pos as convert_pos  # noqa: F401
from .utils import get_map_by_pos as get_map_by_pos  # noqa: F401
from .utils import get_points_by_id as get_points_by_id  # noqa: F401

# NOTE(review): __all__ names submodules, but "imgs" is never imported
# here (and the names re-exported above are not listed) — a star-import
# would fail on "imgs" unless that submodule exists; confirm.
__all__ = ["utils", "request", "exc", "models", "imgs"]

View File

@ -0,0 +1,13 @@
class StatusError(ValueError):
    """Raised when a miHoYo API response carries a non-zero retcode."""

    def __init__(self, status: int, message: str, *args: object) -> None:
        # Forward status/message into ValueError.args so the exception
        # behaves like a normal ValueError (pickling, args inspection).
        super().__init__(status, message, *args)
        self.status = status
        self.message = message

    def __str__(self) -> str:
        return "miHoYo API {}: {}".format(self.status, self.message)

    def __repr__(self) -> str:
        return "<StatusError status={}, message={}>".format(
            self.status, self.message
        )

View File

@ -0,0 +1,85 @@
from __future__ import annotations
from typing import List, Tuple
import numpy as np
from sklearn.cluster import KMeans
from shapely.geometry import Point, Polygon
from .models import XYPoint
# Module-level type aliases.
Pos = Tuple[float, float]  # a raw (x, y) pair
Poses = List[XYPoint]  # list of map coordinates
Points = List[Point]  # list of shapely Points
def k_means_points(
    points: List[XYPoint], length: int = 500, clusters: int = 3
) -> List[Tuple[XYPoint, XYPoint, Poses]]:
    """
    Group coordinates into clusters using K-Means.

    Parameters
    ----------
    points: `list[XYPoint]`
        Coordinates; converting them with `convert_pos` first is
        recommended.
    length: `int` (default: 500)
        Half-width of the region kept around each cluster centre; if
        too large, one point may end up in several clusters.
    clusters: `int` (default: 3)
        Number of clusters.

    Returns
    -------
    `list[tuple[XYPoint, XYPoint, list[XYPoint]]]`
        Per tuple: 1. top-left-most point of the cluster,
        2. bottom-right-most point, 3. all points in the cluster.
        The list is sorted by cluster size, descending.
    """
    pos_array = np.array(points)
    k_means = KMeans(n_clusters=clusters).fit(pos_array)
    points_temp: List[Points] = []
    for k_means_pos in k_means.cluster_centers_:
        # Window of +/- length around the centre, clamped at 0 on the
        # low side (map coordinates are expected to be non-negative).
        x = (
            k_means_pos[0] - length if k_means_pos[0] > length else 0,
            k_means_pos[0] + length,
        )
        y = (
            k_means_pos[1] - length if k_means_pos[1] > length else 0,
            k_means_pos[1] + length,
        )
        # Rectangle polygon for the window; used for point-in-region tests.
        path = Polygon(
            [(x[0], y[0]), (x[0], y[1]), (x[1], y[1]), (x[1], y[0])]
        )
        # Every input point that falls inside this cluster's window.
        points_temp.append(
            [Point(i) for i in pos_array if path.contains(Point(i))]
        )
    return_list = []
    for i in points_temp:
        # NOTE(review): assumes each cluster window contains at least
        # one point; an empty cluster would make min()/max() fail.
        pos_array_ = np.array([p.xy for p in i])
        return_list.append(
            (
                XYPoint(pos_array_[:, 0].min(), pos_array_[:, 1].min()),
                XYPoint(pos_array_[:, 0].max(), pos_array_[:, 1].max()),
                list(map(lambda p: XYPoint(p.x, p.y), i)),
            )
        )
    return sorted(
        return_list, key=lambda pos_tuple: len(pos_tuple[2]), reverse=True
    )

View File

@ -0,0 +1,187 @@
from __future__ import annotations
from enum import IntEnum
from typing import List, Tuple, Optional, NamedTuple
from pydantic import HttpUrl, BaseModel, validator
class MapID(IntEnum):
    """Map IDs accepted by the miHoYo map API."""

    teyvat = 2
    """提瓦特"""
    enkanomiya = 7
    """渊下宫"""
    chasm = 9
    """层岩巨渊·地下矿区"""
    # Disabled member; the bare string below belongs to it.
    # golden_apple_archipelago = 12
    """金苹果群岛"""
class Label(BaseModel):
    """A label (resource category) node of the map label tree.

    Field semantics mirror the BBS API response — not verified here.
    """

    id: int
    name: str
    icon: HttpUrl
    parent_id: int
    depth: int
    node_type: int
    jump_type: int
    jump_target_id: int
    display_priority: int
    children: list
    activity_page_label: int
    area_page_label: List[int]
    is_all_area: bool
class Tree(BaseModel):
    """Top-level node of the label tree; children are `Label` items."""

    id: int
    name: str
    icon: str
    parent_id: int
    depth: int
    node_type: int
    jump_type: int
    jump_target_id: int
    display_priority: int
    children: List[Label]
    activity_page_label: int
    area_page_label: List
    is_all_area: bool
class Point(BaseModel):
    """A single resource point placed on the map."""

    id: int
    label_id: int  # category this point belongs to (see Label/Tree)
    x_pos: float
    y_pos: float
    author_name: str
    ctime: str
    display_state: int
class Slice(BaseModel):
    """One map-slice entry as served by the API (URL only)."""

    url: HttpUrl
class Maps(BaseModel):
    """Map detail: slice image URLs plus geometry of the whole map."""

    slices: List[HttpUrl]
    origin: List[int]  # offset applied by utils.convert_pos
    total_size: List[int]
    padding: List[int]

    @validator("slices", pre=True)
    def slices_to_list(cls, v):
        # The API delivers slices as rows of {"url": ...} dicts;
        # flatten them into a single list of URL strings.
        urls: List[str] = []
        for i in v:
            urls.extend(j["url"] for j in i)
        return urls
class MapInfo(BaseModel):
    """A map entry as returned by the `/info` endpoint."""

    id: int
    name: str
    parent_id: int
    depth: int
    detail: Maps
    node_type: int
    children: list
    icon: Optional[HttpUrl]
    ch_ext: Optional[str]

    @validator("detail", pre=True)
    def detail_str_to_maps(cls, v):
        # `detail` arrives as a JSON-encoded string; parse it into Maps.
        return Maps.parse_raw(v)
class XYPoint(NamedTuple):
    """A 2-D point; supports tuple unpacking (``x, y = point``)."""

    x: float
    y: float
class Kind(BaseModel):
    """A category descriptor for in-game spots."""

    id: int
    name: str
    icon_id: int
    icon_url: HttpUrl
    is_game: int
class SpotKinds(BaseModel):
    """Payload of `/spot_kind/get_spot_kinds`."""

    list: List[Kind]  # NOTE: shadows builtin `list`; kept for API compat
    is_sync: bool
    already_share: bool
class Spot(BaseModel):
    """An in-game spot (user marker) synced from the game account."""

    id: int
    name: str
    content: str
    kind_id: int  # references Kind.id
    spot_icon: str
    x_pos: float
    y_pos: float
    nick_name: str
    avatar_url: HttpUrl
    status: int
class SubAnchor(BaseModel):
    """A child anchor of an `Anchor` (no further nesting)."""

    id: int
    name: str
    l_x: int  # left-point x
    l_y: int  # left-point y
    r_x: int  # right-point x
    r_y: int  # right-point y
    app_sn: str
    parent_id: str
    map_id: str
    sort: int
class Anchor(BaseModel):
    """A named map anchor (region marker) with child anchors."""

    id: int
    name: str
    l_x: int  # left-point x
    l_y: int  # left-point y
    r_x: int  # right-point x
    r_y: int  # right-point y
    app_sn: str
    parent_id: str
    map_id: str
    children: List[SubAnchor]
    sort: int

    def get_children_all_left_point(self) -> List[XYPoint]:
        """Return the left-hand `XYPoint` of every child anchor."""
        return [XYPoint(x=i.l_x, y=i.l_y) for i in self.children]

    def get_children_all_right_point(self) -> List[XYPoint]:
        """Return the right-hand `XYPoint` of every child anchor."""
        return [XYPoint(x=i.r_x, y=i.r_y) for i in self.children]
class PageLabel(BaseModel):
    """A big-map region label (e.g. Mondstadt, Dragonspine)."""

    id: int
    name: str
    type: int
    pc_icon_url: str
    mobile_icon_url: str
    sort: int
    pc_icon_url2: str
    map_id: int
    jump_url: str
    jump_type: str
    center: Optional[Tuple[float, float]]
    zoom: Optional[float]

    @validator("center", pre=True)
    def center_str_to_tuple(cls, v: str) -> Optional[Tuple[float, float]]:
        # "x,y" string -> (x, y) floats; empty values become None.
        # NOTE(review): a malformed string with != 2 parts would still
        # produce a tuple of another length — confirm upstream format.
        if v and (splitted := v.split(",")):
            return tuple(map(float, splitted))

    @validator("zoom", pre=True)
    def zoom_str_to_float(cls, v: str):
        # zoom arrives as a string; empty values become None.
        if v:
            return float(v)

View File

@ -0,0 +1,176 @@
from __future__ import annotations
from typing import Any, Dict, List, Tuple
from httpx import Response, AsyncClient
from .exc import StatusError
from .models import (
Spot,
Tree,
MapID,
Point,
Anchor,
MapInfo,
PageLabel,
SpotKinds,
)
# Shared HTTP client bound to the miHoYo map API root; every request
# helper below issues paths relative to this base URL.
API_CLIENT = AsyncClient(
    base_url="https://api-takumi.mihoyo.com/common/map_user/ys_obc/v1/map"
)

# Mapping of kind id -> spots of that kind.
Spots = Dict[int, List[Spot]]
async def _request(
    endpoint: str, client: AsyncClient = API_CLIENT
) -> Dict[str, Any]:
    """GET *endpoint* and return the ``data`` field of the JSON body.

    Raises `StatusError` on a non-zero API retcode and lets httpx
    raise on transport-level (HTTP status) failures.
    """
    response = await client.get(endpoint)
    response.raise_for_status()
    payload: Dict[str, Any] = response.json()
    retcode = payload["retcode"]
    if retcode != 0:
        raise StatusError(retcode, payload["message"])
    return payload["data"]
async def get_labels(map_id: MapID) -> List[Tree]:
    """
    Fetch the resource-label tree for *map_id* from miHoYo BBS.

    Parameters
    ----------
    map_id: `MapID`
        The map to query.

    Returns
    -------
    `list[Tree]`
    """
    payload = await _request(f"/label/tree?map_id={map_id}&app_sn=ys_obc")
    return list(map(Tree.parse_obj, payload["tree"]))
async def get_points(map_id: MapID) -> List[Point]:
    """
    Fetch the resource-point list for *map_id* from miHoYo BBS.

    Parameters
    ----------
    map_id: `MapID`
        The map to query.

    Returns
    -------
    `list[Point]`
    """
    payload = await _request(f"/point/list?map_id={map_id}&app_sn=ys_obc")
    return list(map(Point.parse_obj, payload["point_list"]))
async def get_maps(map_id: MapID) -> MapInfo:
    """
    Fetch map info (slices, origin, sizes) for *map_id* from miHoYo BBS.

    Parameters
    ----------
    map_id: `MapID`
        The map to query.

    Returns
    -------
    `MapInfo`
    """
    payload = await _request(f"/info?map_id={map_id}&app_sn=ys_obc&lang=zh-cn")
    return MapInfo.parse_obj(payload["info"])
async def get_spot_from_game(
    map_id: MapID, cookie: str
) -> Tuple[Spots, SpotKinds]:
    """
    Fetch the user's in-game spots (markers) from miHoYo BBS.

    Note: the sync endpoint may only be triggered once every ten
    minutes; calling it more often yields retcode -2000.

    Parameters
    ----------
    map_id: `MapID`
        The map to query.
    cookie: `str`
        miHoYo BBS cookie used for authentication.

    Returns
    -------
    `tuple[Spots, SpotKinds]`
        Spots grouped by kind id, plus the kind metadata.
    """

    def _raise_for_retcode(resp: Response) -> Dict[str, Any]:
        # Same contract as _request, but reusable for POST responses.
        resp.raise_for_status()
        data: Dict[str, Any] = resp.json()
        if data["retcode"] != 0:
            raise StatusError(data["retcode"], data["message"])
        return data["data"]

    # 1. Ask the server to sync spots from the game account.
    resp = await API_CLIENT.post(
        "/spot_kind/sync_game_spot",
        json={
            "map_id": str(map_id.value),
            "app_sn": "ys_obc",
            "lang": "zh-cn",
        },
        headers={"Cookie": cookie},
    )
    _raise_for_retcode(resp)
    # 2. Fetch the spot kinds.
    # BUG FIX: map_id was hard-coded to 2 (Teyvat); use the argument.
    resp = await API_CLIENT.get(
        f"/spot_kind/get_spot_kinds?map_id={map_id}&app_sn=ys_obc&lang=zh-cn",
        headers={"Cookie": cookie},
    )
    data = _raise_for_retcode(resp)
    spot_kinds_data = SpotKinds.parse_obj(data)
    ids = [kind.id for kind in spot_kinds_data.list]
    # 3. Fetch the spots for every kind.
    # NOTE(review): unlike steps 1-2 this POST sends no Cookie header;
    # confirm whether the endpoint requires authentication.
    resp = await API_CLIENT.post(
        "/spot/get_map_spots_by_kinds",
        json={
            "map_id": str(map_id.value),
            "app_sn": "ys_obc",
            "lang": "zh-cn",
            "kind_ids": ids,
        },
    )
    data = _raise_for_retcode(resp)
    spots: Spots = {}
    for kind_id, value in data["spots"].items():
        spots[int(kind_id)] = [Spot.parse_obj(i) for i in value["list"]]
    return spots, spot_kinds_data
async def get_page_label(map_id: MapID) -> List[PageLabel]:
    """
    Fetch big-map region labels (e.g. Mondstadt, Dragonspine) from
    miHoYo BBS.

    Parameters
    ----------
    map_id: `MapID`
        The map to query.

    Returns
    -------
    `list[PageLabel]`
    """
    payload = await _request(
        f"/get_map_pageLabel?map_id={map_id}&app_sn=ys_obc&lang=zh-cn",
    )
    return list(map(PageLabel.parse_obj, payload["list"]))
async def get_anchors(map_id: MapID) -> List[Anchor]:
    """
    Fetch map anchors (with children, e.g. Minlin -> Mt. Aocang) from
    miHoYo BBS.

    Parameters
    ----------
    map_id: `MapID`
        The map to query.

    Returns
    -------
    `list[Anchor]`
    """
    payload = await _request(
        f"/map_anchor/list?map_id={map_id}&app_sn=ys_obc&lang=zh-cn",
    )
    return list(map(Anchor.parse_obj, payload["list"]))

View File

@ -0,0 +1,220 @@
from __future__ import annotations
from math import ceil
from io import BytesIO
from typing import List, Tuple, Union
from asyncio import gather, create_task
from PIL import Image
from httpx import AsyncClient
from .models import Maps, Point, XYPoint
# Module-wide HTTP client reused by get_img for slice downloads.
CLIENT = AsyncClient()
async def get_img(url: str) -> Image.Image:
    """Download *url* and decode the body into a PIL image."""
    response = await CLIENT.get(url)
    response.raise_for_status()
    buffer = BytesIO(response.read())
    return Image.open(buffer)
async def make_map(map: Maps) -> Image.Image:
    """
    Download every slice of *map* and stitch them into one image.

    Warning: may exhaust memory — stitching the full Teyvat map used
    about 1.4 GiB in testing.  Prefer
    `genshinmap.utils.get_map_by_pos` for a single slice.

    Parameters
    ----------
    map: `Maps`
        Map data, obtainable via `get_maps`.

    Returns
    -------
    A `PIL.Image.Image` object.

    See also
    --------
    `get_map_by_pos`
    """
    img = Image.new("RGBA", tuple(map.total_size))
    x = 0
    y = 0
    # Download all slices concurrently, preserving order.
    maps: List[Image.Image] = await gather(
        *[create_task(get_img(url)) for url in map.slices]
    )
    for m in maps:
        img.paste(m, (x, y))
        # Slices are laid out in 4096-px steps; wrap to the next row
        # once the row exceeds the map width.
        x += 4096
        if x >= map.total_size[0]:
            x = 0
            y += 4096
    return img
async def get_map_by_pos(
    map: Maps, x: Union[int, float], y: Union[int, float] = 0
) -> Image.Image:
    """
    Fetch the single map slice containing coordinate (x, y).

    Parameters
    ----------
    map: `Maps`
        Map data, obtainable via `get_maps`.
    x: `int | float`
        Horizontal coordinate.
    y: `int | float` (default: 0)
        Vertical coordinate.

    Returns
    -------
    A `PIL.Image.Image` object.
    """
    slice_index = _pos_to_index(x, y)
    return await get_img(map.slices[slice_index])
def get_points_by_id(id_: int, points: List[Point]) -> List[XYPoint]:
    """
    Select the coordinates of all points belonging to one label.

    Parameters
    ----------
    id_: `int`
        Label ID to filter by.
    points: `list[Point]`
        miHoYo point list, obtainable via `get_points`.

    Returns
    -------
    `list[XYPoint]`
    """
    matched: List[XYPoint] = []
    for point in points:
        if point.label_id == id_:
            matched.append(XYPoint(point.x_pos, point.y_pos))
    return matched
def convert_pos(points: List[XYPoint], origin: List[int]) -> List[XYPoint]:
    """
    Translate miHoYo resource coordinates into the coordinate system
    whose origin is the top-left corner of the map.

    Parameters
    ----------
    points: `list[XYPoint]`
        miHoYo resource coordinates.
    origin: `list[int]`
        Map origin, obtainable via `get_maps`.

    Returns
    -------
    `list[XYPoint]`

    Example
    -------
    >>> convert_pos([XYPoint(1200, 5000), XYPoint(-4200, 1800)], [4844, 4335])
    [XYPoint(x=6044, y=9335), XYPoint(x=644, y=6135)]
    """
    offset_x, offset_y = origin[0], origin[1]
    converted = []
    for px, py in points:
        converted.append(XYPoint(px + offset_x, py + offset_y))
    return converted
def convert_pos_crop(
    top_left_index: int, points: List[XYPoint]
) -> List[XYPoint]:
    """
    Shift *points* (already converted with `convert_pos`) so they are
    relative to the map slice at *top_left_index* (4 slices per row,
    4096 px per slice).

    Parameters
    ----------
    top_left_index: `int`
        Index of the top-left map slice.
    points: `list[XYPoint]`
        Coordinates already converted with `convert_pos`.

    Returns
    -------
    `list[XYPoint]`

    Example
    -------
    >>> convert_pos_crop(1, [XYPoint(0, 0), XYPoint(20, 20)])
    [XYPoint(x=-4096, y=0), XYPoint(x=-4076, y=20)]
    """
    row, col = divmod(top_left_index, 4)
    if col == 0 and row == 0:
        # Slice 0 is the global origin: nothing to shift.
        return points
    x_offset = col * 4096
    y_offset = row * 4096
    return [XYPoint(px - x_offset, py - y_offset) for px, py in points]
def _pos_to_index(x: Union[int, float], y: Union[int, float]) -> int:
# 4 * (y // 4096) {0,4,8}
# x // 4096 {0,1,2,3}
return 4 * (int(y // 4096)) + int(x // 4096)
def _generate_matrix(
top_left: int, top_right: int, bottom_left: int
) -> List[int]:
result = []
while True:
result.extend(iter(range(top_left, top_right + 1)))
if top_left == bottom_left:
break
top_left_copy = top_left
top_left += 4
top_right = top_left + (top_right - top_left_copy)
return result
def crop_image_and_points(
    points: List[XYPoint],
) -> Tuple[List[int], int, List[XYPoint]]:
    """
    For coordinates already converted with `convert_pos`, compute the
    map-slice indices that cover them, the column span (how many more
    slices follow the first before a row wrap), and the crop-local
    coordinates.

    Parameters
    ----------
    points: `list[XYPoint]`
        Coordinates already converted with `convert_pos`.

    Returns
    -------
    `tuple[list[int], int, list[XYPoint]]`
        1. list of map-slice indices
        2. span (index_x2 - index_x1)
        3. points converted with `convert_pos_crop`

    Example
    -------
    >>> points = [XYPoint(x=4200, y=8000), XYPoint(x=4150, y=10240)]
    >>> crop_image_and_points(points)
    ([5, 9], 0, [XYPoint(x=104, y=3904), XYPoint(x=54, y=6144)])
    """
    xs = [p.x for p in points]
    ys = [p.y for p in points]
    # Bounding box of the points.
    x1, y1 = min(xs), min(ys)
    x2, y2 = max(xs), max(ys)
    # Snap the box to the 4096-px slice grid; the max edge collapses to
    # the min edge when a single slice already covers the extent.
    x1 = int(x1 // 4096 * 4096)
    x2 = x1 if x1 + 4096 >= x2 else ceil(x2 / 4096) * 4096 - 4096
    y1 = int(y1 // 4096 * 4096)
    y2 = y1 if y1 + 4096 >= y2 else ceil(y2 / 4096) * 4096 - 4096
    # Corner slice indices of the snapped rectangle.
    index_x1, index_x2 = _pos_to_index(x1, y1), _pos_to_index(x2, y1)
    return (
        _generate_matrix(index_x1, index_x2, _pos_to_index(x1, y2)),
        index_x2 - index_x1,
        convert_pos_crop(index_x1, points),
    )

View File

@ -0,0 +1,819 @@
[[package]]
name = "anyio"
version = "3.6.1"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
category = "main"
optional = false
python-versions = ">=3.6.2"
[package.dependencies]
idna = ">=2.8"
sniffio = ">=1.1"
[package.extras]
doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"]
test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"]
trio = ["trio (>=0.16)"]
[[package]]
name = "attrs"
version = "22.1.0"
description = "Classes Without Boilerplate"
category = "dev"
optional = false
python-versions = ">=3.5"
[package.extras]
dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"]
[[package]]
name = "black"
version = "22.6.0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
python-versions = ">=3.6.2"
[package.dependencies]
click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
pathspec = ">=0.9.0"
platformdirs = ">=2"
tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
[package.extras]
colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.7.4)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "certifi"
version = "2022.6.15"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "cfgv"
version = "3.3.1"
description = "Validate configuration and produce human readable error messages."
category = "dev"
optional = false
python-versions = ">=3.6.1"
[[package]]
name = "click"
version = "8.1.3"
description = "Composable command line interface toolkit"
category = "dev"
optional = false
python-versions = ">=3.7"
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "colorama"
version = "0.4.5"
description = "Cross-platform colored terminal text."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "coverage"
version = "6.5.0"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
[package.dependencies]
tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
[package.extras]
toml = ["tomli"]
[[package]]
name = "distlib"
version = "0.3.5"
description = "Distribution utilities"
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "execnet"
version = "1.9.0"
description = "execnet: rapid multi-Python deployment"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.extras]
testing = ["pre-commit"]
[[package]]
name = "filelock"
version = "3.7.1"
description = "A platform independent file lock."
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"]
testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"]
[[package]]
name = "flake8"
version = "5.0.4"
description = "the modular source code checker: pep8 pyflakes and co"
category = "dev"
optional = false
python-versions = ">=3.6.1"
[package.dependencies]
mccabe = ">=0.7.0,<0.8.0"
pycodestyle = ">=2.9.0,<2.10.0"
pyflakes = ">=2.5.0,<2.6.0"
[[package]]
name = "h11"
version = "0.12.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "httpcore"
version = "0.15.0"
description = "A minimal low-level HTTP client."
category = "main"
optional = false
python-versions = ">=3.7"
[package.dependencies]
anyio = ">=3.0.0,<4.0.0"
certifi = "*"
h11 = ">=0.11,<0.13"
sniffio = ">=1.0.0,<2.0.0"
[package.extras]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "httpx"
version = "0.23.0"
description = "The next generation HTTP client."
category = "main"
optional = false
python-versions = ">=3.7"
[package.dependencies]
certifi = "*"
httpcore = ">=0.15.0,<0.16.0"
rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
sniffio = "*"
[package.extras]
brotli = ["brotlicffi", "brotli"]
cli = ["click (>=8.0.0,<9.0.0)", "rich (>=10,<13)", "pygments (>=2.0.0,<3.0.0)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "identify"
version = "2.5.3"
description = "File identification library for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
license = ["ukkonen"]
[[package]]
name = "idna"
version = "3.3"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
python-versions = ">=3.5"
[[package]]
name = "iniconfig"
version = "1.1.1"
description = "iniconfig: brain-dead simple config-ini parsing"
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "isort"
version = "5.10.1"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
python-versions = ">=3.6.1,<4.0"
[package.extras]
pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
colors = ["colorama (>=0.4.3,<0.5.0)"]
plugins = ["setuptools"]
[[package]]
name = "joblib"
version = "1.1.0"
description = "Lightweight pipelining with Python functions"
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "mccabe"
version = "0.7.0"
description = "McCabe checker, plugin for flake8"
category = "dev"
optional = false
python-versions = ">=3.6"
[[package]]
name = "mypy-extensions"
version = "0.4.3"
description = "Experimental type system extensions for programs checked with the mypy typechecker."
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "nodeenv"
version = "1.7.0"
description = "Node.js virtual environment builder"
category = "dev"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
[[package]]
name = "numpy"
version = "1.23.1"
description = "NumPy is the fundamental package for array computing with Python."
category = "main"
optional = false
python-versions = ">=3.8"
[[package]]
name = "packaging"
version = "21.3"
description = "Core utilities for Python packages"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
[[package]]
name = "pathspec"
version = "0.9.0"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[[package]]
name = "pillow"
version = "9.2.0"
description = "Python Imaging Library (Fork)"
category = "main"
optional = false
python-versions = ">=3.7"
[package.extras]
docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"]
tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]
[[package]]
name = "platformdirs"
version = "2.5.2"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
[[package]]
name = "pluggy"
version = "1.0.0"
description = "plugin and hook calling mechanisms for python"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pre-commit"
version = "2.20.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
category = "dev"
optional = false
python-versions = ">=3.7"
[package.dependencies]
cfgv = ">=2.0.0"
identify = ">=1.0.0"
nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
toml = "*"
virtualenv = ">=20.0.8"
[[package]]
name = "py"
version = "1.11.0"
description = "library with cross-python path, ini-parsing, io, code, log facilities"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "pycodestyle"
version = "2.9.1"
description = "Python style guide checker"
category = "dev"
optional = false
python-versions = ">=3.6"
[[package]]
name = "pydantic"
version = "1.9.1"
description = "Data validation and settings management using python type hints"
category = "main"
optional = false
python-versions = ">=3.6.1"
[package.dependencies]
typing-extensions = ">=3.7.4.3"
[package.extras]
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]
[[package]]
name = "pyflakes"
version = "2.5.0"
description = "passive checker of Python programs"
category = "dev"
optional = false
python-versions = ">=3.6"
[[package]]
name = "pyparsing"
version = "3.0.9"
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
category = "dev"
optional = false
python-versions = ">=3.6.8"
[package.extras]
diagrams = ["railroad-diagrams", "jinja2"]
[[package]]
name = "pytest"
version = "7.1.3"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.7"
[package.dependencies]
attrs = ">=19.2.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
py = ">=1.8.2"
tomli = ">=1.0.0"
[package.extras]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
[[package]]
name = "pytest-asyncio"
version = "0.19.0"
description = "Pytest support for asyncio"
category = "dev"
optional = false
python-versions = ">=3.7"
[package.dependencies]
pytest = ">=6.1.0"
[package.extras]
testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)", "flaky (>=3.5.0)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"]
[[package]]
name = "pytest-cov"
version = "4.0.0"
description = "Pytest plugin for measuring coverage."
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"]
[[package]]
name = "pytest-forked"
version = "1.4.0"
description = "run tests in isolated forked subprocesses"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
py = "*"
pytest = ">=3.10"
[[package]]
name = "pytest-xdist"
version = "2.5.0"
description = "pytest xdist plugin for distributed testing and loop-on-failing modes"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
execnet = ">=1.1"
pytest = ">=6.2.0"
pytest-forked = "*"
[package.extras]
psutil = ["psutil (>=3.0)"]
setproctitle = ["setproctitle"]
testing = ["filelock"]
[[package]]
name = "pyyaml"
version = "6.0"
description = "YAML parser and emitter for Python"
category = "dev"
optional = false
python-versions = ">=3.6"
[[package]]
name = "rfc3986"
version = "1.5.0"
description = "Validating URI References per RFC 3986"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
[package.extras]
idna2008 = ["idna"]
[[package]]
name = "scikit-learn"
version = "1.1.2"
description = "A set of python modules for machine learning and data mining"
category = "main"
optional = false
python-versions = ">=3.8"
[package.dependencies]
joblib = ">=1.0.0"
numpy = ">=1.17.3"
scipy = ">=1.3.2"
threadpoolctl = ">=2.0.0"
[package.extras]
tests = ["numpydoc (>=1.2.0)", "pyamg (>=4.0.0)", "mypy (>=0.961)", "black (>=22.3.0)", "flake8 (>=3.8.2)", "pytest-cov (>=2.9.0)", "pytest (>=5.0.1)", "pandas (>=1.0.5)", "scikit-image (>=0.16.2)", "matplotlib (>=3.1.2)"]
examples = ["seaborn (>=0.9.0)", "pandas (>=1.0.5)", "scikit-image (>=0.16.2)", "matplotlib (>=3.1.2)"]
docs = ["sphinxext-opengraph (>=0.4.2)", "sphinx-prompt (>=1.3.0)", "Pillow (>=7.1.2)", "numpydoc (>=1.2.0)", "sphinx-gallery (>=0.7.0)", "sphinx (>=4.0.1)", "memory-profiler (>=0.57.0)", "seaborn (>=0.9.0)", "pandas (>=1.0.5)", "scikit-image (>=0.16.2)", "matplotlib (>=3.1.2)"]
benchmark = ["memory-profiler (>=0.57.0)", "pandas (>=1.0.5)", "matplotlib (>=3.1.2)"]
[[package]]
name = "scipy"
version = "1.9.0"
description = "SciPy: Scientific Library for Python"
category = "main"
optional = false
python-versions = ">=3.8,<3.12"
[package.dependencies]
numpy = ">=1.18.5,<1.25.0"
[[package]]
name = "shapely"
version = "1.8.2"
description = "Geometric objects, predicates, and operations"
category = "main"
optional = false
python-versions = ">=3.6"
[package.extras]
all = ["pytest", "pytest-cov", "numpy"]
test = ["pytest", "pytest-cov"]
vectorized = ["numpy"]
[[package]]
name = "sniffio"
version = "1.2.0"
description = "Sniff out which async library your code is running under"
category = "main"
optional = false
python-versions = ">=3.5"
[[package]]
name = "threadpoolctl"
version = "3.1.0"
description = "threadpoolctl"
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "toml"
version = "0.10.2"
description = "Python Library for Tom's Obvious, Minimal Language"
category = "dev"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
category = "dev"
optional = false
python-versions = ">=3.7"
[[package]]
name = "typing-extensions"
version = "4.3.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
python-versions = ">=3.7"
[[package]]
name = "virtualenv"
version = "20.16.3"
description = "Virtual Python Environment builder"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
distlib = ">=0.3.5,<1"
filelock = ">=3.4.1,<4"
platformdirs = ">=2.4,<3"
[package.extras]
docs = ["proselint (>=0.13)", "sphinx (>=5.1.1)", "sphinx-argparse (>=0.3.1)", "sphinx-rtd-theme (>=1)", "towncrier (>=21.9)"]
testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"]
[metadata]
lock-version = "1.1"
python-versions = ">=3.8,<3.12" # for scipy
content-hash = "22a50ccf4320b985e2dd4138e2ca946816e0d4dd31c0d3058382072d31bafe6d"
[metadata.files]
anyio = [
{file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"},
{file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"},
]
attrs = []
black = [
{file = "black-22.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69"},
{file = "black-22.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807"},
{file = "black-22.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e"},
{file = "black-22.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def"},
{file = "black-22.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666"},
{file = "black-22.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d"},
{file = "black-22.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256"},
{file = "black-22.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78"},
{file = "black-22.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849"},
{file = "black-22.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c"},
{file = "black-22.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90"},
{file = "black-22.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f"},
{file = "black-22.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e"},
{file = "black-22.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6"},
{file = "black-22.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad"},
{file = "black-22.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf"},
{file = "black-22.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c"},
{file = "black-22.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2"},
{file = "black-22.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee"},
{file = "black-22.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b"},
{file = "black-22.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4"},
{file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"},
{file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"},
]
certifi = [
{file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
{file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
]
cfgv = [
{file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
{file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
]
click = [
{file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
{file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
]
colorama = [
{file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
{file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
]
coverage = []
distlib = []
execnet = []
filelock = [
{file = "filelock-3.7.1-py3-none-any.whl", hash = "sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404"},
{file = "filelock-3.7.1.tar.gz", hash = "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04"},
]
flake8 = []
h11 = [
{file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"},
{file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"},
]
httpcore = [
{file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"},
{file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"},
]
httpx = [
{file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"},
{file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"},
]
identify = []
idna = [
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
]
iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
]
isort = [
{file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
{file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
]
joblib = []
mccabe = []
mypy-extensions = [
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
nodeenv = [
{file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
{file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"},
]
numpy = []
packaging = [
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
]
pathspec = [
{file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
{file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
]
pillow = []
platformdirs = [
{file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
{file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
]
pluggy = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
pre-commit = []
py = [
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pycodestyle = []
pydantic = [
{file = "pydantic-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193"},
{file = "pydantic-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11"},
{file = "pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310"},
{file = "pydantic-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131"},
{file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580"},
{file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd"},
{file = "pydantic-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd"},
{file = "pydantic-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761"},
{file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918"},
{file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74"},
{file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a"},
{file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166"},
{file = "pydantic-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b"},
{file = "pydantic-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892"},
{file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e"},
{file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608"},
{file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537"},
{file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380"},
{file = "pydantic-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728"},
{file = "pydantic-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a"},
{file = "pydantic-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1"},
{file = "pydantic-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195"},
{file = "pydantic-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b"},
{file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49"},
{file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6"},
{file = "pydantic-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0"},
{file = "pydantic-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6"},
{file = "pydantic-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810"},
{file = "pydantic-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f"},
{file = "pydantic-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee"},
{file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761"},
{file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd"},
{file = "pydantic-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1"},
{file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"},
{file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"},
]
pyflakes = []
pyparsing = [
{file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
{file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
]
pytest = []
pytest-asyncio = []
pytest-cov = []
pytest-forked = []
pytest-xdist = []
pyyaml = [
{file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
{file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
{file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
{file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
{file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
{file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
{file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
{file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
{file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
{file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
{file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
{file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
{file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
{file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
{file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
{file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
{file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
]
rfc3986 = [
{file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
{file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
]
scikit-learn = []
scipy = []
shapely = []
sniffio = [
{file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"},
{file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"},
]
threadpoolctl = []
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
tomli = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
typing-extensions = []
virtualenv = []

View File

@ -0,0 +1,51 @@
[tool.poetry]
name = "genshinmap"
version = "0.1.0"
description = "GenshinMap 是一个米游社大地图 API 的包装,用于简易获取大地图数据"
authors = ["MingxuanGame <MingxuanGame@outlook.com>"]
license = "MIT"
[tool.poetry.dependencies]
python = ">=3.8,<3.12" # for scipy
scipy = "^1.9.0"
Pillow = "^9.2.0"
httpx = "^0.23.0"
pydantic = "^1.9.1"
numpy = "^1.23.1"
scikit-learn = "^1.1.2"
shapely = "^1.8.2"
[tool.poetry.dev-dependencies]
black = "^22.6.0"
flake8 = "^5.0.4"
isort = "^5.10.1"
pre-commit = "^2.20.0"
pytest = "^7.1.3"
pytest-asyncio = "^0.19.0"
pytest-cov = "^4.0.0"
pytest-xdist = "^2.5.0"
coverage = "^6.5.0"
[tool.black]
line-length = 79
target-version = ["py38", "py39", "py310", "py311"]
include = '\.pyi?$'
extend-exclude = '''
'''
[tool.isort]
profile = "black"
line_length = 79
length_sort = true
skip_gitignore = true
force_sort_within_sections = true
extra_standard_library = ["typing_extensions"]
[tool.pytest.ini_options]
asyncio_mode = "auto"
addopts = "--cov=genshinmap --cov-report=term-missing"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@ -0,0 +1,71 @@
{
"list": [
{
"id": "178",
"name": "列柱沙原",
"l_x": -6845,
"l_y": 4014,
"r_x": -5689,
"r_y": 5306,
"app_sn": "",
"parent_id": "0",
"map_id": "2",
"children": [
{
"id": "191",
"name": "秘仪圣殿",
"l_x": -5897,
"l_y": 4570,
"r_x": -5617,
"r_y": 4750,
"app_sn": "",
"parent_id": "178",
"map_id": "2",
"children": [],
"sort": 0
},
{
"id": "179",
"name": "赤王陵",
"l_x": -6461,
"l_y": 4482,
"r_x": -6081,
"r_y": 4848,
"app_sn": "",
"parent_id": "178",
"map_id": "2",
"children": [],
"sort": 0
}
],
"sort": 0
},
{
"id": "184",
"name": "上风蚀地",
"l_x": -5709,
"l_y": 4848,
"r_x": -4901,
"r_y": 5906,
"app_sn": "",
"parent_id": "0",
"map_id": "2",
"children": [
{
"id": "185",
"name": "荼诃落谷",
"l_x": -5367,
"l_y": 5164,
"r_x": -5053,
"r_y": 5780,
"app_sn": "",
"parent_id": "184",
"map_id": "2",
"children": [],
"sort": 0
}
],
"sort": 0
}
]
}

View File

@ -0,0 +1,13 @@
import asyncio
import pytest
@pytest.fixture(scope="session")
def event_loop():
    """Provide a single session-scoped event loop for async tests.

    A synchronous fixture never executes inside a running event loop, so
    the original ``asyncio.get_running_loop()`` call always raised
    ``RuntimeError``; create a fresh loop directly and close it once the
    test session finishes.
    """
    loop = asyncio.new_event_loop()
    yield loop
    loop.close()

View File

@ -0,0 +1,96 @@
{
"tree": [
{
"id": 1,
"name": "传送点",
"icon": "",
"parent_id": 0,
"depth": 1,
"node_type": 1,
"jump_type": 0,
"jump_target_id": 0,
"display_priority": 0,
"children": [
{
"id": 2,
"name": "七天神像",
"icon": "https://uploadstatic.mihoyo.com/ys-obc/2020/09/08/75276545/c59585d1fabc9c22ad3fcf94e1622aa8_357413506633071859.png",
"parent_id": 1,
"depth": 2,
"node_type": 2,
"jump_type": 0,
"jump_target_id": 0,
"display_priority": 0,
"children": [],
"activity_page_label": 27,
"area_page_label": [],
"is_all_area": true
},
{
"id": 3,
"name": "传送锚点",
"icon": "https://uploadstatic.mihoyo.com/ys-obc/2020/09/08/75276545/0cc42d15134cbb724304050fd0bbcaac_8799482478853097434.png",
"parent_id": 1,
"depth": 2,
"node_type": 2,
"jump_type": 0,
"jump_target_id": 0,
"display_priority": 0,
"children": [],
"activity_page_label": 27,
"area_page_label": [],
"is_all_area": true
}
],
"activity_page_label": 0,
"area_page_label": [],
"is_all_area": false
},
{
"id": 426,
"name": "地标",
"icon": "",
"parent_id": 0,
"depth": 1,
"node_type": 1,
"jump_type": 0,
"jump_target_id": 0,
"display_priority": 0,
"children": [
{
"id": 410,
"name": "洞口",
"icon": "https://uploadstatic.mihoyo.com/ys-obc/2022/08/10/75379475/d06eb3e4e17ad63d822a5065ee6f002e_2513888037572049765.png",
"parent_id": 426,
"depth": 2,
"node_type": 2,
"jump_type": 0,
"jump_target_id": 0,
"display_priority": 0,
"children": [],
"activity_page_label": 27,
"area_page_label": [25],
"is_all_area": false
},
{
"id": 190,
"name": "浪船锚点",
"icon": "https://uploadstatic.mihoyo.com/ys-obc/2021/06/10/16314655/6db2133bf163989dbff9c6a1e0c814a0_7631310718225262758.png",
"parent_id": 426,
"depth": 2,
"node_type": 2,
"jump_type": 0,
"jump_target_id": 0,
"display_priority": 0,
"children": [],
"activity_page_label": 24,
"area_page_label": [8],
"is_all_area": false
}
],
"activity_page_label": 0,
"area_page_label": [],
"is_all_area": false
}
]
}

View File

@ -0,0 +1,13 @@
{
"info": {
"id": 2,
"name": "提瓦特大地图",
"parent_id": 1,
"depth": 2,
"detail": "{\"slices\":[[{\"url\":\"https://uploadstatic.mihoyo.com/ys-obc/2022/09/15/75379475/0eec74713b864e6f639c9090484f8870_3432819210174318714.png\"},{\"url\":\"https://uploadstatic.mihoyo.com/ys-obc/2022/09/15/75379475/a56832ba1fe0fedd3b9a3c634df22d25_8286951540042186828.png\"},{\"url\":\"https://uploadstatic.mihoyo.com/ys-obc/2022/09/15/75379475/3c6dbcbfb72d21a87c3b886a8310067e_7249020933999162032.png\"},{\"url\":\"https://uploadstatic.mihoyo.com/ys-obc/2022/09/15/75379475/28971171ff5e668c45189c013bae1fdc_4601440331303570504.png\"}],[{\"url\":\"https://uploadstatic.mihoyo.com/ys-obc/2022/09/15/75379475/b5b9903d30037a728395b4c24e7a3998_4062420627465094766.png\"},{\"url\":\"https://uploadstatic.mihoyo.com/ys-obc/2022/09/15/75379475/aa19c8ddb19294bc2994d83456bd3b07_8029045233523466417.png\"},{\"url\":\"https://uploadstatic.mihoyo.com/ys-obc/2022/09/15/75379475/171d53d698db8625bd8270de3a2c04c6_6971423124470409050.png\"},{\"url\":\"https://uploadstatic.mihoyo.com/ys-obc/2022/09/15/75379475/5787c113e4c746352dd8bcb72418a344_7040357371920568328.png\"}],[{\"url\":\"https://uploadstatic.mihoyo.com/ys-obc/2022/09/15/75379475/47e448278ac5e80a1f3272085f78d3db_767778978113016811.png\"},{\"url\":\"https://uploadstatic.mihoyo.com/ys-obc/2022/09/15/75379475/2a5a4c3d9f3ba1d2b0781e50ee63fb71_7932745916637713891.png\"},{\"url\":\"https://uploadstatic.mihoyo.com/ys-obc/2022/09/15/75379475/b563941d22abfe9085dd61c2de711e32_5711770985239944753.png\"},{\"url\":\"https://uploadstatic.mihoyo.com/ys-obc/2022/09/15/75379475/2895d7ef091682cb9c1f75fdde5b891f_7629364203486646958.png\"}]],\"origin\":[8939,2286],\"total_size\":[16384,12288],\"padding\":[1024,512]}",
"node_type": 2,
"children": [],
"icon": "https://uploadstatic.mihoyo.com/ys-obc/2022/03/29/75379475/191a621844575ee5b8e44035834d051f_3328806497146477608.png",
"ch_ext": ""
}
}

View File

@ -0,0 +1,44 @@
{
"list": [
{
"id": 1,
"name": "蒙德",
"type": 1,
"pc_icon_url": "https://uploadstatic.mihoyo.com/ys-obc/2022/03/29/75379475/fa4770a33c59b94f2ba0d6e3608a9a47_196000061405650070.png",
"mobile_icon_url": "",
"sort": 5,
"pc_icon_url2": "https://uploadstatic.mihoyo.com/ys-obc/2022/03/29/75379475/15f8175ad96ab5f0a38bf14b9e359666_3990014784054466810.png",
"map_id": 2,
"jump_url": "",
"jump_type": "coordinate",
"center": "-111.96,-200.87",
"zoom": "-1.00"
},
{
"id": 4,
"name": "龙脊雪山",
"type": 1,
"pc_icon_url": "https://uploadstatic.mihoyo.com/ys-obc/2022/03/29/75379475/e60eb5568a5c8122d566bd40decc5371_9191303240951212319.png",
"mobile_icon_url": "",
"sort": 4,
"pc_icon_url2": "https://uploadstatic.mihoyo.com/ys-obc/2022/03/29/75379475/0ba7ceadf1a8a04bd698674371e6813d_6227119471395520312.png",
"map_id": 2,
"jump_url": "",
"jump_type": "coordinate",
"center": "1183.49,-116.41",
"zoom": "0.00"
},
{
"id": 3,
"name": "璃月",
"type": 1,
"pc_icon_url": "https://uploadstatic.mihoyo.com/ys-obc/2022/03/29/75379475/49985f158ed6060b98538fd3b62f6d0f_5835606737467298557.png",
"mobile_icon_url": "",
"sort": 3,
"pc_icon_url2": "https://uploadstatic.mihoyo.com/ys-obc/2022/03/29/75379475/fb1f9ec4eb36cd09bfb412f40a0af864_9071064856381496516.png",
"map_id": 2,
"jump_url": "",
"jump_type": "coordinate",
"center": "2602.08,-1612.55",
"zoom": "-1.00"
}]}

View File

@ -0,0 +1,31 @@
{
"point_list": [
{
"id": 26831,
"label_id": 298,
"x_pos": 114,
"y_pos": 514,
"author_name": "悦弥",
"ctime": "2022-06-16 10:26:12",
"display_state": 1
},
{
"id": 26832,
"label_id": 298,
"x_pos": 1919,
"y_pos": 810,
"author_name": "悦弥",
"ctime": "2022-06-16 10:26:12",
"display_state": 1
},
{
"id": 26831,
"label_id": 297,
"x_pos": -1287.5,
"y_pos": 716,
"author_name": "悦弥",
"ctime": "2022-06-16 10:26:12",
"display_state": 1
}
]
}

View File

@ -0,0 +1,17 @@
{
"retcode": 0,
"message": "OK",
"data": {
"list": [
{
"id": 581061,
"name": "",
"icon_id": 101,
"icon_url": "https://uploadstatic.mihoyo.com/hk4e/upload/officialsites/202011/wiki-ys-map-in-game-5_1605084458_2681.png",
"is_game": 2
}
],
"is_sync": false,
"already_share": false
}
}

View File

@ -0,0 +1,24 @@
{
"retcode": 0,
"message": "OK",
"data": {
"spots": {
"581061": {
"list": [
{
"id": 2267179,
"name": "",
"content": "",
"kind_id": 581061,
"spot_icon": "",
"x_pos": 416.3067626953125,
"y_pos": 57.92724609375,
"nick_name": "MingxuanGame1",
"avatar_url": "https://img-static.mihoyo.com/avatar/avatar40004.png",
"status": 1
}
]
}
}
}
}

View File

@ -0,0 +1,17 @@
import pytest
def test_exc() -> None:
    """StatusError should expose status/message and render them in str/repr.

    The raised message must be ``"err"`` to match the assertions below
    (the original raised ``"error"``, so the test could never pass).
    """
    from genshinmap.exc import StatusError

    def _raise() -> None:
        raise StatusError(1, "err")

    with pytest.raises(StatusError) as exc_info:
        _raise()
    exc = exc_info.value
    assert exc.status == 1
    assert exc.message == "err"
    assert str(exc) == "miHoYo API 1:1 err"
    assert repr(exc) == "<StatusError status=1, message=err>"

View File

@ -0,0 +1,21 @@
def test_k_means() -> None:
    """k_means_points should split nearby points into spatial clusters."""
    from genshinmap.models import XYPoint
    from genshinmap.img import k_means_points

    # Cluster 1: five points on the diagonal near (9, 9).
    dense_cluster = [XYPoint(v, v) for v in range(9, 14)]
    # Cluster 2: three points near (100, 100).
    sparse_cluster = [XYPoint(v, v) for v in range(100, 103)]

    clusters = k_means_points(dense_cluster + sparse_cluster, 15, 2)
    top_left, bottom_right, members = clusters[0]
    assert top_left == XYPoint(9, 9)
    assert bottom_right == XYPoint(13, 13)
    assert len(members) == 5

View File

@ -0,0 +1,17 @@
import json
from pathlib import Path
def test_anchor_points() -> None:
    """An Anchor should expose its children's left/right corner points."""
    from genshinmap.models import Anchor, XYPoint

    fixture = Path(__file__).parent / "anchors.json"
    with open(fixture, encoding="utf-8") as f:
        anchor = Anchor.parse_obj(json.load(f)["list"][0])

    expected_left = [XYPoint(-5897, 4570), XYPoint(-6461, 4482)]
    expected_right = [XYPoint(-5617, 4750), XYPoint(-6081, 4848)]
    assert anchor.get_children_all_left_point() == expected_left
    assert anchor.get_children_all_right_point() == expected_right

View File

@ -0,0 +1,245 @@
import json
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict, Optional
import pytest
DIR = Path(__file__).parent
if TYPE_CHECKING:
from httpx import Response
@pytest.mark.asyncio
async def test_labels(monkeypatch: pytest.MonkeyPatch) -> None:
    """get_labels should parse every entry of the label tree fixture."""
    with open(DIR / "labels.json", encoding="utf-8") as f:
        payload = json.load(f)

    async def _fake_request(endpoint: str) -> Dict[str, Any]:
        # Serve the fixture instead of hitting the network.
        return payload

    monkeypatch.setattr("genshinmap.request._request", _fake_request)
    from genshinmap.models import Tree
    from genshinmap.request import MapID, get_labels

    expected = [Tree.parse_obj(item) for item in payload["tree"]]
    assert await get_labels(MapID.teyvat) == expected
@pytest.mark.asyncio
async def test_points(monkeypatch: pytest.MonkeyPatch) -> None:
    """get_points should parse every entry of the point-list fixture."""
    with open(DIR / "points.json", encoding="utf-8") as f:
        payload = json.load(f)

    async def _fake_request(endpoint: str) -> Dict[str, Any]:
        # Serve the fixture instead of hitting the network.
        return payload

    monkeypatch.setattr("genshinmap.request._request", _fake_request)
    from genshinmap.models import Point
    from genshinmap.request import MapID, get_points

    expected = [Point.parse_obj(item) for item in payload["point_list"]]
    assert await get_points(MapID.teyvat) == expected
@pytest.mark.asyncio
async def test_maps(monkeypatch: pytest.MonkeyPatch) -> None:
    """get_maps should parse the map-info fixture into a MapInfo."""
    with open(DIR / "maps.json", encoding="utf-8") as f:
        payload = json.load(f)

    async def _fake_request(endpoint: str) -> Dict[str, Any]:
        # Serve the fixture instead of hitting the network.
        return payload

    monkeypatch.setattr("genshinmap.request._request", _fake_request)
    from genshinmap.models import MapInfo
    from genshinmap.request import MapID, get_maps

    expected = MapInfo.parse_obj(payload["info"])
    assert await get_maps(MapID.teyvat) == expected
@pytest.mark.asyncio
async def test_page_label(monkeypatch: pytest.MonkeyPatch) -> None:
    """get_page_label should parse every entry of the page fixture."""
    with open(DIR / "page.json", encoding="utf-8") as f:
        payload = json.load(f)

    async def _fake_request(endpoint: str) -> Dict[str, Any]:
        # Serve the fixture instead of hitting the network.
        return payload

    monkeypatch.setattr("genshinmap.request._request", _fake_request)
    from genshinmap.models import PageLabel
    from genshinmap.request import MapID, get_page_label

    expected = [PageLabel.parse_obj(item) for item in payload["list"]]
    assert await get_page_label(MapID.teyvat) == expected
@pytest.mark.asyncio
async def test_anchor(monkeypatch: pytest.MonkeyPatch) -> None:
    """get_anchors should parse every entry of the anchors fixture."""
    with open(DIR / "anchors.json", encoding="utf-8") as f:
        payload = json.load(f)

    async def _fake_request(endpoint: str) -> Dict[str, Any]:
        # Serve the fixture instead of hitting the network.
        return payload

    monkeypatch.setattr("genshinmap.request._request", _fake_request)
    from genshinmap.models import Anchor
    from genshinmap.request import MapID, get_anchors

    expected = [Anchor.parse_obj(item) for item in payload["list"]]
    assert await get_anchors(MapID.teyvat) == expected
@pytest.mark.asyncio
async def test_get_spot_from_game(monkeypatch: pytest.MonkeyPatch) -> None:
    """Exercise the full mocked three-step spot flow.

    Steps (as performed by ``get_spot_from_game``):
    1. POST to request a game-spot refresh;
    2. GET the spot kinds;
    3. POST again to fetch the coordinates.
    """
    with open(DIR / "spots" / "spots.json", encoding="utf-8") as f:
        spots = f.read()
    with open(DIR / "spots" / "kinds.json", encoding="utf-8") as f:
        kinds = f.read()
    # Fake httpx POST: dispatch on URL to mimic the two POST steps.
    async def _post(
        self,
        url: str,
        json: Dict[str, Any],
        headers: Optional[Dict[str, Any]] = None,
    ) -> "Response":
        from httpx import Request, Response
        if url == "/spot_kind/sync_game_spot":
            # 1. Request a refresh of the in-game spots.
            return Response(
                200,
                text='{"retcode":0,"message":"OK","data":{}}',
                request=Request("POST", url),
            )
        else:
            # 3. Fetch the spot coordinates.
            return Response(200, text=spots, request=Request("POST", url))
    # Fake httpx GET: 2. serve the spot kinds fixture.
    async def _get(self, url: str, headers: Dict[str, Any]) -> "Response":
        from httpx import Request, Response
        return Response(200, text=kinds, request=Request("GET", url))
    monkeypatch.setattr("httpx._client.AsyncClient.post", _post)
    monkeypatch.setattr("httpx._client.AsyncClient.get", _get)
    from genshinmap.models import SpotKinds
    from genshinmap.request import MapID, get_spot_from_game
    spot, kind = await get_spot_from_game(MapID.teyvat, "")
    # The returned spot dict must match the spots.json fixture verbatim.
    assert spot == {
        581061: [
            {
                "id": 2267179,
                "name": "",
                "content": "",
                "kind_id": 581061,
                "spot_icon": "",
                "x_pos": 416.3067626953125,
                "y_pos": 57.92724609375,
                "nick_name": "MingxuanGame1",
                "avatar_url": (
                    "https://img-static.mihoyo.com/avatar/avatar40004.png"
                ),
                "status": 1,
            }
        ]
    }
    # The kinds must round-trip through the SpotKinds model.
    assert kind == SpotKinds.parse_obj(json.loads(kinds)["data"])
@pytest.mark.asyncio
async def test_spot_status_error(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """get_spot_from_game should raise StatusError on a non-zero retcode."""
    async def _post(
        self,
        url: str,
        json: Dict[str, Any],
        headers: Optional[Dict[str, Any]] = None,
    ) -> "Response":
        from httpx import Request, Response

        body = '{"data":null,"message":"10分钟内只能操作一次","retcode":-2000}'
        return Response(200, text=body, request=Request("POST", url))

    monkeypatch.setattr("httpx._client.AsyncClient.post", _post)
    from genshinmap.exc import StatusError
    from genshinmap.request import MapID, get_spot_from_game

    with pytest.raises(StatusError) as exc_info:
        await get_spot_from_game(MapID.teyvat, "")

    raised = exc_info.value
    assert raised.status == -2000
    assert raised.message == "10分钟内只能操作一次"
@pytest.mark.asyncio
async def test_internal_request(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """_request should unwrap ``data`` from a successful (retcode 0) reply.

    Name fixed: this success-path body was previously named
    ``test_internal_request_status_error`` (the two names were swapped).
    """
    async def _get(self, url: str) -> "Response":
        from httpx import Request, Response
        return Response(
            200,
            text='{"retcode":0,"message":"OK","data":{"test": 1}}',
            request=Request("GET", url),
        )
    monkeypatch.setattr("httpx._client.AsyncClient.get", _get)
    from genshinmap.request import _request
    assert await _request("") == {"test": 1}


@pytest.mark.asyncio
async def test_internal_request_status_error(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """_request should raise StatusError when retcode is non-zero.

    Name fixed: this error-path body was previously named
    ``test_internal_request`` (the two names were swapped).
    """
    async def _get(self, url: str) -> "Response":
        from httpx import Request, Response
        return Response(
            200,
            text='{"retcode":1,"message":"err","data":null}',
            request=Request("GET", url),
        )
    monkeypatch.setattr("httpx._client.AsyncClient.get", _get)
    from genshinmap.exc import StatusError
    from genshinmap.request import _request
    with pytest.raises(StatusError) as exc_info:
        await _request("")
    exc = exc_info.value
    assert exc.status == 1
    assert exc.message == "err"
@pytest.mark.asyncio
@pytest.mark.parametrize(argnames="map_id", argvalues=[2, 7, 9])
async def test_connection(map_id) -> None:
    """Smoke-test the live map API: every endpoint returns truthy data."""
    from genshinmap.request import (
        get_maps,
        get_labels,
        get_points,
        get_anchors,
        get_page_label,
    )

    # Same call order as before: labels, anchors, page labels, maps, points.
    for endpoint in (get_labels, get_anchors, get_page_label, get_maps, get_points):
        assert await endpoint(map_id)

View File

@ -0,0 +1,94 @@
import json
from pathlib import Path
DIR = Path(__file__).parent
def test_get_points_by_id() -> None:
    """get_points_by_id should return XY points for the given label id only."""
    from genshinmap.models import Point, XYPoint
    from genshinmap.utils import get_points_by_id

    # encoding="utf-8": the fixture contains non-ASCII author names, so
    # relying on the platform default codec (e.g. cp936/cp1252 on
    # Windows) would raise; the sibling tests already pass it explicitly.
    with open(DIR / "points.json", encoding="utf-8") as f:
        points = [Point.parse_obj(i) for i in json.load(f)["point_list"]]
    assert get_points_by_id(298, points) == [
        XYPoint(114, 514),
        XYPoint(1919, 810),
    ]
def test_convert_pos() -> None:
    """convert_pos should translate every point by the map origin."""
    from genshinmap.models import XYPoint
    from genshinmap.utils import convert_pos

    raw_points = [XYPoint(1200, 5000), XYPoint(-4200, 1800)]
    map_origin = [4844, 4335]
    converted = convert_pos(raw_points, map_origin)
    assert converted == [
        XYPoint(x=6044, y=9335),
        XYPoint(x=644, y=6135),
    ]
def test_convert_pos_crop() -> None:
    """convert_pos_crop should shift points into a 4096px tile's frame."""
    from genshinmap.models import XYPoint
    from genshinmap.utils import convert_pos_crop

    pts = [XYPoint(0, 0), XYPoint(20, 20)]
    # Tile index -> expected translated points.
    expectations = {
        0: pts,
        1: [XYPoint(-4096, 0), XYPoint(-4076, 20)],
        4: [XYPoint(0, -4096), XYPoint(20, -4076)],
        5: [XYPoint(-4096, -4096), XYPoint(-4076, -4076)],
    }
    for tile_index, expected in expectations.items():
        assert convert_pos_crop(tile_index, pts) == expected
def test_internal_generate_matrix() -> None:
    """_generate_matrix should enumerate tile indices in a 4-column grid."""
    from genshinmap.utils import _generate_matrix

    cases = [
        ((0, 3, 8), list(range(12))),
        ((0, 1, 4), [0, 1, 4, 5]),
        ((0, 2, 4), [0, 1, 2, 4, 5, 6]),
        ((0, 0, 4), [0, 4]),
        ((0, 0, 8), [0, 4, 8]),
        ((0, 2, 0), [0, 1, 2]),
        ((0, 2, 8), [0, 1, 2, 4, 5, 6, 8, 9, 10]),
        ((1, 3, 5), [1, 2, 3, 5, 6, 7]),
    ]
    for args, expected in cases:
        assert _generate_matrix(*args) == expected
def test_internal_pos_to_index() -> None:
    """_pos_to_index should map pixel coordinates to a 4096px tile index."""
    from genshinmap.utils import _pos_to_index

    coordinate_to_index = {
        (0, 0): 0,
        (4096, 0): 1,
        (0, 4096): 4,
        (4096, 4096): 5,
    }
    for (x, y), expected in coordinate_to_index.items():
        assert _pos_to_index(x, y) == expected
def test_crop_image_and_points() -> None:
    """crop_image_and_points should select tiles and translate the points."""
    from genshinmap.models import XYPoint
    from genshinmap.utils import crop_image_and_points

    # Two points inside tiles 5 and 9: coordinates become tile-relative.
    tiles, flag, translated = crop_image_and_points(
        [XYPoint(x=4200, y=8000), XYPoint(x=4150, y=10240)]
    )
    assert tiles == [5, 9]
    assert flag == 0
    assert translated == [XYPoint(x=104, y=3904), XYPoint(x=54, y=6144)]

    # Points straddling the 2x2 tile corner: all four tiles are kept and
    # the coordinates pass through unchanged.
    corner_points = [
        XYPoint(x=0, y=0),
        XYPoint(x=20, y=20),
        XYPoint(x=4096, y=0),
        XYPoint(x=4116, y=20),
        XYPoint(x=0, y=4096),
        XYPoint(x=20, y=4116),
        XYPoint(x=4096, y=4096),
        XYPoint(x=4116, y=4116),
    ]
    assert crop_image_and_points(corner_points) == (
        [0, 1, 4, 5],
        1,
        corner_points,
    )

View File

@ -0,0 +1,188 @@
import random
from io import BytesIO
from pathlib import Path
from time import time
from fastapi import APIRouter, HTTPException, Query
from fastapi.responses import FileResponse, StreamingResponse
from PIL import Image
from .GenshinMap.genshinmap import img, models, request, utils
from .logger import logger
# Raise Pillow's decompression-bomb limit: the stitched maps are huge.
Image.MAX_IMAGE_PIXELS = 333120000
router = APIRouter(prefix='/get_map')
# Static marker textures shipped with the service.
TEXT_PATH = Path(__file__).parent / 'texture2d'
# Quest marker, pre-resized to 32x32 for pasting onto crops.
mark_quest = Image.open(TEXT_PATH / 'mark_quest.png').resize((32, 32))
# Cache directories: full rendered maps and per-resource crops.
MAP = Path(__file__).parent / 'map_data'
RESOURCE_PATH = Path(__file__).parent / 'resource_data'
# Cached full-map images produced at startup.
CHASM_PATH = MAP / 'chasm.png'
ENKANOMIYA_PATH = MAP / 'enkanomiya.png'
TEYVAT_PATH = MAP / 'teyvat.png'
# Map ids accepted by the endpoint; keys are the string form sent by clients.
MAP_ID_DICT = {
    '2': models.MapID.teyvat,  # Teyvat
    '9': models.MapID.chasm,  # The Chasm
    '7': models.MapID.enkanomiya,  # Enkanomiya
    # MapID.golden_apple_archipelago,  # Golden Apple Archipelago
}
@router.on_event('startup')
async def create_genshin_map():
    """Pre-render each base map with statue and waypoint markers at startup.

    Skips all work when every cached map image already exists, otherwise
    downloads the map tiles and point data, pastes the markers, and
    saves one PNG per map under ``MAP``.
    """
    if CHASM_PATH.exists() and ENKANOMIYA_PATH.exists() and TEYVAT_PATH.exists():
        logger.info('****************** 开始地图API服务 *****************')
        return
    logger.info('****************** 地图API服务进行初始化 *****************')
    mark_god_pic = Image.open(TEXT_PATH / 'mark_god.png')
    mark_trans_pic = Image.open(TEXT_PATH / 'mark_trans.png')
    # Ensure the cache directory exists once, up front
    # (previously re-checked inside the loop).
    if not MAP.exists():
        MAP.mkdir()
    for map_id in models.MapID:
        # One metadata request per map (the original fetched it twice).
        maps = await request.get_maps(map_id)
        points = await request.get_points(map_id)
        # Statue of the Seven anchors (label id 2).
        mark_god = utils.get_points_by_id(2, points)
        # Teleport waypoint anchors (label id 3).
        mark_trans = utils.get_points_by_id(3, points)
        # Convert both anchor sets to absolute pixel coordinates.
        mark_god_converted = utils.convert_pos(mark_god, maps.detail.origin)
        mark_trans_converted = utils.convert_pos(mark_trans, maps.detail.origin)
        map_img = await utils.make_map(maps.detail)
        # Paste the markers; (x - 32, y - 64) centres the icon's base tip.
        for mark_god_point in mark_god_converted:
            map_img.paste(
                mark_god_pic,
                (int(mark_god_point.x) - 32, int(mark_god_point.y) - 64),
                mark_god_pic,
            )
        for mark_trans_point in mark_trans_converted:
            map_img.paste(
                mark_trans_pic,
                (int(mark_trans_point.x) - 32, int(mark_trans_point.y) - 64),
                mark_trans_pic,
            )
        map_img.save(MAP / f'{map_id.name}.png')
    logger.info('****************** 开始地图API服务 *****************')
@router.get('')
async def get_map_by_point(resource_name=Query(None), map_id=Query(None)):
    """Serve a cropped map image highlighting every point of a resource.

    Query params:
        resource_name: display name of the resource label to mark.
        map_id: string map id; must be a key of ``MAP_ID_DICT``.
            (Was ``Query(str)``, which used the ``str`` class itself as
            the default value; ``None`` takes the same "unknown map id"
            error path when the parameter is omitted.)

    Returns a cached JPEG via FileResponse, or a retcode/message dict
    on invalid input.
    """
    req_id = random.randint(10000, 99999)
    prefix = f'>> [请求序列:{req_id}]'
    logger.info(f'{prefix} 收到资源点访问请求! [资源名称] {resource_name} [地图ID] {map_id}')
    ERROR = {
        'retcode': -1,
        'message': f'该资源点 - {resource_name} 不存在!',
    }
    # Reject unknown map ids early.
    if map_id not in MAP_ID_DICT:
        logger.warning(f'{prefix} 请求失败! 原因: 该地图ID [{map_id}] 不存在!')
        return {
            'retcode': -1,
            'message': f'该地图ID - {map_id} 不存在!',
        }
    # Base map image rendered at startup.
    map_data = MAP_ID_DICT[map_id]
    map_path = MAP / f'{map_data.name}.png'
    # Per-resource crop cache; serve a cache hit directly.
    if not RESOURCE_PATH.exists():
        RESOURCE_PATH.mkdir()
    save_path = RESOURCE_PATH / f'{map_data.name}_{resource_name}.jpg'
    if save_path.exists():
        logger.info(f'{prefix} [成功] [资源名称] {resource_name} 已有缓存, 直接发送!')
        return FileResponse(save_path)
    logger.info(f'{prefix} [资源名称] {resource_name} 暂无缓存, 开始执行绘制...')
    maps = await request.get_maps(map_id)
    labels = await request.get_labels(map_id)
    # Resolve the resource name to its label id.
    resource_id = 0
    for label in labels:
        for child in label.children:
            if resource_name == child.name:
                resource_id = child.id
                resource_name = child.name
                break
    if resource_id == 0:
        logger.warning(f'{prefix} 请求失败! 原因: 该资源点 [{resource_name}] 不存在!')
        return ERROR
    # Fetch all points and keep the ones belonging to this resource,
    # converted to absolute pixel coordinates.
    points = await request.get_points(map_id)
    transmittable = utils.get_points_by_id(resource_id, points)
    transmittable_converted = utils.convert_pos(transmittable, maps.detail.origin)
    # The label exists but carries no points on this map.
    if not transmittable_converted:
        return ERROR
    # Bounding box of all resource points, padded by ``offset`` pixels.
    up = 20000
    down = 0
    left = 20000
    right = 0
    for point in transmittable_converted:
        right = max(right, point.x)
        left = min(left, point.x)
        down = max(down, point.y)
        up = min(up, point.y)
    offset = 100
    group_point = [
        [
            models.XYPoint(left - offset, up - offset),
            models.XYPoint(right + offset, down + offset),
            transmittable_converted,
        ]
    ]
    # Crop the base map to the padded bounding box.
    genshin_map = Image.open(map_path)
    lt_point = group_point[0][0]
    rb_point = group_point[0][1]
    genshin_map = genshin_map.crop(
        (int(lt_point.x), int(lt_point.y), int(rb_point.x), int(rb_point.y))
    )
    # Paste a quest marker centred on every resource point.
    for point in group_point[0][2]:
        point_trans = (
            int(point.x) - int(lt_point.x),
            int(point.y) - int(lt_point.y),
        )
        genshin_map.paste(
            mark_quest, (point_trans[0] - 16, point_trans[1] - 16), mark_quest
        )
    # JPEG has no alpha channel, so convert before saving to the cache.
    genshin_map = genshin_map.convert('RGB')
    genshin_map.save(save_path, 'JPEG', quality=85)
    logger.info(f'{prefix} [成功] [资源名称] {resource_name} 绘制完成!')
    return FileResponse(save_path)

View File

@ -0,0 +1,24 @@
import logging
import fastapi
import rollbar
from rollbar.contrib.fastapi import LoggerMiddleware
from rollbar.logger import RollbarHandler
# Initialize Rollbar SDK with your server-side access token.
# NOTE(review): 'ACCESS_TOKEN' is a literal placeholder — replace with a
# real token (ideally loaded from an environment variable) before deploy.
rollbar.init(
    'ACCESS_TOKEN',
    environment='staging',
    handler='async',
)
# Module-level logger emitting INFO and above.
# (The previous comment said "root logger ... DEBUG", which did not
# match the code: this is the module logger, set to INFO.)
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# Report ERROR and above to Rollbar
rollbar_handler = RollbarHandler()
rollbar_handler.setLevel(logging.ERROR)
# Attach the Rollbar handler to this module's logger (not the root logger).
logger.addHandler(rollbar_handler)

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.6 KiB

View File

@ -0,0 +1,26 @@
import logging
from fastapi import FastAPI
from GetMapImage import get_map_image
# FastAPI application exposing the Genshin map endpoints.
gsm = FastAPI(title='GsMAP', description='GenshinMap API')
# Mount the map-image router under /map (full path: /map/get_map).
gsm.include_router(get_map_image.router, prefix='/map')
@gsm.get("/")
def read_root():
    """Root endpoint: return a short descriptive greeting payload."""
    payload = {'message': '这是一个原神地图API'}
    return payload
class EndpointFilter(logging.Filter):
    """Drop log records whose rendered message contains a '/'.

    NOTE(review): every HTTP access-log line contains a path with '/',
    so attaching this to ``uvicorn.access`` silences essentially all
    access logs — confirm this is intended rather than filtering a
    single endpoint.
    """

    def filter(self, record: logging.LogRecord) -> bool:
        return '/' not in record.getMessage()
# Attach the filter so matching uvicorn access-log lines are suppressed.
logging.getLogger("uvicorn.access").addFilter(EndpointFilter())
if __name__ == '__main__':
    import uvicorn
    # Run the dev server on all interfaces, port 5000, when invoked directly.
    uvicorn.run(app=gsm, host='0.0.0.0', port=5000)