# Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2019 Dan Tès <https://github.com/delivrance>
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.

import asyncio
import logging
import math
import os
import re
import shutil
import tempfile
import time
from configparser import ConfigParser
from hashlib import sha256, md5
from importlib import import_module
from pathlib import Path
from signal import signal, SIGINT, SIGTERM, SIGABRT
from typing import Union, List

from pyrogram.api import functions, types
from pyrogram.api.core import TLObject
from pyrogram.client.handlers import DisconnectHandler
from pyrogram.client.handlers.handler import Handler
from pyrogram.client.methods.password.utils import compute_check
from pyrogram.crypto import AES
from pyrogram.errors import (
    PhoneMigrate, NetworkMigrate, SessionPasswordNeeded,
    FloodWait, PeerIdInvalid, VolumeLocNotFound, UserMigrate, ChannelPrivate, AuthBytesInvalid,
    BadRequest)
from pyrogram.session import Auth, Session
from .ext import utils, Syncer, BaseClient, Dispatcher
from .ext.utils import ainput
from .methods import Methods
from .storage import Storage, FileStorage, MemoryStorage
from .types import User, SentCode, TermsOfService

log = logging.getLogger(__name__)


class Client(Methods, BaseClient):
"""Pyrogram Client, the main means for interacting with Telegram.
|
2017-12-29 20:44:45 +00:00
|
|
|
|
2019-05-09 02:28:46 +00:00
|
|
|
Parameters:
|
2018-03-25 19:41:19 +00:00
|
|
|
session_name (``str``):
|
2019-06-19 14:10:37 +00:00
|
|
|
Pass a string of your choice to give a name to the client session, e.g.: "*my_account*". This name will be
|
|
|
|
used to save a file on disk that stores details needed to reconnect without asking again for credentials.
|
|
|
|
Alternatively, if you don't want a file to be saved on disk, pass the special name "**:memory:**" to start
|
|
|
|
an in-memory session that will be discarded as soon as you stop the Client. In order to reconnect again
|
|
|
|
using a memory storage without having to login again, you can use
|
|
|
|
:meth:`~pyrogram.Client.export_session_string` before stopping the client to get a session string you can
|
|
|
|
pass here as argument.
|
2018-01-03 16:39:12 +00:00
|
|
|
|
2019-09-08 06:42:12 +00:00
|
|
|
api_id (``int`` | ``str``, *optional*):
|
|
|
|
The *api_id* part of your Telegram API Key, as integer. E.g.: "12345".
|
2018-04-05 10:55:34 +00:00
|
|
|
This is an alternative way to pass it if you don't want to use the *config.ini* file.
|
|
|
|
|
2018-04-30 17:30:16 +00:00
|
|
|
api_hash (``str``, *optional*):
|
2019-05-23 16:59:29 +00:00
|
|
|
The *api_hash* part of your Telegram API Key, as string. E.g.: "0123456789abcdef0123456789abcdef".
|
2019-09-08 06:42:12 +00:00
|
|
|
This is an alternative way to set it if you don't want to use the *config.ini* file.
|
2018-02-21 12:31:27 +00:00
|
|
|
|
2018-06-24 17:08:52 +00:00
|
|
|
app_version (``str``, *optional*):
|
2019-09-08 06:42:12 +00:00
|
|
|
Application version. Defaults to "Pyrogram |version|".
|
2018-06-24 17:08:52 +00:00
|
|
|
This is an alternative way to set it if you don't want to use the *config.ini* file.
|
|
|
|
|
|
|
|
device_model (``str``, *optional*):
|
2019-09-08 06:42:12 +00:00
|
|
|
Device model. Defaults to *platform.python_implementation() + " " + platform.python_version()*.
|
2018-06-24 17:08:52 +00:00
|
|
|
This is an alternative way to set it if you don't want to use the *config.ini* file.
|
|
|
|
|
|
|
|
system_version (``str``, *optional*):
|
2019-09-08 06:42:12 +00:00
|
|
|
Operating System version. Defaults to *platform.system() + " " + platform.release()*.
|
2018-06-24 17:08:52 +00:00
|
|
|
This is an alternative way to set it if you don't want to use the *config.ini* file.
|
|
|
|
|
|
|
|
lang_code (``str``, *optional*):
|
|
|
|
Code of the language used on the client, in ISO 639-1 standard. Defaults to "en".
|
|
|
|
This is an alternative way to set it if you don't want to use the *config.ini* file.
|
|
|
|
|
2018-09-22 12:21:55 +00:00
|
|
|
ipv6 (``bool``, *optional*):
|
|
|
|
Pass True to connect to Telegram using IPv6.
|
|
|
|
Defaults to False (IPv4).
|
|
|
|
|
2018-04-30 17:30:16 +00:00
|
|
|
proxy (``dict``, *optional*):
|
2018-03-24 14:02:03 +00:00
|
|
|
Your SOCKS5 Proxy settings as dict,
|
|
|
|
e.g.: *dict(hostname="11.22.33.44", port=1080, username="user", password="pass")*.
|
2019-09-08 06:42:12 +00:00
|
|
|
The *username* and *password* can be omitted if your proxy doesn't require authorization.
|
2018-02-21 12:31:27 +00:00
|
|
|
This is an alternative way to setup a proxy if you don't want to use the *config.ini* file.
|
|
|
|
|
2018-04-30 17:30:16 +00:00
|
|
|
test_mode (``bool``, *optional*):
|
2019-09-08 06:42:12 +00:00
|
|
|
Enable or disable login to the test servers.
|
|
|
|
Only applicable for new sessions and will be ignored in case previously created sessions are loaded.
|
|
|
|
Defaults to False.
|
2018-01-24 20:46:28 +00:00
|
|
|
|
2019-05-16 19:28:34 +00:00
|
|
|
bot_token (``str``, *optional*):
|
|
|
|
Pass your Bot API token to create a bot session, e.g.: "123456:ABC-DEF1234ghIkl-zyx57W2v1u123ew11"
|
|
|
|
Only applicable for new sessions.
|
2019-08-08 13:07:46 +00:00
|
|
|
This is an alternative way to set it if you don't want to use the *config.ini* file.
|
2019-05-16 19:28:34 +00:00
|
|
|
|
2019-09-08 06:42:12 +00:00
|
|
|
phone_number (``str``, *optional*):
|
2019-01-08 13:28:52 +00:00
|
|
|
Pass your phone number as string (with your Country Code prefix included) to avoid entering it manually.
|
|
|
|
Only applicable for new sessions.
|
2018-01-24 20:46:28 +00:00
|
|
|
|
2019-09-08 06:42:12 +00:00
|
|
|
phone_code (``str``, *optional*):
|
|
|
|
Pass the phone code as string (for test numbers only) to avoid entering it manually.
|
2018-01-24 20:46:28 +00:00
|
|
|
Only applicable for new sessions.
|
|
|
|
|
2018-04-30 17:30:16 +00:00
|
|
|
password (``str``, *optional*):
|
2019-01-08 13:28:52 +00:00
|
|
|
Pass your Two-Step Verification password as string (if you have one) to avoid entering it manually.
|
|
|
|
Only applicable for new sessions.
|
2018-01-24 20:46:28 +00:00
|
|
|
|
2019-09-08 06:42:12 +00:00
|
|
|
force_sms (``bool``, *optional*):
|
2018-04-01 16:17:20 +00:00
|
|
|
Pass True to force Telegram sending the authorization code via SMS.
|
|
|
|
Only applicable for new sessions.
|
2019-09-08 06:42:12 +00:00
|
|
|
Defaults to False.
|
2018-01-26 10:49:07 +00:00
|
|
|
|
2018-04-30 17:30:16 +00:00
|
|
|
workers (``int``, *optional*):
|
2019-09-08 11:26:10 +00:00
|
|
|
Number of maximum concurrent workers for handling incoming updates.
|
2019-09-08 06:42:12 +00:00
|
|
|
Defaults to 4.
|
2018-04-22 10:41:50 +00:00
|
|
|
|
2018-04-30 17:30:16 +00:00
|
|
|
workdir (``str``, *optional*):
|
2019-09-08 06:42:12 +00:00
|
|
|
Define a custom working directory. The working directory is the location in your filesystem where Pyrogram
|
|
|
|
will store your session files.
|
|
|
|
Defaults to the parent directory of the main script.
|
2018-05-11 11:10:49 +00:00
|
|
|
|
|
|
|
config_file (``str``, *optional*):
|
2019-09-08 06:42:12 +00:00
|
|
|
Path of the configuration file.
|
|
|
|
Defaults to ./config.ini
|
2018-10-13 09:14:26 +00:00
|
|
|
|
2019-01-16 14:40:02 +00:00
|
|
|
plugins (``dict``, *optional*):
|
2019-01-27 10:24:23 +00:00
|
|
|
Your Smart Plugins settings as dict, e.g.: *dict(root="plugins")*.
|
2019-09-08 06:42:12 +00:00
|
|
|
This is an alternative way setup plugins if you don't want to use the *config.ini* file.
|
2019-01-03 10:13:24 +00:00
|
|
|
|
|
|
|
no_updates (``bool``, *optional*):
|
|
|
|
Pass True to completely disable incoming updates for the current session.
|
|
|
|
When updates are disabled your client can't receive any new message.
|
|
|
|
Useful for batch programs that don't need to deal with updates.
|
|
|
|
Defaults to False (updates enabled and always received).
|
2019-01-03 11:20:42 +00:00
|
|
|
|
|
|
|
takeout (``bool``, *optional*):
|
2019-05-23 16:59:29 +00:00
|
|
|
Pass True to let the client use a takeout session instead of a normal one, implies *no_updates=True*.
|
2019-01-03 11:20:42 +00:00
|
|
|
Useful for exporting your Telegram data. Methods invoked inside a takeout session (such as get_history,
|
|
|
|
download_media, ...) are less prone to throw FloodWait exceptions.
|
|
|
|
Only available for users, bots will ignore this parameter.
|
|
|
|
Defaults to False (normal session).
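
    Example:
        A minimal construction sketch; the API credentials below are placeholders taken from the parameter
        descriptions above, not working values.

        .. code-block:: python

            from pyrogram import Client

            app = Client(
                "my_account",
                api_id=12345,
                api_hash="0123456789abcdef0123456789abcdef"
            )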
    """

    def __init__(
        self,
        session_name: Union[str, Storage],
        api_id: Union[int, str] = None,
        api_hash: str = None,
        app_version: str = None,
        device_model: str = None,
        system_version: str = None,
        lang_code: str = None,
        ipv6: bool = False,
        proxy: dict = None,
        test_mode: bool = False,
        bot_token: str = None,
        phone_number: str = None,
        phone_code: str = None,
        password: str = None,
        force_sms: bool = False,
        workers: int = BaseClient.WORKERS,
        workdir: str = BaseClient.WORKDIR,
        config_file: str = BaseClient.CONFIG_FILE,
        plugins: dict = None,
        no_updates: bool = None,
        takeout: bool = None
    ):
        super().__init__()

        self.session_name = session_name
        self.api_id = int(api_id) if api_id else None
        self.api_hash = api_hash
        self.app_version = app_version
        self.device_model = device_model
        self.system_version = system_version
        self.lang_code = lang_code
        self.ipv6 = ipv6
        # TODO: Make code consistent, use underscore for private/protected fields
        self._proxy = proxy
        self.test_mode = test_mode
        self.bot_token = bot_token
        self.phone_number = phone_number
        self.phone_code = phone_code
        self.password = password
        self.force_sms = force_sms
        self.workers = workers
        self.workdir = Path(workdir)
        self.config_file = Path(config_file)
        self.plugins = plugins
        self.no_updates = no_updates
        self.takeout = takeout
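
        # A plain string selects the storage engine: ":memory:" or a string at least as long as a
        # session string means in-memory storage, any other string means file storage in the working
        # directory. A Storage instance passed in directly is used as-is.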
        if isinstance(session_name, str):
            if session_name == ":memory:" or len(session_name) >= MemoryStorage.SESSION_STRING_SIZE:
                session_name = re.sub(r"[\n\s]+", "", session_name)
                self.storage = MemoryStorage(session_name)
            else:
                self.storage = FileStorage(session_name, self.workdir)
        elif isinstance(session_name, Storage):
            self.storage = session_name
        else:
            raise ValueError("Unknown storage engine")

        self.dispatcher = Dispatcher(self, workers)

    def __enter__(self):
        return self.start()

    def __exit__(self, *args):
        try:
            self.stop()
        except ConnectionError:
            pass

    async def __aenter__(self):
        return await self.start()

    async def __aexit__(self, *args):
        await self.stop()

    @property
    def proxy(self):
        return self._proxy

    @proxy.setter
    def proxy(self, value):
        if value is None:
            self._proxy = None
            return

        if self._proxy is None:
            self._proxy = {}

        self._proxy["enabled"] = bool(value.get("enabled", True))
        self._proxy.update(value)

    async def connect(self) -> bool:
        """
        Connect the client to Telegram servers.

        Returns:
            ``bool``: On success, in case the passed-in session is authorized, True is returned. Otherwise, in case
            the session needs to be authorized, False is returned.

        Raises:
            ConnectionError: In case you try to connect an already connected client.
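
        Example:
            A minimal sketch of the lower-level flow; :meth:`~Client.start` normally wraps this for you.

            .. code-block:: python

                app = Client("my_account")

                is_authorized = app.connect()
                print(is_authorized)

                app.disconnect()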
        """
        if self.is_connected:
            raise ConnectionError("Client is already connected")

        self.load_config()
        await self.load_session()

        self.session = Session(self, self.storage.dc_id(), self.storage.auth_key())

        await self.session.start()

        self.is_connected = True

        return bool(self.storage.user_id())

    async def disconnect(self):
        """Disconnect the client from Telegram servers.

        Raises:
            ConnectionError: In case you try to disconnect an already disconnected client or in case you try to
                disconnect a client that needs to be terminated first.
        """
        if not self.is_connected:
            raise ConnectionError("Client is already disconnected")

        if self.is_initialized:
            raise ConnectionError("Can't disconnect an initialized client")

        await self.session.stop()
        self.storage.close()
        self.is_connected = False

    async def initialize(self):
        """Initialize the client by starting up workers.

        This method will start updates and download workers.
        It will also load plugins and start the internal dispatcher.

        Raises:
            ConnectionError: In case you try to initialize a disconnected client or in case you try to initialize an
                already initialized client.
        """
        if not self.is_connected:
            raise ConnectionError("Can't initialize a disconnected client")

        if self.is_initialized:
            raise ConnectionError("Client is already initialized")

        self.load_plugins()

        for _ in range(Client.UPDATES_WORKERS):
            self.updates_worker_tasks.append(
                asyncio.ensure_future(self.updates_worker())
            )

        logging.info("Started {} UpdatesWorkerTasks".format(Client.UPDATES_WORKERS))

        for _ in range(Client.DOWNLOAD_WORKERS):
            self.download_worker_tasks.append(
                asyncio.ensure_future(self.download_worker())
            )

        logging.info("Started {} DownloadWorkerTasks".format(Client.DOWNLOAD_WORKERS))

        await self.dispatcher.start()
        await Syncer.add(self)

        self.is_initialized = True

    async def terminate(self):
        """Terminate the client by shutting down workers.

        This method does the opposite of :meth:`~Client.initialize`.
        It will stop the dispatcher and shut down updates and download workers.

        Raises:
            ConnectionError: In case you try to terminate a client that is already terminated.
        """
        if not self.is_initialized:
            raise ConnectionError("Client is already terminated")

        if self.takeout_id:
            await self.send(functions.account.FinishTakeoutSession())
            log.warning("Takeout session {} finished".format(self.takeout_id))

        await Syncer.remove(self)
        await self.dispatcher.stop()

        for _ in range(Client.DOWNLOAD_WORKERS):
            self.download_queue.put_nowait(None)

        for task in self.download_worker_tasks:
            await task

        self.download_worker_tasks.clear()

        logging.info("Stopped {} DownloadWorkerTasks".format(Client.DOWNLOAD_WORKERS))

        for _ in range(Client.UPDATES_WORKERS):
            self.updates_queue.put_nowait(None)

        for task in self.updates_worker_tasks:
            await task

        self.updates_worker_tasks.clear()

        logging.info("Stopped {} UpdatesWorkerTasks".format(Client.UPDATES_WORKERS))

        for media_session in self.media_sessions.values():
            await media_session.stop()

        self.media_sessions.clear()

        self.is_initialized = False

    async def send_code(self, phone_number: str) -> SentCode:
        """Send the confirmation code to the given phone number.

        Parameters:
            phone_number (``str``):
                Phone number in international format (includes the country prefix).

        Returns:
            :obj:`SentCode`: On success, an object containing information on the sent confirmation code is returned.

        Raises:
            BadRequest: In case the phone number is invalid.
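
        Example:
            A usage sketch, assuming the client is already connected; the phone number below is a placeholder.

            .. code-block:: python

                sent_code = app.send_code("+10123456789")
                print(sent_code)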
        """
        phone_number = phone_number.strip(" +")

        while True:
            try:
                r = await self.send(
                    functions.auth.SendCode(
                        phone_number=phone_number,
                        api_id=self.api_id,
                        api_hash=self.api_hash,
                        settings=types.CodeSettings()
                    )
                )
            except (PhoneMigrate, NetworkMigrate) as e:
                await self.session.stop()

                self.storage.dc_id(e.x)
                self.storage.auth_key(await Auth(self, self.storage.dc_id()).create())
                self.session = Session(self, self.storage.dc_id(), self.storage.auth_key())

                await self.session.start()
            else:
                return SentCode._parse(r)

    async def resend_code(self, phone_number: str, phone_code_hash: str) -> SentCode:
        """Re-send the confirmation code using a different type.

        The type of the code to be re-sent is specified in the *next_type* attribute of the :obj:`SentCode` object
        returned by :meth:`send_code`.

        Parameters:
            phone_number (``str``):
                Phone number in international format (includes the country prefix).

            phone_code_hash (``str``):
                Confirmation code identifier.

        Returns:
            :obj:`SentCode`: On success, an object containing information on the re-sent confirmation code is returned.

        Raises:
            BadRequest: In case the arguments are invalid.
        """
        phone_number = phone_number.strip(" +")

        r = await self.send(
            functions.auth.ResendCode(
                phone_number=phone_number,
                phone_code_hash=phone_code_hash
            )
        )

        return SentCode._parse(r)

    async def sign_in(self, phone_number: str, phone_code_hash: str, phone_code: str) -> Union[
        User, TermsOfService, bool]:
        """Authorize a user in Telegram with a valid confirmation code.

        Parameters:
            phone_number (``str``):
                Phone number in international format (includes the country prefix).

            phone_code_hash (``str``):
                Code identifier taken from the result of :meth:`~Client.send_code`.

            phone_code (``str``):
                The valid confirmation code you received (either as Telegram message or as SMS in your phone number).

        Returns:
            :obj:`User` | :obj:`TermsOfService` | bool: On success, in case the authorization completed, the user is
            returned. In case the phone number needs to be registered first AND the terms of services accepted (with
            :meth:`~Client.accept_terms_of_service`), an object containing them is returned. In case the phone number
            needs to be registered, but the terms of services don't need to be accepted, False is returned instead.

        Raises:
            BadRequest: In case the arguments are invalid.
            SessionPasswordNeeded: In case a password is needed to sign in.
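
        Example:
            A sketch of the manual sign in flow; the phone number and code below are placeholders.

            .. code-block:: python

                phone = "+10123456789"

                sent_code = app.send_code(phone)
                signed_in = app.sign_in(phone, sent_code.phone_code_hash, "12345")

                print(signed_in)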
        """
        phone_number = phone_number.strip(" +")

        r = await self.send(
            functions.auth.SignIn(
                phone_number=phone_number,
                phone_code_hash=phone_code_hash,
                phone_code=phone_code
            )
        )

        if isinstance(r, types.auth.AuthorizationSignUpRequired):
            if r.terms_of_service:
                return TermsOfService._parse(terms_of_service=r.terms_of_service)

            return False
        else:
            self.storage.user_id(r.user.id)
            self.storage.is_bot(False)

            return User._parse(self, r.user)

    async def sign_up(self, phone_number: str, phone_code_hash: str, first_name: str, last_name: str = "") -> User:
        """Register a new user in Telegram.

        Parameters:
            phone_number (``str``):
                Phone number in international format (includes the country prefix).

            phone_code_hash (``str``):
                Code identifier taken from the result of :meth:`~Client.send_code`.

            first_name (``str``):
                New user first name.

            last_name (``str``, *optional*):
                New user last name. Defaults to "" (empty string).

        Returns:
            :obj:`User`: On success, the new registered user is returned.

        Raises:
            BadRequest: In case the arguments are invalid.
        """
        phone_number = phone_number.strip(" +")

        r = await self.send(
            functions.auth.SignUp(
                phone_number=phone_number,
                first_name=first_name,
                last_name=last_name,
                phone_code_hash=phone_code_hash
            )
        )

        self.storage.user_id(r.user.id)
        self.storage.is_bot(False)

        return User._parse(self, r.user)

    async def sign_in_bot(self, bot_token: str) -> User:
        """Authorize a bot using its bot token generated by BotFather.

        Parameters:
            bot_token (``str``):
                The bot token generated by BotFather.

        Returns:
            :obj:`User`: On success, the bot identity is returned in the form of a user object.

        Raises:
            BadRequest: In case the bot token is invalid.
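
        Example:
            A usage sketch, assuming the client is already connected; the token below is a placeholder in the
            BotFather format.

            .. code-block:: python

                bot = app.sign_in_bot("123456:ABC-DEF1234ghIkl-zyx57W2v1u123ew11")
                print(bot)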
        """
        while True:
            try:
                r = await self.send(
                    functions.auth.ImportBotAuthorization(
                        flags=0,
                        api_id=self.api_id,
                        api_hash=self.api_hash,
                        bot_auth_token=bot_token
                    )
                )
            except UserMigrate as e:
                await self.session.stop()

                self.storage.dc_id(e.x)
                self.storage.auth_key(await Auth(self, self.storage.dc_id()).create())
                self.session = Session(self, self.storage.dc_id(), self.storage.auth_key())

                await self.session.start()
            else:
                self.storage.user_id(r.user.id)
                self.storage.is_bot(True)

                return User._parse(self, r.user)

    async def get_password_hint(self) -> str:
        """Get your Two-Step Verification password hint.

        Returns:
            ``str``: On success, the password hint as string is returned.
        """
        return (await self.send(functions.account.GetPassword())).hint

    async def check_password(self, password: str) -> User:
        """Check your Two-Step Verification password and log in.

        Parameters:
            password (``str``):
                Your Two-Step Verification password.

        Returns:
            :obj:`User`: On success, the authorized user is returned.

        Raises:
            BadRequest: In case the password is invalid.
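
        Example:
            A usage sketch, assuming a previous :meth:`~Client.sign_in` call raised SessionPasswordNeeded; the
            password below is a placeholder.

            .. code-block:: python

                print("Hint: {}".format(app.get_password_hint()))

                me = app.check_password("my 2FA password")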
        """
        r = await self.send(
            functions.auth.CheckPassword(
                password=compute_check(
                    await self.send(functions.account.GetPassword()),
                    password
                )
            )
        )

        self.storage.user_id(r.user.id)
        self.storage.is_bot(False)

        return User._parse(self, r.user)

    async def send_recovery_code(self) -> str:
        """Send a code to your email to recover your password.

        Returns:
            ``str``: On success, the hidden email pattern is returned and a recovery code is sent to that email.

        Raises:
            BadRequest: In case no recovery email was set up.
        """
        return (await self.send(
            functions.auth.RequestPasswordRecovery()
        )).email_pattern

    async def recover_password(self, recovery_code: str) -> User:
        """Recover your password with a recovery code and log in.

        Parameters:
            recovery_code (``str``):
                The recovery code sent via email.

        Returns:
            :obj:`User`: On success, the authorized user is returned and the Two-Step Verification password is reset.

        Raises:
            BadRequest: In case the recovery code is invalid.
        """
        r = await self.send(
            functions.auth.RecoverPassword(
                code=recovery_code
            )
        )

        self.storage.user_id(r.user.id)
        self.storage.is_bot(False)

        return User._parse(self, r.user)

    async def accept_terms_of_service(self, terms_of_service_id: str) -> bool:
        """Accept the given terms of service.

        Parameters:
            terms_of_service_id (``str``):
                The terms of service identifier.
        """
        r = await self.send(
            functions.help.AcceptTermsOfService(
                id=types.DataJSON(
                    data=terms_of_service_id
                )
            )
        )

        assert r

        return True
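
    # Interactive authorization: sign in as a bot if a bot token is set, otherwise prompt for the phone
    # number (or a bot token), the confirmation code, the Two-Step Verification password and, for new
    # accounts, the first/last name, retrying on bad input and FloodWait errors.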
    async def authorize(self) -> User:
        if self.bot_token:
            return await self.sign_in_bot(self.bot_token)

        while True:
            try:
                if not self.phone_number:
                    while True:
                        value = await ainput("Enter phone number or bot token: ")

                        if not value:
                            continue

                        confirm = input("Is \"{}\" correct? (y/N): ".format(value)).lower()

                        if confirm == "y":
                            break

                    if ":" in value:
                        self.bot_token = value
                        return await self.sign_in_bot(value)
                    else:
                        self.phone_number = value

                sent_code = await self.send_code(self.phone_number)
            except BadRequest as e:
                print(e.MESSAGE)
                self.phone_number = None
                self.bot_token = None
            except FloodWait as e:
                print(e.MESSAGE.format(x=e.x))
                time.sleep(e.x)
            else:
                break

        if self.force_sms:
            sent_code = await self.resend_code(self.phone_number, sent_code.phone_code_hash)

        print("The confirmation code has been sent via {}".format(
            {
                "app": "Telegram app",
                "sms": "SMS",
                "call": "phone call",
                "flash_call": "phone flash call"
            }[sent_code.type]
        ))

        while True:
            if not self.phone_code:
                self.phone_code = await ainput("Enter confirmation code: ")

            try:
                signed_in = await self.sign_in(self.phone_number, sent_code.phone_code_hash, self.phone_code)
            except BadRequest as e:
                print(e.MESSAGE)
                self.phone_code = None
            except SessionPasswordNeeded as e:
                print(e.MESSAGE)

                while True:
                    print("Password hint: {}".format(await self.get_password_hint()))

                    if not self.password:
                        self.password = await ainput("Enter password (empty to recover): ")

                    try:
                        if not self.password:
                            confirm = await ainput("Confirm password recovery (y/n): ")

                            if confirm == "y":
                                email_pattern = await self.send_recovery_code()
                                print("The recovery code has been sent to {}".format(email_pattern))

                                while True:
                                    recovery_code = await ainput("Enter recovery code: ")

                                    try:
                                        return await self.recover_password(recovery_code)
                                    except BadRequest as e:
                                        print(e.MESSAGE)
                                    except FloodWait as e:
                                        print(e.MESSAGE.format(x=e.x))
                                        time.sleep(e.x)
                                    except Exception as e:
                                        log.error(e, exc_info=True)
                                        raise
                            else:
                                self.password = None
                        else:
                            return await self.check_password(self.password)
                    except BadRequest as e:
                        print(e.MESSAGE)
                        self.password = None
                    except FloodWait as e:
                        print(e.MESSAGE.format(x=e.x))
                        time.sleep(e.x)
            except FloodWait as e:
                print(e.MESSAGE.format(x=e.x))
                time.sleep(e.x)
            else:
                break

        if isinstance(signed_in, User):
            return signed_in

        while True:
            first_name = await ainput("Enter first name: ")
            last_name = await ainput("Enter last name (empty to skip): ")

            try:
                signed_up = await self.sign_up(
                    self.phone_number,
                    sent_code.phone_code_hash,
                    first_name,
                    last_name
                )
            except BadRequest as e:
                print(e.MESSAGE)
            except FloodWait as e:
                print(e.MESSAGE.format(x=e.x))
                time.sleep(e.x)
            else:
                break

        if isinstance(signed_in, TermsOfService):
            print("\n" + signed_in.text + "\n")
            await self.accept_terms_of_service(signed_in.id)

        return signed_up

    async def log_out(self):
        """Log out from Telegram and delete the *\\*.session* file.

        When you log out, the current client is stopped and the storage session is deleted.
        No more API calls can be made until you start the client and re-authorize again.

        Returns:
            ``bool``: On success, True is returned.

        Example:
            .. code-block:: python

                # Log out.
                app.log_out()
        """
        await self.send(functions.auth.LogOut())
        await self.stop()
        self.storage.delete()

        return True

    async def start(self):
        """Start the client.

        This method connects the client to Telegram and, in case of new sessions, automatically manages the full
        authorization process using an interactive prompt.

        Returns:
            :obj:`Client`: The started client itself.

        Raises:
            ConnectionError: In case you try to start an already started client.

        Example:
            .. code-block:: python
                :emphasize-lines: 4

                from pyrogram import Client

                app = Client("my_account")
                app.start()

                ...  # Call API methods

                app.stop()
        """
        is_authorized = await self.connect()

        try:
            if not is_authorized:
                await self.authorize()

            if not self.storage.is_bot() and self.takeout:
                self.takeout_id = (await self.send(functions.account.InitTakeoutSession())).id
                log.warning("Takeout session {} initiated".format(self.takeout_id))

            await self.send(functions.updates.GetState())
        except (Exception, KeyboardInterrupt):
            await self.disconnect()
            raise
        else:
            await self.initialize()
            return self

    async def stop(self):
        """Stop the Client.

        This method disconnects the client from Telegram and stops the underlying tasks.

        Returns:
            :obj:`Client`: The stopped client itself.

        Raises:
            ConnectionError: In case you try to stop an already stopped client.

        Example:
            .. code-block:: python
                :emphasize-lines: 8

                from pyrogram import Client

                app = Client("my_account")
                app.start()

                ...  # Call API methods

                app.stop()
        """
        await self.terminate()
        await self.disconnect()

        return self

    async def restart(self):
        """Restart the Client.

        This method will first call :meth:`~Client.stop` and then :meth:`~Client.start` in a row in order to restart
        a client using a single method.

        Returns:
            :obj:`Client`: The restarted client itself.

        Raises:
            ConnectionError: In case you try to restart a stopped Client.

        Example:
            .. code-block:: python
                :emphasize-lines: 8

                from pyrogram import Client

                app = Client("my_account")
                app.start()

                ...  # Call API methods

                app.restart()

                ...  # Call other API methods

                app.stop()
        """
        await self.stop()
        await self.start()

        return self

    @staticmethod
    async def idle(stop_signals: tuple = (SIGINT, SIGTERM, SIGABRT)):
        """Block the main script execution until a signal is received.

        This static method will run an infinite loop in order to block the main script execution and prevent it from
        exiting while having client(s) that are still running in the background.

        It is useful for event-driven applications only, that is, applications which react upon incoming Telegram
        updates through handlers, rather than executing a set of methods sequentially.

        The way Pyrogram works, it will keep your handlers in a pool of worker threads, which are executed concurrently
        outside the main thread; calling idle() will ensure the client(s) will be kept alive by not letting the main
        script end, until you decide to quit.

        Once a signal is received (e.g.: from CTRL+C) the inner infinite loop will break and your main script will
        continue. Don't forget to call :meth:`~Client.stop` for each running client before the script ends.

        Parameters:
            stop_signals (``tuple``, *optional*):
                Iterable containing signals the signal handler will listen to.
                Defaults to *(SIGINT, SIGTERM, SIGABRT)*.

        Example:
            .. code-block:: python
                :emphasize-lines: 13

                from pyrogram import Client

                app1 = Client("account1")
                app2 = Client("account2")
                app3 = Client("account3")

                ...  # Set handlers up

                app1.start()
                app2.start()
                app3.start()

                Client.idle()

                app1.stop()
                app2.stop()
                app3.stop()
        """

        def signal_handler(_, __):
            logging.info("Stop signal received ({}). Exiting...".format(_))
            Client.is_idling = False

        for s in stop_signals:
            signal(s, signal_handler)

        Client.is_idling = True

        while Client.is_idling:
            await asyncio.sleep(1)

    def run(self, coroutine=None):
        """Start the client, idle the main script and finally stop the client.

        This is a convenience method that calls :meth:`~Client.start`, :meth:`~Client.idle` and :meth:`~Client.stop` in
        sequence. It makes running a client less verbose, but is not suitable in case you want to run more than one
        client in a single main script, since idle() will block after starting its own client.

        Raises:
            ConnectionError: In case you try to run an already started client.

        Example:
            .. code-block:: python
                :emphasize-lines: 7

                from pyrogram import Client

                app = Client("my_account")

                ...  # Set handlers up

                app.run()
        """
        loop = asyncio.get_event_loop()
        run = loop.run_until_complete

        if coroutine is not None:
            run(coroutine)
        else:
            run(self.start())
            run(Client.idle())
            run(self.stop())

        loop.close()

    def add_handler(self, handler: Handler, group: int = 0):
        """Register an update handler.

        You can register multiple handlers, but at most one handler within a group will be used for a single update.
        To handle the same update more than once, register your handler using a different group id (lower group id
        == higher priority). This mechanism is explained in greater detail at
        :doc:`More on Updates <../../topics/more-on-updates>`.

        Parameters:
            handler (``Handler``):
                The handler to be registered.

            group (``int``, *optional*):
                The group identifier, defaults to 0.

        Returns:
            ``tuple``: A tuple consisting of *(handler, group)*.

        Example:
            .. code-block:: python
                :emphasize-lines: 8

                from pyrogram import Client, MessageHandler

                def dump(client, message):
                    print(message)

                app = Client("my_account")

                app.add_handler(MessageHandler(dump))

                app.run()
        """
        if isinstance(handler, DisconnectHandler):
            self.disconnect_handler = handler.callback
        else:
            self.dispatcher.add_handler(handler, group)

        return handler, group

    def remove_handler(self, handler: Handler, group: int = 0):
        """Remove a previously-registered update handler.

        Make sure to provide the right group the handler was added to. You can use the return value of the
        :meth:`~Client.add_handler` method, a tuple of *(handler, group)*, and pass it directly.

        Parameters:
            handler (``Handler``):
                The handler to be removed.

            group (``int``, *optional*):
                The group identifier, defaults to 0.

        Example:
            .. code-block:: python
                :emphasize-lines: 11

                from pyrogram import Client, MessageHandler

                def dump(client, message):
                    print(message)

                app = Client("my_account")

                handler = app.add_handler(MessageHandler(dump))

                # Starred expression to unpack (handler, group)
                app.remove_handler(*handler)

                app.run()
        """
        if isinstance(handler, DisconnectHandler):
            self.disconnect_handler = None
        else:
            self.dispatcher.remove_handler(handler, group)

    def stop_transmission(self):
        """Stop downloading or uploading a file.

        This method must be called inside a progress callback function in order to stop the transmission at the
        desired time. The progress callback is called every time a file chunk is uploaded/downloaded.

        Example:
            .. code-block:: python
                :emphasize-lines: 9

                from pyrogram import Client

                app = Client("my_account")

                # Example to stop transmission once the upload progress reaches 50%
                # Useless in practice, but shows how to stop on command
                def progress(client, current, total):
                    if (current * 100 / total) > 50:
                        client.stop_transmission()

                with app:
                    app.send_document("me", "files.zip", progress=progress)
        """
        raise Client.StopTransmission

    def export_session_string(self):
        """Export the current authorized session as a serialized string.

        Session strings are useful for storing in-memory authorized sessions in a portable, serialized string.
        More detailed information about session strings can be found at the dedicated page of
        :doc:`Storage Engines <../../topics/storage-engines>`.

        Returns:
            ``str``: The session serialized into a printable, url-safe string.

        Example:
            .. code-block:: python
                :emphasize-lines: 6

                from pyrogram import Client

                app = Client("my_account")

                with app:
                    print(app.export_session_string())
        """
        return self.storage.export_session_string()

    def set_parse_mode(self, parse_mode: Union[str, None] = "combined"):
        """Set the parse mode to be used globally by the client.

        When setting the parse mode with this method, all other methods having a *parse_mode* parameter will follow the
        global value by default. The default value *"combined"* enables both Markdown and HTML styles to be used and
        combined together.

        Parameters:
            parse_mode (``str``):
                The new parse mode, can be any of: *"combined"*, for the default combined mode. *"markdown"* or *"md"*
                to force Markdown-only styles. *"html"* to force HTML-only styles. *None* to disable the parser
                completely.

        Raises:
            ValueError: In case the provided *parse_mode* is not a valid parse mode.

        Example:
            .. code-block:: python
                :emphasize-lines: 10,14,18,22

                from pyrogram import Client

                app = Client("my_account")

                with app:
                    # Default combined mode: Markdown + HTML
                    app.send_message("haskell", "1. **markdown** and <i>html</i>")

                    # Force Markdown-only, HTML is disabled
                    app.set_parse_mode("markdown")
                    app.send_message("haskell", "2. **markdown** and <i>html</i>")

                    # Force HTML-only, Markdown is disabled
                    app.set_parse_mode("html")
                    app.send_message("haskell", "3. **markdown** and <i>html</i>")

                    # Disable the parser completely
                    app.set_parse_mode(None)
                    app.send_message("haskell", "4. **markdown** and <i>html</i>")

                    # Bring back the default combined mode
                    app.set_parse_mode()
                    app.send_message("haskell", "5. **markdown** and <i>html</i>")
        """

        if parse_mode not in self.PARSE_MODES:
            raise ValueError('parse_mode must be one of {} or None. Not "{}"'.format(
                ", ".join('"{}"'.format(m) for m in self.PARSE_MODES[:-1]),
                parse_mode
            ))

        self.parse_mode = parse_mode
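
    # Store the peers contained in updates/results into the session storage as
    # (id, access_hash, type, username, phone_number) tuples, skipping min peers.
    # Returns True if at least one min peer was encountered.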
    def fetch_peers(self, peers: List[Union[types.User, types.Chat, types.Channel]]) -> bool:
        is_min = False
        parsed_peers = []

        for peer in peers:
            if getattr(peer, "min", False):
                is_min = True
                continue

            username = None
            phone_number = None

            if isinstance(peer, types.User):
                peer_id = peer.id
                access_hash = peer.access_hash
                username = (peer.username or "").lower() or None
                phone_number = peer.phone
                peer_type = "bot" if peer.bot else "user"
            elif isinstance(peer, (types.Chat, types.ChatForbidden)):
                peer_id = -peer.id
                access_hash = 0
                peer_type = "group"
            elif isinstance(peer, (types.Channel, types.ChannelForbidden)):
                peer_id = utils.get_channel_id(peer.id)
                access_hash = peer.access_hash
                username = (getattr(peer, "username", None) or "").lower() or None
                peer_type = "channel" if peer.broadcast else "supergroup"
            else:
                continue

            parsed_peers.append((peer_id, access_hash, peer_type, username, phone_number))

        self.storage.update_peers(parsed_peers)

        return is_min
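
    # Background task: consume packets from the download queue, fetch the media via get_file() and move
    # the temporary file to its final destination, reporting the result through the packet's path holder.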
    async def download_worker(self):
        while True:
            packet = await self.download_queue.get()

            if packet is None:
                break

            temp_file_path = ""
            final_file_path = ""

            try:
                data, directory, file_name, done, progress, progress_args, path = packet

                temp_file_path = await self.get_file(
                    media_type=data.media_type,
                    dc_id=data.dc_id,
                    document_id=data.document_id,
                    access_hash=data.access_hash,
                    thumb_size=data.thumb_size,
                    peer_id=data.peer_id,
                    peer_access_hash=data.peer_access_hash,
                    volume_id=data.volume_id,
                    local_id=data.local_id,
                    file_size=data.file_size,
                    is_big=data.is_big,
                    progress=progress,
                    progress_args=progress_args
                )

                if temp_file_path:
                    final_file_path = os.path.abspath(re.sub("\\\\", "/", os.path.join(directory, file_name)))
                    os.makedirs(directory, exist_ok=True)
                    shutil.move(temp_file_path, final_file_path)
            except Exception as e:
                log.error(e, exc_info=True)

                try:
                    os.remove(temp_file_path)
                except OSError:
                    pass
            else:
                # TODO: "" or None for faulty download, which is better?
                # os.path methods return "" in case something does not exist, I prefer this.
                # For now let's keep None
                path[0] = final_file_path or None
            finally:
                done.set()
|
2018-03-20 20:20:04 +00:00
|
|
|
|
2018-06-20 09:41:22 +00:00
|
|
|
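    # Consume raw update containers from updates_queue, fill the gaps via
    # GetChannelDifference/GetDifference when needed and feed single updates
    # (together with their users/chats) to the Dispatcher.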
    async def updates_worker(self):
        while True:
            updates = await self.updates_queue.get()

            if updates is None:
                break

            try:
                if isinstance(updates, (types.Update, types.UpdatesCombined)):
                    is_min = self.fetch_peers(updates.users) or self.fetch_peers(updates.chats)

                    users = {u.id: u for u in updates.users}
                    chats = {c.id: c for c in updates.chats}

                    for update in updates.updates:
                        channel_id = getattr(
                            getattr(
                                getattr(
                                    update, "message", None
                                ), "to_id", None
                            ), "channel_id", None
                        ) or getattr(update, "channel_id", None)

                        pts = getattr(update, "pts", None)
                        pts_count = getattr(update, "pts_count", None)

                        if isinstance(update, types.UpdateChannelTooLong):
                            log.warning(update)

                        if isinstance(update, types.UpdateNewChannelMessage) and is_min:
                            message = update.message

                            if not isinstance(message, types.MessageEmpty):
                                try:
                                    diff = await self.send(
                                        functions.updates.GetChannelDifference(
                                            channel=await self.resolve_peer(utils.get_channel_id(channel_id)),
                                            filter=types.ChannelMessagesFilter(
                                                ranges=[types.MessageRange(
                                                    min_id=update.message.id,
                                                    max_id=update.message.id
                                                )]
                                            ),
                                            pts=pts - pts_count,
                                            limit=pts
                                        )
                                    )
                                except ChannelPrivate:
                                    pass
                                else:
                                    if not isinstance(diff, types.updates.ChannelDifferenceEmpty):
                                        users.update({u.id: u for u in diff.users})
                                        chats.update({c.id: c for c in diff.chats})

                        self.dispatcher.updates_queue.put_nowait((update, users, chats))
                elif isinstance(updates, (types.UpdateShortMessage, types.UpdateShortChatMessage)):
                    diff = await self.send(
                        functions.updates.GetDifference(
                            pts=updates.pts - updates.pts_count,
                            date=updates.date,
                            qts=-1
                        )
                    )

                    if diff.new_messages:
                        self.dispatcher.updates_queue.put_nowait((
                            types.UpdateNewMessage(
                                message=diff.new_messages[0],
                                pts=updates.pts,
                                pts_count=updates.pts_count
                            ),
                            {u.id: u for u in diff.users},
                            {c.id: c for c in diff.chats}
                        ))
                    else:
                        self.dispatcher.updates_queue.put_nowait((diff.other_updates[0], {}, {}))
                elif isinstance(updates, types.UpdateShort):
                    self.dispatcher.updates_queue.put_nowait((updates.update, {}, {}))
                elif isinstance(updates, types.UpdatesTooLong):
                    log.info(updates)
            except Exception as e:
                log.error(e, exc_info=True)

    async def send(self, data: TLObject, retries: int = Session.MAX_RETRIES, timeout: float = Session.WAIT_TIMEOUT):
        """Send raw Telegram queries.

        This method makes it possible to manually call every single Telegram API method in a low-level manner.
        Available functions are listed in the :obj:`functions <pyrogram.api.functions>` package and may accept compound
        data types from :obj:`types <pyrogram.api.types>` as well as bare types such as ``int``, ``str``, etc.

        .. note::

            This is a utility method intended to be used **only** when working with raw
            :obj:`functions <pyrogram.api.functions>` (i.e.: a Telegram API method you wish to use which is not
            available yet in the Client class as an easy-to-use method).

        Parameters:
            data (``RawFunction``):
                The API Schema function filled with proper arguments.

            retries (``int``):
                Number of retries.

            timeout (``float``):
                Timeout in seconds.

        Returns:
            ``RawType``: The raw type response generated by the query.

        Raises:
            RPCError: In case of a Telegram RPC error.
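
        Example:
            A minimal sketch of a raw query; ``app`` is assumed to be an already initialized Client and
            ``help.GetConfig`` is used here only because it is a simple, argument-free raw function.

            .. code-block:: python

                from pyrogram.api import functions

                with app:
                    # Invoke a raw API function and inspect the raw result
                    r = app.send(functions.help.GetConfig())
                    print(r)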
        """
        if not self.is_connected:
            raise ConnectionError("Client has not been started yet")

        if self.no_updates:
            data = functions.InvokeWithoutUpdates(query=data)

        if self.takeout_id:
            data = functions.InvokeWithTakeout(takeout_id=self.takeout_id, query=data)

        r = await self.session.send(data, retries, timeout)

        self.fetch_peers(getattr(r, "users", []))
        self.fetch_peers(getattr(r, "chats", []))

        return r

    def load_config(self):
        parser = ConfigParser()
        parser.read(str(self.config_file))

        if self.bot_token:
            pass
        else:
            self.bot_token = parser.get("pyrogram", "bot_token", fallback=None)

        if self.api_id and self.api_hash:
            pass
        else:
            if parser.has_section("pyrogram"):
                self.api_id = parser.getint("pyrogram", "api_id")
                self.api_hash = parser.get("pyrogram", "api_hash")
            else:
                raise AttributeError("No API Key found. More info: https://docs.pyrogram.org/intro/setup")

        for option in ["app_version", "device_model", "system_version", "lang_code"]:
            if getattr(self, option):
                pass
            else:
                if parser.has_section("pyrogram"):
                    setattr(self, option, parser.get(
                        "pyrogram",
                        option,
                        fallback=getattr(Client, option.upper())
                    ))
                else:
                    setattr(self, option, getattr(Client, option.upper()))

        if self._proxy:
            self._proxy["enabled"] = bool(self._proxy.get("enabled", True))
        else:
            self._proxy = {}

            if parser.has_section("proxy"):
                self._proxy["enabled"] = parser.getboolean("proxy", "enabled", fallback=True)
                self._proxy["hostname"] = parser.get("proxy", "hostname")
                self._proxy["port"] = parser.getint("proxy", "port")
                self._proxy["username"] = parser.get("proxy", "username", fallback=None) or None
                self._proxy["password"] = parser.get("proxy", "password", fallback=None) or None

        if self.plugins:
            self.plugins = {
                "enabled": bool(self.plugins.get("enabled", True)),
                "root": self.plugins.get("root", None),
                "include": self.plugins.get("include", []),
                "exclude": self.plugins.get("exclude", [])
            }
        else:
            try:
                section = parser["plugins"]

                self.plugins = {
                    "enabled": section.getboolean("enabled", True),
                    "root": section.get("root", None),
                    "include": section.get("include", []),
                    "exclude": section.get("exclude", [])
                }

                include = self.plugins["include"]
                exclude = self.plugins["exclude"]

                if include:
                    self.plugins["include"] = include.strip().split("\n")

                if exclude:
                    self.plugins["exclude"] = exclude.strip().split("\n")
            except KeyError:
                self.plugins = None

    async def load_session(self):
        self.storage.open()

        session_empty = any([
            self.storage.test_mode() is None,
            self.storage.auth_key() is None,
            self.storage.user_id() is None,
            self.storage.is_bot() is None
        ])

        if session_empty:
            self.storage.dc_id(2)
            self.storage.date(0)

            self.storage.test_mode(self.test_mode)
            self.storage.auth_key(await Auth(self, self.storage.dc_id()).create())
            self.storage.user_id(None)
            self.storage.is_bot(None)

    def load_plugins(self):
        if self.plugins:
            plugins = self.plugins.copy()

            for option in ["include", "exclude"]:
                if plugins[option]:
                    plugins[option] = [
                        (i.split()[0], i.split()[1:] or None)
                        for i in self.plugins[option]
                    ]
        else:
            return

        if plugins.get("enabled", False):
            root = plugins["root"]
            include = plugins["include"]
            exclude = plugins["exclude"]

            count = 0

            if not include:
                for path in sorted(Path(root).rglob("*.py")):
                    module_path = '.'.join(path.parent.parts + (path.stem,))
                    module = import_module(module_path)

                    for name in vars(module).keys():
                        # noinspection PyBroadException
                        try:
                            handler, group = getattr(module, name).handler

                            if isinstance(handler, Handler) and isinstance(group, int):
                                self.add_handler(handler, group)

                                log.info('[{}] [LOAD] {}("{}") in group {} from "{}"'.format(
                                    self.session_name, type(handler).__name__, name, group, module_path))

                                count += 1
                        except Exception:
                            pass
            else:
                for path, handlers in include:
                    module_path = root + "." + path
                    warn_non_existent_functions = True

                    try:
                        module = import_module(module_path)
                    except ImportError:
                        log.warning('[{}] [LOAD] Ignoring non-existent module "{}"'.format(
                            self.session_name, module_path))
                        continue

                    if "__path__" in dir(module):
                        log.warning('[{}] [LOAD] Ignoring namespace "{}"'.format(
                            self.session_name, module_path))
                        continue

                    if handlers is None:
                        handlers = vars(module).keys()
                        warn_non_existent_functions = False

                    for name in handlers:
                        # noinspection PyBroadException
                        try:
                            handler, group = getattr(module, name).handler

                            if isinstance(handler, Handler) and isinstance(group, int):
                                self.add_handler(handler, group)

                                log.info('[{}] [LOAD] {}("{}") in group {} from "{}"'.format(
                                    self.session_name, type(handler).__name__, name, group, module_path))

                                count += 1
                        except Exception:
                            if warn_non_existent_functions:
                                log.warning('[{}] [LOAD] Ignoring non-existent function "{}" from "{}"'.format(
                                    self.session_name, name, module_path))

            if exclude:
                for path, handlers in exclude:
                    module_path = root + "." + path
                    warn_non_existent_functions = True

                    try:
                        module = import_module(module_path)
                    except ImportError:
                        log.warning('[{}] [UNLOAD] Ignoring non-existent module "{}"'.format(
                            self.session_name, module_path))
                        continue

                    if "__path__" in dir(module):
                        log.warning('[{}] [UNLOAD] Ignoring namespace "{}"'.format(
                            self.session_name, module_path))
                        continue

                    if handlers is None:
                        handlers = vars(module).keys()
                        warn_non_existent_functions = False

                    for name in handlers:
                        # noinspection PyBroadException
                        try:
                            handler, group = getattr(module, name).handler

                            if isinstance(handler, Handler) and isinstance(group, int):
                                self.remove_handler(handler, group)

                                log.info('[{}] [UNLOAD] {}("{}") from group {} in "{}"'.format(
                                    self.session_name, type(handler).__name__, name, group, module_path))

                                count -= 1
                        except Exception:
                            if warn_non_existent_functions:
                                log.warning('[{}] [UNLOAD] Ignoring non-existent function "{}" from "{}"'.format(
                                    self.session_name, name, module_path))

            if count > 0:
                log.warning('[{}] Successfully loaded {} plugin{} from "{}"'.format(
                    self.session_name, count, "s" if count > 1 else "", root))
            else:
                log.warning('[{}] No plugin loaded from "{}"'.format(
                    self.session_name, root))

    # def get_initial_dialogs_chunk(self, offset_date: int = 0):
    #     while True:
    #         try:
    #             r = self.send(
    #                 functions.messages.GetDialogs(
    #                     offset_date=offset_date,
    #                     offset_id=0,
    #                     offset_peer=types.InputPeerEmpty(),
    #                     limit=self.DIALOGS_AT_ONCE,
    #                     hash=0,
    #                     exclude_pinned=True
    #                 )
    #             )
    #         except FloodWait as e:
    #             log.warning("get_dialogs flood: waiting {} seconds".format(e.x))
    #             time.sleep(e.x)
    #         else:
    #             log.info("Total peers: {}".format(self.storage.peers_count))
    #             return r
    #
    # def get_initial_dialogs(self):
    #     self.send(functions.messages.GetPinnedDialogs(folder_id=0))
    #
    #     dialogs = self.get_initial_dialogs_chunk()
    #     offset_date = utils.get_offset_date(dialogs)
    #
    #     while len(dialogs.dialogs) == self.DIALOGS_AT_ONCE:
    #         dialogs = self.get_initial_dialogs_chunk(offset_date)
    #         offset_date = utils.get_offset_date(dialogs)
    #
    #     self.get_initial_dialogs_chunk()

    async def resolve_peer(self, peer_id: Union[int, str]):
        """Get the InputPeer of a known peer id.
        Useful whenever an InputPeer type is required.

        .. note::

            This is a utility method intended to be used **only** when working with raw
            :obj:`functions <pyrogram.api.functions>` (i.e.: a Telegram API method you wish to use which is not
            available yet in the Client class as an easy-to-use method).

        Parameters:
            peer_id (``int`` | ``str``):
                The peer id you want to extract the InputPeer from.
                Can be a direct id (int), a username (str) or a phone number (str).

        Returns:
            ``InputPeer``: On success, the resolved peer id is returned in form of an InputPeer object.

        Raises:
            KeyError: In case the peer doesn't exist in the internal database.
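
        Example:
            A minimal sketch; ``app`` is assumed to be an already initialized Client and the
            "haskell" username is purely illustrative.

            .. code-block:: python

                with app:
                    # Resolve a username into the InputPeer required by raw functions
                    peer = app.resolve_peer("haskell")
                    print(peer)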
        """
        if not self.is_connected:
            raise ConnectionError("Client has not been started yet")

        try:
            return self.storage.get_peer_by_id(peer_id)
        except KeyError:
            if type(peer_id) is str:
                if peer_id in ("self", "me"):
                    return types.InputPeerSelf()

                peer_id = re.sub(r"[@+\s]", "", peer_id.lower())

                try:
                    int(peer_id)
                except ValueError:
                    try:
                        return self.storage.get_peer_by_username(peer_id)
                    except KeyError:
                        await self.send(
                            functions.contacts.ResolveUsername(
                                username=peer_id
                            )
                        )

                        return self.storage.get_peer_by_username(peer_id)
                else:
                    try:
                        return self.storage.get_peer_by_phone_number(peer_id)
                    except KeyError:
                        raise PeerIdInvalid

            peer_type = utils.get_peer_type(peer_id)

            if peer_type == "user":
                self.fetch_peers(
                    await self.send(
                        functions.users.GetUsers(
                            id=[
                                types.InputUser(
                                    user_id=peer_id,
                                    access_hash=0
                                )
                            ]
                        )
                    )
                )
            elif peer_type == "chat":
                await self.send(
                    functions.messages.GetChats(
                        id=[-peer_id]
                    )
                )
            else:
                await self.send(
                    functions.channels.GetChannels(
                        id=[
                            types.InputChannel(
                                channel_id=utils.get_channel_id(peer_id),
                                access_hash=0
                            )
                        ]
                    )
                )

            try:
                return self.storage.get_peer_by_id(peer_id)
            except KeyError:
                raise PeerIdInvalid

    async def save_file(self,
                        path: str,
                        file_id: int = None,
                        file_part: int = 0,
                        progress: callable = None,
                        progress_args: tuple = ()
                        ):
        """Upload a file onto Telegram servers, without actually sending the message to anyone.
        Useful whenever an InputFile type is required.

        .. note::

            This is a utility method intended to be used **only** when working with raw
            :obj:`functions <pyrogram.api.functions>` (i.e.: a Telegram API method you wish to use which is not
            available yet in the Client class as an easy-to-use method).

        Parameters:
            path (``str``):
                The path of the file you want to upload that exists on your local machine.

            file_id (``int``, *optional*):
                In case a file part expired, pass the file_id and the file_part to retry uploading that specific chunk.

            file_part (``int``, *optional*):
                In case a file part expired, pass the file_id and the file_part to retry uploading that specific chunk.

            progress (``callable``, *optional*):
                Pass a callback function to view the file transmission progress.
                The function must take *(current, total)* as positional arguments (look at Other Parameters below for a
                detailed description) and will be called back each time a new file chunk has been successfully
                transmitted.

            progress_args (``tuple``, *optional*):
                Extra custom arguments for the progress callback function.
                You can pass anything you need to be available in the progress callback scope; for example, a Message
                object or a Client instance in order to edit the message with the updated progress status.

        Other Parameters:
            current (``int``):
                The amount of bytes transmitted so far.

            total (``int``):
                The total size of the file.

            *args (``tuple``, *optional*):
                Extra custom arguments as defined in the *progress_args* parameter.
                You can either keep *\*args* or add every single extra argument in your function signature.

        Returns:
            ``InputFile``: On success, the uploaded file is returned in form of an InputFile object.

        Raises:
            RPCError: In case of a Telegram RPC error.
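
        Example:
            A minimal sketch; ``app`` is assumed to be an already initialized Client and
            "document.zip" is just a placeholder path on your local machine.

            .. code-block:: python

                with app:
                    # Upload the file and get back an InputFile usable with raw functions
                    input_file = app.save_file("document.zip")
                    print(input_file)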
        """

        async def worker(session):
            while True:
                data = await queue.get()

                if data is None:
                    return

                try:
                    await asyncio.ensure_future(session.send(data))
                except Exception as e:
                    logging.error(e)

        part_size = 512 * 1024
        file_size = os.path.getsize(path)

        if file_size == 0:
            raise ValueError("File size equals to 0 B")

        if file_size > 1500 * 1024 * 1024:
            raise ValueError("Telegram doesn't support uploading files bigger than 1500 MiB")

        file_total_parts = int(math.ceil(file_size / part_size))
        is_big = file_size > 10 * 1024 * 1024
        pool_size = 3 if is_big else 1
        workers_count = 4 if is_big else 1
        is_missing_part = file_id is not None
        file_id = file_id or self.rnd_id()
        md5_sum = md5() if not is_big and not is_missing_part else None
        pool = [Session(self, self.storage.dc_id(), self.storage.auth_key(), is_media=True) for _ in range(pool_size)]
        workers = [asyncio.ensure_future(worker(session)) for session in pool for _ in range(workers_count)]
        queue = asyncio.Queue(16)

        try:
            for session in pool:
                await session.start()

            with open(path, "rb") as f:
                f.seek(part_size * file_part)

                while True:
                    chunk = f.read(part_size)

                    if not chunk:
                        if not is_big:
                            md5_sum = "".join([hex(i)[2:].zfill(2) for i in md5_sum.digest()])
                        break

                    if is_big:
                        rpc = functions.upload.SaveBigFilePart(
                            file_id=file_id,
                            file_part=file_part,
                            file_total_parts=file_total_parts,
                            bytes=chunk
                        )
                    else:
                        rpc = functions.upload.SaveFilePart(
                            file_id=file_id,
                            file_part=file_part,
                            bytes=chunk
                        )

                    await queue.put(rpc)

                    if is_missing_part:
                        return

                    if not is_big:
                        md5_sum.update(chunk)

                    file_part += 1

                    if progress:
                        await progress(min(file_part * part_size, file_size), file_size, *progress_args)
        except Client.StopTransmission:
            raise
        except Exception as e:
            log.error(e, exc_info=True)
        else:
            if is_big:
                return types.InputFileBig(
                    id=file_id,
                    parts=file_total_parts,
                    name=os.path.basename(path),
                )
            else:
                return types.InputFile(
                    id=file_id,
                    parts=file_total_parts,
                    name=os.path.basename(path),
                    md5_checksum=md5_sum
                )
        finally:
            for _ in workers:
                await queue.put(None)

            await asyncio.gather(*workers)

            for session in pool:
                await session.stop()

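    # Low-level download: picks (or creates) a media session for the target DC,
    # builds the proper InputFileLocation and streams the file, handling CDN
    # redirects, into a temporary file whose path is returned ("" on failure).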
    async def get_file(self, media_type: int,
                       dc_id: int,
                       document_id: int,
                       access_hash: int,
                       thumb_size: str,
                       peer_id: int,
                       peer_access_hash: int, volume_id: int,
                       local_id: int,
                       file_size: int,
                       is_big: bool,
                       progress: callable,
                       progress_args: tuple = ()) -> str:
        async with self.media_sessions_lock:
            session = self.media_sessions.get(dc_id, None)

            if session is None:
                if dc_id != self.storage.dc_id():
                    session = Session(self, dc_id, await Auth(self, dc_id).create(), is_media=True)
                    await session.start()

                    for _ in range(3):
                        exported_auth = await self.send(
                            functions.auth.ExportAuthorization(
                                dc_id=dc_id
                            )
                        )

                        try:
                            await session.send(
                                functions.auth.ImportAuthorization(
                                    id=exported_auth.id,
                                    bytes=exported_auth.bytes
                                )
                            )
                        except AuthBytesInvalid:
                            continue
                        else:
                            break
                    else:
                        await session.stop()
                        raise AuthBytesInvalid
                else:
                    session = Session(self, dc_id, self.storage.auth_key(), is_media=True)
                    await session.start()

                self.media_sessions[dc_id] = session

        if media_type == 1:
            location = types.InputPeerPhotoFileLocation(
                peer=types.InputPeerUser(
                    user_id=peer_id,
                    access_hash=peer_access_hash
                ),
                volume_id=volume_id,
                local_id=local_id,
                big=is_big or None
            )
        elif media_type in (0, 2):
            location = types.InputPhotoFileLocation(
                id=document_id,
                access_hash=access_hash,
                file_reference=b"",
                thumb_size=thumb_size
            )
        elif media_type == 14:
            location = types.InputDocumentFileLocation(
                id=document_id,
                access_hash=access_hash,
                file_reference=b"",
                thumb_size=thumb_size
            )
        else:
            location = types.InputDocumentFileLocation(
                id=document_id,
                access_hash=access_hash,
                file_reference=b"",
                thumb_size=""
            )

        limit = 1024 * 1024
        offset = 0
        file_name = ""

        try:
            r = await session.send(
                functions.upload.GetFile(
                    location=location,
                    offset=offset,
                    limit=limit
                )
            )

            if isinstance(r, types.upload.File):
                with tempfile.NamedTemporaryFile("wb", delete=False) as f:
                    file_name = f.name

                    while True:
                        chunk = r.bytes

                        if not chunk:
                            break

                        f.write(chunk)

                        offset += limit

                        if progress:
                            await progress(
                                min(offset, file_size)
                                if file_size != 0
                                else offset,
                                file_size,
                                *progress_args
                            )

                        r = await session.send(
                            functions.upload.GetFile(
                                location=location,
                                offset=offset,
                                limit=limit
                            )
                        )
            elif isinstance(r, types.upload.FileCdnRedirect):
                async with self.media_sessions_lock:
                    cdn_session = self.media_sessions.get(r.dc_id, None)

                    if cdn_session is None:
                        cdn_session = Session(
                            self,
                            r.dc_id,
                            await Auth(self, r.dc_id).create(), is_media=True, is_cdn=True)

                        await cdn_session.start()

                        self.media_sessions[r.dc_id] = cdn_session

                try:
                    with tempfile.NamedTemporaryFile("wb", delete=False) as f:
                        file_name = f.name

                        while True:
                            r2 = await cdn_session.send(
                                functions.upload.GetCdnFile(
                                    file_token=r.file_token,
                                    offset=offset,
                                    limit=limit
                                )
                            )

                            if isinstance(r2, types.upload.CdnFileReuploadNeeded):
                                try:
                                    await session.send(
                                        functions.upload.ReuploadCdnFile(
                                            file_token=r.file_token,
                                            request_token=r2.request_token
                                        )
                                    )
                                except VolumeLocNotFound:
                                    break
                                else:
                                    continue

                            chunk = r2.bytes

                            # https://core.telegram.org/cdn#decrypting-files
                            decrypted_chunk = AES.ctr256_decrypt(
                                chunk,
                                r.encryption_key,
                                bytearray(
                                    r.encryption_iv[:-4]
                                    + (offset // 16).to_bytes(4, "big")
                                )
                            )

                            hashes = await session.send(
                                functions.upload.GetCdnFileHashes(
                                    file_token=r.file_token,
                                    offset=offset
                                )
                            )

                            # https://core.telegram.org/cdn#verifying-files
                            for i, h in enumerate(hashes):
                                cdn_chunk = decrypted_chunk[h.limit * i: h.limit * (i + 1)]
                                assert h.hash == sha256(cdn_chunk).digest(), "Invalid CDN hash part {}".format(i)

                            f.write(decrypted_chunk)

                            offset += limit

                            if progress:
                                await progress(
                                    min(offset, file_size)
                                    if file_size != 0
                                    else offset,
                                    file_size,
                                    *progress_args
                                )

                            if len(chunk) < limit:
                                break
                except Exception as e:
                    raise e
        except Exception as e:
            if not isinstance(e, Client.StopTransmission):
                log.error(e, exc_info=True)

            try:
                os.remove(file_name)
            except OSError:
                pass

            return ""
        else:
            return file_name

    def guess_mime_type(self, filename: str):
        extension = os.path.splitext(filename)[1]
        return self.extensions_to_mime_types.get(extension)

    def guess_extension(self, mime_type: str):
        extensions = self.mime_types_to_extensions.get(mime_type)

        if extensions:
            return extensions.split(" ")[0]