# Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2018 Dan Tès <https://github.com/delivrance>
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.

import base64
import binascii
import getpass
import json
import logging
import math
import mimetypes
import os
import re
import shutil
import struct
import tempfile
import threading
import time
from configparser import ConfigParser
from datetime import datetime
from hashlib import sha256, md5
from signal import signal, SIGINT, SIGTERM, SIGABRT
from threading import Thread

from pyrogram.api import functions, types
from pyrogram.api.core import Object
from pyrogram.api.errors import (
    PhoneMigrate, NetworkMigrate, PhoneNumberInvalid,
    PhoneNumberUnoccupied, PhoneCodeInvalid, PhoneCodeHashEmpty,
    PhoneCodeExpired, PhoneCodeEmpty, SessionPasswordNeeded,
    PasswordHashInvalid, FloodWait, PeerIdInvalid, FirstnameInvalid, PhoneNumberBanned,
    VolumeLocNotFound, UserMigrate, FileIdInvalid, ChannelPrivate)
from pyrogram.client.handlers import DisconnectHandler
from pyrogram.crypto import AES
from pyrogram.session import Auth, Session
from .dispatcher import Dispatcher
from .ext import utils, Syncer, BaseClient
from .methods import Methods

log = logging.getLogger(__name__)


class Client(Methods, BaseClient):
    """This class represents a Client, the main means of interacting with Telegram.
    It exposes bot-like methods for easy access to the API as well as a simple way to
    invoke every single Telegram API method available.

    Args:
        session_name (``str``):
            Name to uniquely identify a session of either a User or a Bot.
            For Users: pass a string of your choice, e.g.: "my_main_account".
            For Bots: pass your Bot API token, e.g.: "123456:ABC-DEF1234ghIkl-zyx57W2v1u123ew11"
            Note: as long as a valid User session file exists, Pyrogram won't ask you again to input your phone number.

        api_id (``int``, *optional*):
            The *api_id* part of your Telegram API Key, as integer. E.g.: 12345
            This is an alternative way to pass it if you don't want to use the *config.ini* file.

        api_hash (``str``, *optional*):
            The *api_hash* part of your Telegram API Key, as string. E.g.: "0123456789abcdef0123456789abcdef"
            This is an alternative way to pass it if you don't want to use the *config.ini* file.

        app_version (``str``, *optional*):
            Application version. Defaults to "Pyrogram \U0001f525 vX.Y.Z"
            This is an alternative way to set it if you don't want to use the *config.ini* file.

        device_model (``str``, *optional*):
            Device model. Defaults to *platform.python_implementation() + " " + platform.python_version()*
            This is an alternative way to set it if you don't want to use the *config.ini* file.

        system_version (``str``, *optional*):
            Operating System version. Defaults to *platform.system() + " " + platform.release()*
            This is an alternative way to set it if you don't want to use the *config.ini* file.

        lang_code (``str``, *optional*):
            Code of the language used on the client, in ISO 639-1 standard. Defaults to "en".
            This is an alternative way to set it if you don't want to use the *config.ini* file.

        ipv6 (``bool``, *optional*):
            Pass True to connect to Telegram using IPv6.
            Defaults to False (IPv4).

        proxy (``dict``, *optional*):
            Your SOCKS5 Proxy settings as dict,
            e.g.: *dict(hostname="11.22.33.44", port=1080, username="user", password="pass")*.
            *username* and *password* can be omitted if your proxy doesn't require authorization.
            This is an alternative way to set up a proxy if you don't want to use the *config.ini* file.

        test_mode (``bool``, *optional*):
            Enable or disable log-in to testing servers. Defaults to False.
            Only applicable for new sessions and will be ignored in case previously
            created sessions are loaded.

        phone_number (``str``, *optional*):
            Pass your phone number (with your Country Code prefix included) to avoid
            entering it manually. Only applicable for new sessions.

        phone_code (``str`` | ``callable``, *optional*):
            Pass the phone code as string (for test numbers only), or pass a callback function which accepts
            a single positional argument *(phone_number)* and must return the correct phone code (e.g., "12345").
            Only applicable for new sessions.

        password (``str``, *optional*):
            Pass your Two-Step Verification password (if you have one) to avoid entering it
            manually. Only applicable for new sessions.

        force_sms (``bool``, *optional*):
            Pass True to force Telegram to send the authorization code via SMS.
            Only applicable for new sessions.

        first_name (``str``, *optional*):
            Pass a First Name to avoid entering it manually. It will be used to automatically
            create a new Telegram account in case the phone number you passed is not registered yet.
            Only applicable for new sessions.

        last_name (``str``, *optional*):
            Same purpose as *first_name*; pass a Last Name to avoid entering it manually. It can
            be an empty string: "". Only applicable for new sessions.

        workers (``int``, *optional*):
            Thread pool size for handling incoming updates. Defaults to 4.

        workdir (``str``, *optional*):
            Define a custom working directory. The working directory is the location in your filesystem
            where Pyrogram will store your session files. Defaults to "." (current directory).

        config_file (``str``, *optional*):
            Path of the configuration file. Defaults to ./config.ini
    """

    def __init__(self,
                 session_name: str,
                 api_id: int or str = None,
                 api_hash: str = None,
                 app_version: str = None,
                 device_model: str = None,
                 system_version: str = None,
                 lang_code: str = None,
                 ipv6: bool = False,
                 proxy: dict = None,
                 test_mode: bool = False,
                 phone_number: str = None,
                 phone_code: str or callable = None,
                 password: str = None,
                 force_sms: bool = False,
                 first_name: str = None,
                 last_name: str = None,
                 workers: int = BaseClient.WORKERS,
                 workdir: str = BaseClient.WORKDIR,
                 config_file: str = BaseClient.CONFIG_FILE):
        super().__init__()

        self.session_name = session_name
        self.api_id = int(api_id) if api_id else None
        self.api_hash = api_hash
        self.app_version = app_version
        self.device_model = device_model
        self.system_version = system_version
        self.lang_code = lang_code
        self.ipv6 = ipv6
        # TODO: Make code consistent, use underscore for private/protected fields
        self._proxy = proxy
        self.test_mode = test_mode
        self.phone_number = phone_number
        self.phone_code = phone_code
        self.password = password
        self.force_sms = force_sms
        self.first_name = first_name
        self.last_name = last_name
        self.workers = workers
        self.workdir = workdir
        self.config_file = config_file

        self.dispatcher = Dispatcher(self, workers)
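
    # Example (added for illustration; not part of the original source): a minimal
    # sketch of constructing a Client. The api_id/api_hash values below are
    # placeholders; in practice they usually come from the config.ini file instead.
    #
    #   from pyrogram import Client
    #
    #   app = Client(
    #       "my_account",
    #       api_id=12345,
    #       api_hash="0123456789abcdef0123456789abcdef"
    #   )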

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.stop()

    @property
    def proxy(self):
        return self._proxy

    @proxy.setter
    def proxy(self, value):
        self._proxy["enabled"] = True
        self._proxy.update(value)

    def start(self):
        """Use this method to start the Client after creating it.
        Requires no parameters.

        Raises:
            :class:`Error <pyrogram.Error>`
        """
        if self.is_started:
            raise ConnectionError("Client has already been started")

        if self.BOT_TOKEN_RE.match(self.session_name):
            self.bot_token = self.session_name
            self.session_name = self.session_name.split(":")[0]

        self.load_config()
        self.load_session()

        self.session = Session(
            self,
            self.dc_id,
            self.auth_key
        )

        self.session.start()
        self.is_started = True

        try:
            if self.user_id is None:
                if self.bot_token is None:
                    self.authorize_user()
                else:
                    self.authorize_bot()

                self.save_session()

            if self.bot_token is None:
                now = time.time()

                if abs(now - self.date) > Client.OFFLINE_SLEEP:
                    self.peers_by_username = {}
                    self.peers_by_phone = {}

                    self.get_initial_dialogs()
                    self.get_contacts()
                else:
                    self.send(functions.messages.GetPinnedDialogs())
                    self.get_initial_dialogs_chunk()
            else:
                self.send(functions.updates.GetState())
        except Exception as e:
            self.is_started = False
            self.session.stop()
            raise e

        for i in range(self.UPDATES_WORKERS):
            self.updates_workers_list.append(
                Thread(
                    target=self.updates_worker,
                    name="UpdatesWorker#{}".format(i + 1)
                )
            )

            self.updates_workers_list[-1].start()

        for i in range(self.DOWNLOAD_WORKERS):
            self.download_workers_list.append(
                Thread(
                    target=self.download_worker,
                    name="DownloadWorker#{}".format(i + 1)
                )
            )

            self.download_workers_list[-1].start()

        self.dispatcher.start()

        mimetypes.init()
        Syncer.add(self)

    def stop(self):
        """Use this method to manually stop the Client.
        Requires no parameters.
        """
        if not self.is_started:
            raise ConnectionError("Client is already stopped")

        Syncer.remove(self)
        self.dispatcher.stop()

        for _ in range(self.DOWNLOAD_WORKERS):
            self.download_queue.put(None)

        for i in self.download_workers_list:
            i.join()

        self.download_workers_list.clear()

        for _ in range(self.UPDATES_WORKERS):
            self.updates_queue.put(None)

        for i in self.updates_workers_list:
            i.join()

        self.updates_workers_list.clear()

        for i in self.media_sessions.values():
            i.stop()

        self.media_sessions.clear()

        self.is_started = False
        self.session.stop()

    def idle(self, stop_signals: tuple = (SIGINT, SIGTERM, SIGABRT)):
        """Blocks the program execution until one of the signals is received,
        then gently stops the Client by closing the underlying connection.

        Args:
            stop_signals (``tuple``, *optional*):
                Iterable containing signals the signal handler will listen to.
                Defaults to (SIGINT, SIGTERM, SIGABRT).
        """
        def signal_handler(*args):
            self.is_idle = False

        for s in stop_signals:
            signal(s, signal_handler)

        self.is_idle = True

        while self.is_idle:
            time.sleep(1)

        self.stop()

    def run(self):
        """Use this method to automatically start and idle a Client.
        Requires no parameters.

        Raises:
            :class:`Error <pyrogram.Error>`
        """
        self.start()
        self.idle()
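
    # Example (added for illustration; not part of the original source): the typical
    # lifecycle driven through run(), as implemented by start()/idle()/stop() above.
    # "my_account" is a placeholder session name.
    #
    #   from pyrogram import Client
    #
    #   app = Client("my_account")
    #   app.run()  # start(), block on idle() until SIGINT/SIGTERM/SIGABRT, then stop()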

    def add_handler(self, handler, group: int = 0):
        """Use this method to register an update handler.

        You can register multiple handlers, but at most one handler within a group
        will be used for a single update. To handle the same update more than once, register
        your handler using a different group id (lower group id == higher priority).

        Args:
            handler (``Handler``):
                The handler to be registered.

            group (``int``, *optional*):
                The group identifier, defaults to 0.

        Returns:
            A tuple of (handler, group)
        """
        if isinstance(handler, DisconnectHandler):
            self.disconnect_handler = handler.callback
        else:
            self.dispatcher.add_handler(handler, group)

        return handler, group
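
    # Example (added for illustration; not part of the original source): registering a
    # callback for new messages. This assumes MessageHandler is exported by the
    # top-level pyrogram package, as in contemporary releases; "app" is a started Client.
    #
    #   from pyrogram import MessageHandler
    #
    #   def log_message(client, message):
    #       print(message)
    #
    #   registration = app.add_handler(MessageHandler(log_message), group=1)
    #   # ... later, the returned (handler, group) tuple can be passed back directly:
    #   app.remove_handler(*registration)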

    def remove_handler(self, handler, group: int = 0):
        """Removes a previously-added update handler.

        Make sure to provide the right group that the handler was added in. You can use
        the return value of the :meth:`add_handler` method, a tuple of (handler, group), and
        pass it directly.

        Args:
            handler (``Handler``):
                The handler to be removed.

            group (``int``, *optional*):
                The group identifier, defaults to 0.
        """
        if isinstance(handler, DisconnectHandler):
            self.disconnect_handler = None
        else:
            self.dispatcher.remove_handler(handler, group)

    def authorize_bot(self):
        try:
            r = self.send(
                functions.auth.ImportBotAuthorization(
                    flags=0,
                    api_id=self.api_id,
                    api_hash=self.api_hash,
                    bot_auth_token=self.bot_token
                )
            )
        except UserMigrate as e:
            self.session.stop()

            self.dc_id = e.x
            self.auth_key = Auth(self.dc_id, self.test_mode, self.ipv6, self._proxy).create()

            self.session = Session(
                self,
                self.dc_id,
                self.auth_key
            )

            self.session.start()
            self.authorize_bot()
        else:
            self.user_id = r.user.id

    def authorize_user(self):
        phone_number_invalid_raises = self.phone_number is not None
        phone_code_invalid_raises = self.phone_code is not None
        password_hash_invalid_raises = self.password is not None
        first_name_invalid_raises = self.first_name is not None

        while True:
            if self.phone_number is None:
                self.phone_number = input("Enter phone number: ")

                while True:
                    confirm = input("Is \"{}\" correct? (y/n): ".format(self.phone_number))

                    if confirm in ("y", "1"):
                        break
                    elif confirm in ("n", "2"):
                        self.phone_number = input("Enter phone number: ")

            self.phone_number = self.phone_number.strip("+")

            try:
                r = self.send(
                    functions.auth.SendCode(
                        self.phone_number,
                        self.api_id,
                        self.api_hash
                    )
                )
            except (PhoneMigrate, NetworkMigrate) as e:
                self.session.stop()

                self.dc_id = e.x
                self.auth_key = Auth(self.dc_id, self.test_mode, self.ipv6, self._proxy).create()

                self.session = Session(
                    self,
                    self.dc_id,
                    self.auth_key
                )
                self.session.start()

                r = self.send(
                    functions.auth.SendCode(
                        self.phone_number,
                        self.api_id,
                        self.api_hash
                    )
                )
                break
            except (PhoneNumberInvalid, PhoneNumberBanned) as e:
                if phone_number_invalid_raises:
                    raise
                else:
                    print(e.MESSAGE)
                    self.phone_number = None
            except FloodWait as e:
                if phone_number_invalid_raises:
                    raise
                else:
                    print(e.MESSAGE.format(x=e.x))
                    time.sleep(e.x)
            except Exception as e:
                log.error(e, exc_info=True)
            else:
                break

        phone_registered = r.phone_registered
        phone_code_hash = r.phone_code_hash
        terms_of_service = r.terms_of_service

        if terms_of_service:
            print("\n" + terms_of_service.text + "\n")

        if self.force_sms:
            self.send(
                functions.auth.ResendCode(
                    phone_number=self.phone_number,
                    phone_code_hash=phone_code_hash
                )
            )

        while True:
            self.phone_code = (
                input("Enter phone code: ") if self.phone_code is None
                else self.phone_code if type(self.phone_code) is str
                else str(self.phone_code(self.phone_number))
            )

            try:
                if phone_registered:
                    r = self.send(
                        functions.auth.SignIn(
                            self.phone_number,
                            phone_code_hash,
                            self.phone_code
                        )
                    )
                else:
                    try:
                        self.send(
                            functions.auth.SignIn(
                                self.phone_number,
                                phone_code_hash,
                                self.phone_code
                            )
                        )
                    except PhoneNumberUnoccupied:
                        pass

                    self.first_name = self.first_name if self.first_name is not None else input("First name: ")
                    self.last_name = self.last_name if self.last_name is not None else input("Last name: ")

                    r = self.send(
                        functions.auth.SignUp(
                            self.phone_number,
                            phone_code_hash,
                            self.phone_code,
                            self.first_name,
                            self.last_name
                        )
                    )
            except (PhoneCodeInvalid, PhoneCodeEmpty, PhoneCodeExpired, PhoneCodeHashEmpty) as e:
                if phone_code_invalid_raises:
                    raise
                else:
                    print(e.MESSAGE)
                    self.phone_code = None
            except FirstnameInvalid as e:
                if first_name_invalid_raises:
                    raise
                else:
                    print(e.MESSAGE)
                    self.first_name = None
            except SessionPasswordNeeded as e:
                print(e.MESSAGE)
                r = self.send(functions.account.GetPassword())

                while True:
                    try:
                        if self.password is None:
                            print("Hint: {}".format(r.hint))
                            self.password = getpass.getpass("Enter password: ")

                        if type(self.password) is str:
                            self.password = r.current_salt + self.password.encode() + r.current_salt

                        password_hash = sha256(self.password).digest()

                        r = self.send(functions.auth.CheckPassword(password_hash))
                    except PasswordHashInvalid as e:
                        if password_hash_invalid_raises:
                            raise
                        else:
                            print(e.MESSAGE)
                            self.password = None
                    except FloodWait as e:
                        if password_hash_invalid_raises:
                            raise
                        else:
                            print(e.MESSAGE.format(x=e.x))
                            time.sleep(e.x)
                    except Exception as e:
                        log.error(e, exc_info=True)
                    else:
                        break

                break
            except FloodWait as e:
                if phone_code_invalid_raises or first_name_invalid_raises:
                    raise
                else:
                    print(e.MESSAGE.format(x=e.x))
                    time.sleep(e.x)
            except Exception as e:
                log.error(e, exc_info=True)
            else:
                break

        if terms_of_service:
            assert self.send(functions.help.AcceptTermsOfService(terms_of_service.id))

        self.password = None
        self.user_id = r.user.id

        print("Login successful")

    def fetch_peers(self, entities: list):
        for entity in entities:
            if isinstance(entity, types.User):
                user_id = entity.id

                access_hash = entity.access_hash

                if access_hash is None:
                    continue

                username = entity.username
                phone = entity.phone

                input_peer = types.InputPeerUser(
                    user_id=user_id,
                    access_hash=access_hash
                )

                self.peers_by_id[user_id] = input_peer

                if username is not None:
                    self.peers_by_username[username.lower()] = input_peer

                if phone is not None:
                    self.peers_by_phone[phone] = input_peer

            if isinstance(entity, (types.Chat, types.ChatForbidden)):
                chat_id = entity.id
                peer_id = -chat_id

                input_peer = types.InputPeerChat(
                    chat_id=chat_id
                )

                self.peers_by_id[peer_id] = input_peer

            if isinstance(entity, (types.Channel, types.ChannelForbidden)):
                channel_id = entity.id
                peer_id = int("-100" + str(channel_id))

                access_hash = entity.access_hash

                if access_hash is None:
                    continue

                username = getattr(entity, "username", None)

                input_peer = types.InputPeerChannel(
                    channel_id=channel_id,
                    access_hash=access_hash
                )

                self.peers_by_id[peer_id] = input_peer

                if username is not None:
                    self.peers_by_username[username.lower()] = input_peer

    def download_worker(self):
        name = threading.current_thread().name
        log.debug("{} started".format(name))

        while True:
            media = self.download_queue.get()

            if media is None:
                break

            temp_file_path = ""
            final_file_path = ""

            try:
                media, file_name, done, progress, progress_args, path = media

                file_id = media.file_id
                size = media.file_size

                directory, file_name = os.path.split(file_name)
                directory = directory or "downloads"

                try:
                    decoded = utils.decode(file_id)
                    fmt = "<iiqqqqi" if len(decoded) > 24 else "<iiqq"
                    unpacked = struct.unpack(fmt, decoded)
                except (AssertionError, binascii.Error, struct.error):
                    raise FileIdInvalid from None
                else:
                    media_type = unpacked[0]
                    dc_id = unpacked[1]
                    id = unpacked[2]
                    access_hash = unpacked[3]
                    volume_id = None
                    secret = None
                    local_id = None

                    if len(decoded) > 24:
                        volume_id = unpacked[4]
                        secret = unpacked[5]
                        local_id = unpacked[6]

                    media_type_str = Client.MEDIA_TYPE_ID.get(media_type, None)

                    if media_type_str is None:
                        raise FileIdInvalid("Unknown media type: {}".format(unpacked[0]))

                file_name = file_name or getattr(media, "file_name", None)

                if not file_name:
                    if media_type == 3:
                        extension = ".ogg"
                    elif media_type in (4, 10, 13):
                        extension = mimetypes.guess_extension(media.mime_type) or ".mp4"
                    elif media_type == 5:
                        extension = mimetypes.guess_extension(media.mime_type) or ".unknown"
                    elif media_type == 8:
                        extension = ".webp"
                    elif media_type == 9:
                        extension = mimetypes.guess_extension(media.mime_type) or ".mp3"
                    elif media_type in (0, 1, 2):
                        extension = ".jpg"
                    else:
                        continue

                    file_name = "{}_{}_{}{}".format(
                        media_type_str,
                        datetime.fromtimestamp(
                            getattr(media, "date", None) or time.time()
                        ).strftime("%Y-%m-%d_%H-%M-%S"),
                        self.rnd_id(),
                        extension
                    )

                temp_file_path = self.get_file(
                    dc_id=dc_id,
                    id=id,
                    access_hash=access_hash,
                    volume_id=volume_id,
                    local_id=local_id,
                    secret=secret,
                    size=size,
                    progress=progress,
                    progress_args=progress_args
                )

                if temp_file_path:
                    final_file_path = os.path.abspath(re.sub("\\\\", "/", os.path.join(directory, file_name)))
                    os.makedirs(directory, exist_ok=True)
                    shutil.move(temp_file_path, final_file_path)
            except Exception as e:
                log.error(e, exc_info=True)

                try:
                    os.remove(temp_file_path)
                except OSError:
                    pass
            else:
                # TODO: "" or None for faulty download, which is better?
                # os.path methods return "" in case something does not exist, I prefer this.
                # For now let's keep None
                path[0] = final_file_path or None
            finally:
                done.set()

        log.debug("{} stopped".format(name))

    def updates_worker(self):
        name = threading.current_thread().name
        log.debug("{} started".format(name))

        while True:
            updates = self.updates_queue.get()

            if updates is None:
                break

            try:
                if isinstance(updates, (types.Update, types.UpdatesCombined)):
                    self.fetch_peers(updates.users)
                    self.fetch_peers(updates.chats)

                    for update in updates.updates:
                        channel_id = getattr(
                            getattr(
                                getattr(
                                    update, "message", None
                                ), "to_id", None
                            ), "channel_id", None
                        ) or getattr(update, "channel_id", None)

                        pts = getattr(update, "pts", None)
                        pts_count = getattr(update, "pts_count", None)

                        if isinstance(update, types.UpdateChannelTooLong):
                            log.warning(update)

                        if isinstance(update, types.UpdateNewChannelMessage):
                            message = update.message

                            if not isinstance(message, types.MessageEmpty):
                                try:
                                    diff = self.send(
                                        functions.updates.GetChannelDifference(
                                            channel=self.resolve_peer(int("-100" + str(channel_id))),
                                            filter=types.ChannelMessagesFilter(
                                                ranges=[types.MessageRange(
                                                    min_id=update.message.id,
                                                    max_id=update.message.id
                                                )]
                                            ),
                                            pts=pts - pts_count,
                                            limit=pts
                                        )
                                    )
                                except ChannelPrivate:
                                    pass
                                else:
                                    if not isinstance(diff, types.updates.ChannelDifferenceEmpty):
                                        updates.users += diff.users
                                        updates.chats += diff.chats

                        if channel_id and pts:
                            if channel_id not in self.channels_pts:
                                self.channels_pts[channel_id] = []

                            if pts in self.channels_pts[channel_id]:
                                continue

                            self.channels_pts[channel_id].append(pts)

                            if len(self.channels_pts[channel_id]) > 50:
                                self.channels_pts[channel_id] = self.channels_pts[channel_id][25:]

                        self.dispatcher.updates.put((update, updates.users, updates.chats))
                elif isinstance(updates, (types.UpdateShortMessage, types.UpdateShortChatMessage)):
                    diff = self.send(
                        functions.updates.GetDifference(
                            pts=updates.pts - updates.pts_count,
                            date=updates.date,
                            qts=-1
                        )
                    )

                    if diff.new_messages:
                        self.dispatcher.updates.put((
                            types.UpdateNewMessage(
                                message=diff.new_messages[0],
                                pts=updates.pts,
                                pts_count=updates.pts_count
                            ),
                            diff.users,
                            diff.chats
                        ))
                    else:
                        self.dispatcher.updates.put((diff.other_updates[0], [], []))
                elif isinstance(updates, types.UpdateShort):
                    self.dispatcher.updates.put((updates.update, [], []))
                elif isinstance(updates, types.UpdatesTooLong):
                    log.warning(updates)
            except Exception as e:
                log.error(e, exc_info=True)

        log.debug("{} stopped".format(name))

    def send(self, data: Object, retries: int = Session.MAX_RETRIES, timeout: float = Session.WAIT_TIMEOUT):
        """Use this method to send Raw Function queries.

        This method makes it possible to manually call every single Telegram API method in a low-level manner.
        Available functions are listed in the :obj:`functions <pyrogram.api.functions>` package and may accept compound
        data types from :obj:`types <pyrogram.api.types>` as well as bare types such as ``int``, ``str``, etc...

        Args:
            data (``Object``):
                The API Scheme function filled with proper arguments.

            retries (``int``):
                Number of retries.

            timeout (``float``):
                Timeout in seconds.

        Raises:
            :class:`Error <pyrogram.Error>`
        """
        if not self.is_started:
            raise ConnectionError("Client has not been started")

        r = self.session.send(data, retries, timeout)

        self.fetch_peers(getattr(r, "users", []))
        self.fetch_peers(getattr(r, "chats", []))

        return r
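
    # Example (added for illustration; not part of the original source): invoking a raw
    # API method through send(). account.UpdateStatus is one of the generated functions
    # available under pyrogram.api.functions; "app" is an already started Client.
    #
    #   from pyrogram.api import functions
    #
    #   app.send(functions.account.UpdateStatus(offline=False))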

    def load_config(self):
        parser = ConfigParser()
        parser.read(self.config_file)

        if self.api_id and self.api_hash:
            pass
        else:
            if parser.has_section("pyrogram"):
                self.api_id = parser.getint("pyrogram", "api_id")
                self.api_hash = parser.get("pyrogram", "api_hash")
            else:
                raise AttributeError(
                    "No API Key found. "
                    "More info: https://docs.pyrogram.ml/start/ProjectSetup#configuration"
                )

        for option in ["app_version", "device_model", "system_version", "lang_code"]:
            if getattr(self, option):
                pass
            else:
                if parser.has_section("pyrogram"):
                    setattr(self, option, parser.get(
                        "pyrogram",
                        option,
                        fallback=getattr(Client, option.upper())
                    ))
                else:
                    setattr(self, option, getattr(Client, option.upper()))

        if self._proxy:
            self._proxy["enabled"] = True
        else:
            self._proxy = {}

            if parser.has_section("proxy"):
                self._proxy["enabled"] = parser.getboolean("proxy", "enabled")
                self._proxy["hostname"] = parser.get("proxy", "hostname")
                self._proxy["port"] = parser.getint("proxy", "port")
                self._proxy["username"] = parser.get("proxy", "username", fallback=None) or None
                self._proxy["password"] = parser.get("proxy", "password", fallback=None) or None

    def load_session(self):
        try:
            with open(os.path.join(self.workdir, "{}.session".format(self.session_name)), encoding="utf-8") as f:
                s = json.load(f)
        except FileNotFoundError:
            self.dc_id = 1
            self.date = 0
            self.auth_key = Auth(self.dc_id, self.test_mode, self.ipv6, self._proxy).create()
        else:
            self.dc_id = s["dc_id"]
            self.test_mode = s["test_mode"]
            self.auth_key = base64.b64decode("".join(s["auth_key"]))
            self.user_id = s["user_id"]
            self.date = s.get("date", 0)

            for k, v in s.get("peers_by_id", {}).items():
                self.peers_by_id[int(k)] = utils.get_input_peer(int(k), v)

            for k, v in s.get("peers_by_username", {}).items():
                peer = self.peers_by_id.get(v, None)

                if peer:
                    self.peers_by_username[k] = peer

            for k, v in s.get("peers_by_phone", {}).items():
                peer = self.peers_by_id.get(v, None)

                if peer:
                    self.peers_by_phone[k] = peer

    def save_session(self):
        auth_key = base64.b64encode(self.auth_key).decode()
        auth_key = [auth_key[i: i + 43] for i in range(0, len(auth_key), 43)]

        os.makedirs(self.workdir, exist_ok=True)

        with open(os.path.join(self.workdir, "{}.session".format(self.session_name)), "w", encoding="utf-8") as f:
            json.dump(
                dict(
                    dc_id=self.dc_id,
                    test_mode=self.test_mode,
                    auth_key=auth_key,
                    user_id=self.user_id,
                    date=self.date
                ),
                f,
                indent=4
            )

    def get_initial_dialogs_chunk(self, offset_date: int = 0):
        while True:
            try:
                r = self.send(
                    functions.messages.GetDialogs(
                        offset_date=offset_date,
                        offset_id=0,
                        offset_peer=types.InputPeerEmpty(),
                        limit=self.DIALOGS_AT_ONCE,
                        hash=0,
                        exclude_pinned=True
                    )
                )
            except FloodWait as e:
                log.warning("get_dialogs flood: waiting {} seconds".format(e.x))
                time.sleep(e.x)
            else:
                log.info("Total peers: {}".format(len(self.peers_by_id)))
                return r

    def get_initial_dialogs(self):
        self.send(functions.messages.GetPinnedDialogs())

        dialogs = self.get_initial_dialogs_chunk()
        offset_date = utils.get_offset_date(dialogs)

        while len(dialogs.dialogs) == self.DIALOGS_AT_ONCE:
            dialogs = self.get_initial_dialogs_chunk(offset_date)
            offset_date = utils.get_offset_date(dialogs)

        self.get_initial_dialogs_chunk()

    def resolve_peer(self, peer_id: int or str):
        """Use this method to get the InputPeer of a known peer_id.

        This is a utility method intended to be used only when working with Raw Functions (i.e., a Telegram API
        method you wish to use which is not yet available in the Client class as an easy-to-use method), whenever
        an InputPeer type is required.

        Args:
            peer_id (``int`` | ``str``):
                The peer id you want to extract the InputPeer from.
                Can be a direct id (int), a username (str) or a phone number (str).

        Returns:
            On success, the resolved peer id is returned in the form of an InputPeer object.

        Raises:
            :class:`Error <pyrogram.Error>`
        """
        if type(peer_id) is str:
            if peer_id in ("self", "me"):
                return types.InputPeerSelf()

            peer_id = re.sub(r"[@+\s]", "", peer_id.lower())

            try:
                int(peer_id)
            except ValueError:
                if peer_id not in self.peers_by_username:
                    self.send(functions.contacts.ResolveUsername(peer_id))

                return self.peers_by_username[peer_id]
            else:
                try:
                    return self.peers_by_phone[peer_id]
                except KeyError:
                    raise PeerIdInvalid

        try:  # User
            return self.peers_by_id[peer_id]
        except KeyError:
            try:  # Chat
                return self.peers_by_id[-peer_id]
            except KeyError:
                try:  # Channel
                    return self.peers_by_id[int("-100" + str(peer_id))]
                except (KeyError, ValueError):
                    raise PeerIdInvalid
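
    # Example (added for illustration; not part of the original source): resolving peers
    # for later use with Raw Functions. "@username" and the numeric id below are
    # placeholders for peers the session already knows about or can look up.
    #
    #   me = app.resolve_peer("me")                # InputPeerSelf
    #   peer = app.resolve_peer("@username")       # resolved by username
    #   same = app.resolve_peer(123456789)         # resolved by numeric id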

    def save_file(self,
                  path: str,
                  file_id: int = None,
                  file_part: int = 0,
                  progress: callable = None,
                  progress_args: tuple = ()):
        part_size = 512 * 1024
        file_size = os.path.getsize(path)
        file_total_parts = int(math.ceil(file_size / part_size))
        is_big = True if file_size > 10 * 1024 * 1024 else False
        is_missing_part = True if file_id is not None else False
        file_id = file_id or self.rnd_id()
        md5_sum = md5() if not is_big and not is_missing_part else None

        session = Session(self, self.dc_id, self.auth_key, is_media=True)
        session.start()

        try:
            with open(path, "rb") as f:
                f.seek(part_size * file_part)

                while True:
                    chunk = f.read(part_size)

                    if not chunk:
                        if not is_big:
                            md5_sum = "".join([hex(i)[2:].zfill(2) for i in md5_sum.digest()])
                        break

                    if is_big:
                        rpc = functions.upload.SaveBigFilePart(
                            file_id=file_id,
                            file_part=file_part,
                            file_total_parts=file_total_parts,
                            bytes=chunk
                        )
                    else:
                        rpc = functions.upload.SaveFilePart(
                            file_id=file_id,
                            file_part=file_part,
                            bytes=chunk
                        )

                    assert session.send(rpc), "Couldn't upload file"

                    if is_missing_part:
                        return

                    if not is_big:
                        md5_sum.update(chunk)

                    file_part += 1

                    if progress:
                        progress(self, min(file_part * part_size, file_size), file_size, *progress_args)
        except Exception as e:
            log.error(e, exc_info=True)
        else:
            if is_big:
                return types.InputFileBig(
                    id=file_id,
                    parts=file_total_parts,
                    name=os.path.basename(path)
                )
            else:
                return types.InputFile(
                    id=file_id,
                    parts=file_total_parts,
                    name=os.path.basename(path),
                    md5_checksum=md5_sum
                )
        finally:
            session.stop()

    def get_file(self,
                 dc_id: int,
                 id: int = None,
                 access_hash: int = None,
                 volume_id: int = None,
                 local_id: int = None,
                 secret: int = None,
                 version: int = 0,
                 size: int = None,
                 progress: callable = None,
                 progress_args: tuple = None) -> str:
        with self.media_sessions_lock:
            session = self.media_sessions.get(dc_id, None)

            if session is None:
                if dc_id != self.dc_id:
                    exported_auth = self.send(
                        functions.auth.ExportAuthorization(
                            dc_id=dc_id
                        )
                    )

                    session = Session(
                        self,
                        dc_id,
                        Auth(dc_id, self.test_mode, self.ipv6, self._proxy).create(),
                        is_media=True
                    )

                    session.start()

                    self.media_sessions[dc_id] = session

                    session.send(
                        functions.auth.ImportAuthorization(
                            id=exported_auth.id,
                            bytes=exported_auth.bytes
                        )
                    )
                else:
                    session = Session(
                        self,
                        dc_id,
                        self.auth_key,
                        is_media=True
                    )

                    session.start()

                    self.media_sessions[dc_id] = session

        if volume_id:  # Photos are accessed by volume_id, local_id, secret
            location = types.InputFileLocation(
                volume_id=volume_id,
                local_id=local_id,
                secret=secret
            )
        else:  # Any other file can be more easily accessed by id and access_hash
            location = types.InputDocumentFileLocation(
                id=id,
                access_hash=access_hash,
                version=version
            )

        limit = 1024 * 1024
        offset = 0
        file_name = ""

        try:
            r = session.send(
                functions.upload.GetFile(
                    location=location,
                    offset=offset,
                    limit=limit
                )
            )

            if isinstance(r, types.upload.File):
                with tempfile.NamedTemporaryFile("wb", delete=False) as f:
                    file_name = f.name

                    while True:
                        chunk = r.bytes

                        if not chunk:
                            break

                        f.write(chunk)

                        offset += limit

                        if progress:
                            progress(self, min(offset, size), size, *progress_args)

                        r = session.send(
                            functions.upload.GetFile(
                                location=location,
                                offset=offset,
                                limit=limit
                            )
                        )

            elif isinstance(r, types.upload.FileCdnRedirect):
                with self.media_sessions_lock:
                    cdn_session = self.media_sessions.get(r.dc_id, None)

                    if cdn_session is None:
                        cdn_session = Session(
                            self,
                            r.dc_id,
                            Auth(r.dc_id, self.test_mode, self.ipv6, self._proxy).create(),
                            is_media=True,
                            is_cdn=True
                        )

                        cdn_session.start()

                        self.media_sessions[r.dc_id] = cdn_session

                try:
                    with tempfile.NamedTemporaryFile("wb", delete=False) as f:
                        file_name = f.name

                        while True:
                            r2 = cdn_session.send(
                                functions.upload.GetCdnFile(
                                    file_token=r.file_token,
                                    offset=offset,
                                    limit=limit
                                )
                            )

                            if isinstance(r2, types.upload.CdnFileReuploadNeeded):
                                try:
                                    session.send(
                                        functions.upload.ReuploadCdnFile(
                                            file_token=r.file_token,
                                            request_token=r2.request_token
                                        )
                                    )
                                except VolumeLocNotFound:
                                    break
                                else:
                                    continue

                            chunk = r2.bytes

                            # https://core.telegram.org/cdn#decrypting-files
                            decrypted_chunk = AES.ctr256_decrypt(
                                chunk,
                                r.encryption_key,
                                bytearray(
                                    r.encryption_iv[:-4]
                                    + (offset // 16).to_bytes(4, "big")
                                )
                            )

                            hashes = session.send(
                                functions.upload.GetCdnFileHashes(
                                    r.file_token,
                                    offset
                                )
                            )

                            # https://core.telegram.org/cdn#verifying-files
                            for i, h in enumerate(hashes):
                                cdn_chunk = decrypted_chunk[h.limit * i: h.limit * (i + 1)]
                                assert h.hash == sha256(cdn_chunk).digest(), "Invalid CDN hash part {}".format(i)

                            f.write(decrypted_chunk)

                            offset += limit

                            if progress:
                                progress(self, min(offset, size), size, *progress_args)

                            if len(chunk) < limit:
                                break
                except Exception as e:
                    raise e
        except Exception as e:
            log.error(e, exc_info=True)

            try:
                os.remove(file_name)
            except OSError:
                pass

            return ""
        else:
            return file_name