Tweak file upload settings

Multiple sessions, as used in the current implementation, were causing a
variety of network-related issues. Use a single session instead. Multiple
workers within the same session are fine as long as there are not too many
of them, otherwise the server starts replying with -429 (too many requests).
Setting the queue size to 1 helps keep the upload progress more linear.
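For illustration only, the shape of the resulting pattern is roughly the following: one shared media session, a handful of worker tasks draining a bounded queue, and a producer feeding file parts into that queue. This is a hedged, self-contained sketch, not the actual upload code: send_part stands in for the real network call made through the session, and parts are plain integers.

import asyncio

async def send_part(part):
    # Stand-in for the real network call that uploads one file part
    # through the shared session; here it just simulates latency.
    await asyncio.sleep(0.1)

async def worker(queue):
    # Each worker repeatedly takes one part from the shared queue and
    # sends it. A None item is the shutdown sentinel.
    while True:
        part = await queue.get()
        if part is None:
            return
        await send_part(part)

async def upload(parts, workers_count=4):
    queue = asyncio.Queue(1)  # maxsize 1: the producer waits for a free worker
    workers = [asyncio.create_task(worker(queue)) for _ in range(workers_count)]

    for part in parts:
        await queue.put(part)  # suspends until the previous part has been picked up

    for _ in workers:
        await queue.put(None)  # one sentinel per worker

    await asyncio.gather(*workers)

asyncio.run(upload(range(8)))

The same structure is visible in the diff below: the pool of sessions is gone, the workers share the single session, and the queue holds at most one pending part.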
Dan 2022-12-27 14:55:07 +01:00
parent 9bf742abc0
commit dc3b8a5e01

@@ -134,23 +134,19 @@ class SaveFile:
         file_total_parts = int(math.ceil(file_size / part_size))
         is_big = file_size > 10 * 1024 * 1024
-        pool_size = 3 if is_big else 1
         workers_count = 4 if is_big else 1
         is_missing_part = file_id is not None
         file_id = file_id or self.rnd_id()
         md5_sum = md5() if not is_big and not is_missing_part else None
-        pool = [
-            Session(
-                self, await self.storage.dc_id(), await self.storage.auth_key(),
-                await self.storage.test_mode(), is_media=True
-            ) for _ in range(pool_size)
-        ]
-        workers = [self.loop.create_task(worker(session)) for session in pool for _ in range(workers_count)]
-        queue = asyncio.Queue(16)
+        session = Session(
+            self, await self.storage.dc_id(), await self.storage.auth_key(),
+            await self.storage.test_mode(), is_media=True
+        )
+        workers = [self.loop.create_task(worker(session)) for _ in range(workers_count)]
+        queue = asyncio.Queue(1)
         try:
-            for session in pool:
-                await session.start()
+            await session.start()
             fp.seek(part_size * file_part)
@@ -223,8 +219,7 @@ class SaveFile:
             await asyncio.gather(*workers)
-            for session in pool:
-                await session.stop()
+            await session.stop()
             if isinstance(path, (str, PurePath)):
                 fp.close()
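As a closing note on the queue size mentioned in the commit message: with asyncio.Queue(1), put() suspends until a worker has taken the previous item, so reading parts from disk advances roughly in lockstep with sending them over the network, which is why the reported progress looks more linear than with a 16-slot buffer. A minimal demonstration of that backpressure follows; all names here are made up for the example.

import asyncio

async def consumer(queue):
    # Drains the queue, simulating one slow network send per item.
    while True:
        item = await queue.get()
        if item is None:
            return
        await asyncio.sleep(0.2)  # pretend this is the upload of one part
        print(f"sent part {item}")

async def main():
    queue = asyncio.Queue(1)  # try Queue(16): all "queued" lines then appear at once
    task = asyncio.create_task(consumer(queue))

    for part in range(5):
        await queue.put(part)         # with maxsize=1 this waits for the consumer
        print(f"queued part {part}")  # printed roughly once per send

    await queue.put(None)  # shutdown sentinel
    await task

asyncio.run(main())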