async def simultaneous_chunked_download(urls_paths, label): timeout = ClientTimeout(total=60000) sem = asyncio.Semaphore(5) async with aiohttp.ClientSession(timeout=timeout, connector=aiohttp.TCPConnector(verify_ssl=False)) as cs: async def _fetch(r, path): async with sem: async with aiofiles.open(path, "wb") as f: async for chunk in r.content.iter_any(): if not chunk: break size = await f.write(chunk) if not indeterminate: bar._done += size bar.show(bar._done) if indeterminate: bar._done += 1 bar.show(bar._done) indeterminate = False total_length = 0 tasks = [] for url, path in urls_paths.items(): r = await cs.get(url) if not indeterminate: try: total_length += r.content_length except Exception: indeterminate = True tasks.append(_fetch(r, path)) verbose_print(f"url: {url},\npath: {path}\n\n") if not indeterminate: bar = progress.Bar( expected_size=total_length, label=label, width=28, hide=False ) else: bar = progress.Bar( expected_size=len(tasks), label=label, width=28, hide=False ) logger._pause_file_output = True bar.show(0) bar._done = 0 await asyncio.gather(*tasks) logger._pause_file_output = False bar.done()The function I have above is for downloading a dictionary of urls asynchronously and then printing out a progress bar. An example of its usage:
The code itself runs perfectly fine; however, I keep getting these errors:
Whilst benign, they are an eyesore and could point towards a gap in my knowledge of both HTTP and asynchronous code, so I would rather get them fixed. However, I'm at a loss as to where or what is causing them, especially since, as I said, the code runs perfectly fine regardless.
If you would like a more practical, hands-on attempt at recreating this, the full code is on my GitHub repo on the dev branch: https://github.com/ohitstom/spicetify-easyinstall/tree/dev
Most of the program can be disregarded if you are testing this out; just press the install button and the problematic code will show itself towards the end. Bear in mind this is a Spotify themer, so if you have Spotify/Spicetify installed you will want to use a VM.

