# Batch downloader: reads URLs (one per line) from the clipboard and
# downloads them concurrently, logging failures to failed.log.
from concurrent.futures import ThreadPoolExecutor
|
|
import pyperclip
|
|
import cloudscraper
|
|
from tqdm import tqdm
|
|
|
|
|
|
# Number of download worker threads running at once.
concurrent_downloads = 5

# Shared Cloudflare-aware HTTP session; used by every worker thread.
scraper = cloudscraper.CloudScraper()

# URLs that failed to download; written to failed.log by the main block.
# NOTE(review): appended from worker threads — list.append is atomic in
# CPython, so no extra locking is used here.
failed = []
|
|
|
|
|
|
def download(url: str) -> None:
    """Download *url* into the current directory with a tqdm progress bar.

    The target filename is the last path segment of the URL with any query
    string stripped (e.g. ``.../b.zip?tok=1`` -> ``b.zip``).  On any failure
    the URL is appended to the module-level ``failed`` list instead of
    raising, so one bad URL does not abort the whole batch.
    """
    try:
        filename = url.split("/")[-1].split("?")[0]

        # Stream the body so large files are not buffered in memory, and use
        # the response as a context manager so the connection is released
        # even when an error occurs mid-transfer (the original leaked it).
        with scraper.get(url, stream=True) as res:
            if res.status_code != 200:
                raise Exception(url + " : " + res.reason)

            # Servers may omit Content-Length; tqdm then shows a plain counter.
            total_size = int(res.headers.get("content-length", 0))

            with tqdm(desc=filename, total=total_size, unit="B", unit_scale=True) as progress_bar:
                with open(filename, "wb") as file:
                    for data in res.iter_content(1024):
                        progress_bar.update(len(data))
                        file.write(data)
    except Exception as ex:
        # Best-effort batch: report the reason (previously discarded) and
        # record the URL so main can write it to failed.log.
        print(f"download failed: {url} ({ex})")
        failed.append(url)
|
|
|
|
|
|
if __name__ == "__main__":
    # Fan the clipboard's URLs (one per line) out to a small thread pool.
    with ThreadPoolExecutor(max_workers=concurrent_downloads) as exe:
        for line in pyperclip.paste().splitlines():
            url = line.strip()
            if not url:
                # Skip blank lines in the pasted list.
                continue
            exe.submit(download, url)

    # The executor's __exit__ joins all workers, so `failed` is complete here.
    if failed:
        with open("failed.log", "w") as file:
            file.writelines(url + "\n" for url in failed)