diff --git a/py34/list.py b/py34/list.py
index 2d46627..b02cc16 100644
--- a/py34/list.py
+++ b/py34/list.py
@@ -2,7 +2,7 @@
 from .post import Post
 from .scraper import scraper, ScraperException, CaptchaException
 from .url import parse_thumbnail_url
 from .dockid import is_nochick, is_toodeep, is_captcha
-from .thread import Pool
+from concurrent.futures import ThreadPoolExecutor
 import urllib.parse
 from retry import retry
@@ -60,10 +60,9 @@

             # Download thumbnails
             if fetch_thumbnails:
-                pool = Pool()
-                for post in self.posts:
-                    pool.submit(Post.get_thumbnail_data, post)
-                pool.join()
+                with ThreadPoolExecutor(max_workers=5) as pool:
+                    for post in self.posts:
+                        pool.submit(Post.get_thumbnail_data, post)

         except ScraperException as ex:
             raise ex
diff --git a/py34/thread.py b/py34/thread.py
deleted file mode 100644
index a15b37b..0000000
--- a/py34/thread.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from threading import Thread
-from typing import Callable
-
-
-class Pool:
-    def __init__(self, max_workers: int = 5):
-        self.max_workers = max_workers
-        self.jobs: list[Thread] = []
-        self.workers: list[Thread] = []
-
-    def submit(self, func: Callable, *vargs, **kwargs):
-        def proc(self, func: Callable, *vargs, **kwargs):
-            func(*vargs, **kwargs)
-            self._pool_proc()
-
-        self.jobs.append(Thread(
-            target = proc,
-            args = (self, func, *vargs, ),
-            kwargs = kwargs
-        ))
-
-        self._pool_proc()
-
-    def join(self):
-        while len(self.workers) != 0:
-            self.workers[-1].join()
-            self._pool_proc()
-
-    def _pool_proc(self):
-        # Remove any dead workers
-        self.workers = list(filter(Thread.is_alive, self.workers))
-
-        # Process jobs if any
-        while len(self.workers) < self.max_workers and len(self.jobs) != 0:
-            job = self.jobs.pop()
-            job.start()
-            self.workers.append(job)