diff --git a/apis/api_helper.py b/apis/api_helper.py
index 05c38a72d..b6a598527 100644
--- a/apis/api_helper.py
+++ b/apis/api_helper.py
@@ -107,6 +107,15 @@ def json_request(link, session, method="GET", stream=False, json_format=True, da
     return result
 
 
+def multiprocessing():
+    max_threads = global_settings["max_threads"]
+    if max_threads < 1:
+        pool = ThreadPool()
+    else:
+        pool = ThreadPool(max_threads)
+    return pool
+
+
 def create_session(settings={}, custom_proxy="", test_ip=True):
     sessions = [requests.Session()]
     settings = set_settings(settings)
@@ -147,15 +156,6 @@ def set_sessions(proxy):
     return sessions
 
 
-def multiprocessing():
-    max_threads = global_settings["max_threads"]
-    if max_threads < 1:
-        pool = ThreadPool()
-    else:
-        pool = ThreadPool(max_threads)
-    return pool
-
-
 def copy_sessions(original_sessions):
     sessions = []
     for original_session in original_sessions:
diff --git a/modules/onlyfans.py b/modules/onlyfans.py
index 23383b4ce..9feb92573 100644
--- a/modules/onlyfans.py
+++ b/modules/onlyfans.py
@@ -148,7 +148,6 @@ def scrape_choice(api, subscription):
     user_id = subscription.id
     post_count = subscription.postsCount
     archived_count = subscription.archivedPostsCount
-    s = subscription
     media_types = ["Images", "Videos", "Audios", "Texts"]
     if auto_choice:
         input_choice = auto_choice
@@ -268,7 +267,7 @@ def paid_content_scraper(api):
     results = []
     for paid_content in paid_contents:
         author = paid_content.get("author")
-        author = paid_content.get("fromUser",author)
+        author = paid_content.get("fromUser", author)
         subscription = create_subscription(author)
         subscription.sessions = api.sessions
         subscription.download_info["directory"] = j_directory
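
For context on the relocated helper: multiprocessing() only wraps ThreadPool construction, letting the pool size itself from the CPU count when max_threads is not positive and capping it otherwise. The sketch below reproduces that behaviour in isolation and shows one common way such a pool is consumed with starmap; global_settings here is a stand-in dict, and fake_download and the task list are hypothetical examples, not code from either patched module.

    from multiprocessing.pool import ThreadPool

    # Stand-in for the project's global settings; only max_threads matters here.
    global_settings = {"max_threads": -1}


    def multiprocessing():
        # Mirrors the relocated helper: a non-positive max_threads lets ThreadPool
        # size itself from os.cpu_count(); otherwise the pool is capped explicitly.
        max_threads = global_settings["max_threads"]
        if max_threads < 1:
            pool = ThreadPool()
        else:
            pool = ThreadPool(max_threads)
        return pool


    def fake_download(url, directory):
        # Hypothetical worker standing in for a per-item download task.
        return f"{directory}/{url.rsplit('/', 1)[-1]}"


    if __name__ == "__main__":
        tasks = [
            ("https://example.com/a.jpg", "downloads"),
            ("https://example.com/b.mp4", "downloads"),
        ]
        pool = multiprocessing()
        try:
            # starmap unpacks each (url, directory) tuple across the worker threads.
            results = pool.starmap(fake_download, tasks)
        finally:
            pool.close()
            pool.join()
        print(results)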