
Commit

Update main_datascraper.py
UltimaHoarder committed Dec 22, 2020
1 parent 78a0a8c commit a631459
Showing 1 changed file with 4 additions and 3 deletions.
datascraper/main_datascraper.py: 7 changes (4 additions, 3 deletions)
@@ -16,7 +16,7 @@
 api_helper = OnlyFans.api_helper
 
 
-def start_datascraper(json_config, site_name_lower,apis:list=[]):
+def start_datascraper(json_config, site_name_lower, apis: list = []):
     json_settings = json_config["settings"]
     json_sites = json_config["supported"]
     domain = json_settings["auto_site_choice"]
@@ -38,15 +38,15 @@ def start_datascraper(json_config, site_name_lower,apis:list=[]):
     original_sessions = [x for x in original_sessions if x]
     if not original_sessions:
         print("Unable to create session")
-        return
+        return False
     archive_time = timeit.default_timer()
     if site_name_lower == "onlyfans":
         site_name = "OnlyFans"
         original_api = OnlyFans
         module = m_onlyfans
         if not apis:
             apis = main_helper.process_profiles(
-                json_settings, original_sessions, site_name, original_api)
+                json_settings, original_sessions, site_name, original_api)
         else:
             for api in apis:
                 api.sessions = original_sessions
@@ -129,3 +129,4 @@ def start_datascraper(json_config, site_name_lower,apis:list=[]):
     stop_time = str(
         int(timeit.default_timer() - archive_time) / 60)[:4]
     print('Archive Completed in ' + stop_time + ' Minutes')
+    return apis
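
The change matters for callers: when session creation fails, start_datascraper now returns False explicitly, and on success it returns the apis list it built, so a later invocation can pass those already-initialised API objects back in through the apis parameter and the function will only reassign their sessions instead of running main_helper.process_profiles again. A minimal usage sketch follows, assuming the datascraper package is importable and using a hypothetical load_config helper and config path; the "onlyfans" site key comes from the diff, but none of this calling code is part of the commit itself.

    import json

    from datascraper import main_datascraper


    def load_config(path):
        # Hypothetical loader for illustration only; the real project builds
        # json_config elsewhere before calling start_datascraper.
        with open(path) as f:
            return json.load(f)


    json_config = load_config("config.json")  # assumed config file path

    # First run: no apis passed, so profiles are processed from scratch.
    apis = main_datascraper.start_datascraper(json_config, "onlyfans")

    if apis is False:
        # Session creation failed (the new explicit `return False` path).
        print("Unable to start: no sessions were created")
    else:
        # Later run: reuse the authenticated API objects returned above;
        # per the diff, this skips process_profiles and only refreshes
        # each api object's sessions.
        apis = main_datascraper.start_datascraper(
            json_config, "onlyfans", apis=apis)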
