Separate paid content and paid (free) preview content
Any paid post that has image previews will be marked as "Free".
UltimaHoarder committed Feb 4, 2021
1 parent ed448d9 commit 0b8fc9d
Showing 3 changed files with 16 additions and 1 deletion.
extras/OFRenamer/start.py (1 addition, 0 deletions)
@@ -51,6 +51,7 @@ def fix_directories(post):
option["date_format"] = date_format
option["text_length"] = text_length
option["directory"] = download_path
option["preview"] = media.preview
prepared_format = prepare_reformat(option)
file_directory = main_helper.reformat(
prepared_format, file_directory_format)
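The renamer change above is small: fix_directories() now copies each media record's preview flag into the option dict it hands to prepare_reformat, so OFRenamer resolves {value} the same way the scraper does. A trimmed-down sketch of that option assembly (build_option and its default arguments are illustrative stand-ins, not code from the project):

    # Illustrative sketch only; build_option and its defaults are stand-ins,
    # not code from extras/OFRenamer/start.py.
    def build_option(media_row, download_path, date_format="%d-%m-%Y", text_length=255):
        option = {}
        option["date_format"] = date_format
        option["text_length"] = text_length
        option["directory"] = download_path
        option["preview"] = media_row.preview  # new in this commit: per-media preview flag
        return option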
helpers/main_helper.py (3 additions, 1 deletion)
@@ -270,6 +270,7 @@ def export_sqlite(archive_path, datas, parent_type, legacy_fixer=False):
media_db.media_id = media_id
media_db.post_id = post_id
media_db.link = media["links"][0]
media_db.preview = media.get("preview", False)
media_db.directory = media["directory"]
media_db.filename = media["filename"]
media_db.media_type = media["media_type"]
@@ -318,7 +319,8 @@ def reformat(prepared_format, unformatted):
extra_count = len("{text}")
if "{value}" in unformatted:
if prepared_format.price:
value = "Paid"
if not prepared_format.preview:
value = "Paid"
directory = prepared_format.directory
path = unformatted.replace("{site_name}", prepared_format.site_name)
path = path.replace("{post_id}", post_id)
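Together with the export_sqlite change, the reformat() tweak above implements the behaviour described in the commit message: {value} only resolves to "Paid" when the post has a price and the media item is not a preview; otherwise it keeps the default label ("Free", per the commit description; the default assignment sits outside this hunk). A minimal sketch of that decision, using a simplified stand-in for the real prepared_format object:

    # Illustrative sketch; PreparedFormat and classify_value are stand-ins for the
    # real prepare_reformat output and the {value} branch inside main_helper.reformat.
    from dataclasses import dataclass

    @dataclass
    class PreparedFormat:
        price: int = 0          # post price, 0 for free posts
        preview: bool = False   # True when this media item is only a locked preview

    def classify_value(pf: PreparedFormat) -> str:
        value = "Free"          # default label (per the commit message)
        if pf.price:
            if not pf.preview:
                value = "Paid"
        return value

    assert classify_value(PreparedFormat(price=10)) == "Paid"
    assert classify_value(PreparedFormat(price=10, preview=True)) == "Free"  # paid post, preview media
    assert classify_value(PreparedFormat()) == "Free"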
modules/onlyfans.py (12 additions, 0 deletions)
@@ -915,16 +915,21 @@ def media_scraper(results, api, formatted_directories, username, api_type, paren
rawText = media_api.get("rawText", "")
text = media_api.get("text", "")
final_text = rawText if rawText else text
previews = media_api.get("preview", None)
# if media_api["responseType"] == "post":
# if media_api["isArchived"]:
# pass
if api_type == "Messages":
media_api["rawText"] = media_api["text"]
previews = media_api.get("previews", None)
if api_type == "Mass Messages":
media_user = media_api["fromUser"]
media_username = media_user["username"]
if media_username != username:
continue
if previews == None:
# REMOVE BEFORE PUSHING COMMIT
input("PREVIEW NOT FOUND")
date = media_api["postedAt"] if "postedAt" in media_api else media_api["createdAt"]
if date == "-001-11-30T00:00:00+00:00":
date_string = master_date
@@ -939,13 +944,16 @@
new_post["text"] = final_text
new_post["postedAt"] = date_string
new_post["paid"] = False
new_post["preview_media_ids"] = previews
price = new_post["price"] = media_api["price"] if "price" in media_api else None
if price == None:
price = 0
canPurchase = media_api.get("canPurchase", None)
if price:
if all(media["canView"] for media in media_api["media"]):
new_post["paid"] = True
else:
print
for media in media_api["media"]:
media_id = media["id"]
date = "-001-11-30T00:00:00+00:00"
@@ -1000,6 +1008,9 @@ def media_scraper(results, api, formatted_directories, username, api_type, paren
new_media["media_id"] = media_id
new_media["links"] = []
new_media["media_type"] = media_type
new_media["preview"] = False
if int(media_id) in new_post["preview_media_ids"]:
new_media["preview"] = True
for xlink in link, preview_link:
if xlink:
new_media["links"].append(xlink)
@@ -1028,6 +1039,7 @@ def media_scraper(results, api, formatted_directories, username, api_type, paren
option["date_format"] = date_format
option["text_length"] = text_length
option["directory"] = download_path
option["preview"] = new_media["preview"]

prepared_format = prepare_reformat(option)
file_directory = main_helper.reformat(
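The scraper-side changes above are where the preview flag originates: the API lists preview-only media ids under "preview" for posts and "previews" for messages, media_scraper stores that list on the post as preview_media_ids, and each media entry is then flagged by id membership before the option dict is built (the input("PREVIEW NOT FOUND") line is a temporary debug stop the author marked for removal). A compact sketch of that tagging step (tag_preview_media is a stand-in helper, not a function in modules/onlyfans.py):

    # Illustrative sketch only; tag_preview_media is a stand-in, not project code.
    def tag_preview_media(media_api: dict, media_items: list) -> list:
        # Posts expose preview-only media under "preview", messages under "previews";
        # both hold the ids of media that the viewer can only see as previews.
        preview_ids = media_api.get("preview") or media_api.get("previews") or []
        preview_ids = [int(x) for x in preview_ids]
        for item in media_items:
            item["preview"] = int(item["id"]) in preview_ids
        return media_items

    tagged = tag_preview_media({"preview": [111], "price": 10},
                               [{"id": 111}, {"id": "222"}])
    # tagged -> [{'id': 111, 'preview': True}, {'id': '222', 'preview': False}]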
