#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
"""The external component contains testing routines, targeted checks, as
well as a monitor routine that is meant to be run separately to audit
the uptime of the bot. Since these routines are not part of the
regular runtime, they tend to be less documented or updated, and the
code may not be the cleanest.
"""
import datetime
import os
import sqlite3
import sys
import time
from ast import literal_eval
from calendar import monthrange
from collections import Counter
from random import sample
from shutil import copy
import praw
import prawcore
import yaml
import artemis_stats
import connection
import database
import timekeeping
from common import logger
from settings import INFO, FILE_ADDRESS, SETTINGS, SOURCE_FOLDER
"""LOGGING IN"""
USER_AGENT = "Artemis Monitor, a service routine for this account."
connection.login(posts_frequency=False)
reddit = connection.reddit
reddit_helper = connection.reddit_helper
reddit_monitor = praw.Reddit(
client_id=INFO.monitor_app_id,
client_secret=INFO.monitor_app_secret,
password=INFO.monitor_password,
user_agent=USER_AGENT,
username=INFO.monitor_username,
)
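# Note: the monitor logs in with its own separate credentials,
# presumably so that it can still check on and report about the main
# account independently of it.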
# Manually define additional database for stats compilation.
# Temporary until a more sustainable method can be written.
stats_address_1 = "{}{}.db".format(FILE_ADDRESS.data_stats[:-3], 1)
main_address_1 = "{}{}.db".format(FILE_ADDRESS.data_main[:-3], 1)
CONN_STATS_1 = sqlite3.connect(stats_address_1)
CURSOR_STATS_1 = CONN_STATS_1.cursor()
CONN_MAIN_1 = sqlite3.connect(main_address_1)
CURSOR_MAIN_1 = CONN_MAIN_1.cursor()
"""MONITOR FUNCTIONS"""
def monitor_seconds_till_next_hour():
"""Function to determine seconds until the next hour to act.
The monitor uses this time to wait for that period, thus
running itself at the same time each hour.
:return: Returns the number of seconds remaining until the next
action time as an integer.
"""
    # Get the current Unix timestamp and the current hour.
    current_time = int(time.time())
    current_hour = time.strftime("%Y:%m:%d:%H")
    # Choose the next time to run: `monitor_time_check` minutes past
    # the top of the next hour.
    next_hour_time = (
        int(time.mktime(datetime.datetime.strptime(current_hour, "%Y:%m:%d:%H").timetuple()))
        + 3600
    )
    seconds_remaining = (SETTINGS.monitor_time_check * 60) + next_hour_time - current_time
return seconds_remaining
def monitor_wiki_access(date=None):
"""This function used by the monitor accesses a page on the wiki,
where dates on which an outage was detected are stored. This is to
prevent multiple messages on the same day for an outage. The
function adds an date of an action if passed that value, otherwise
it does nothing and just gets back already saved dates as a list.
:param date: The date on which an outage was detected. Passed as a
string in YYYY-MM-DD format.
:return: A Python list of all dates on which outages were detected.
"""
wiki_page = reddit_monitor.subreddit(SETTINGS.wiki).wiki["artemis_monitor"]
processed_data = wiki_page.content_md
# Convert YAML processed text into a Python list.
processed_dates = yaml.safe_load(processed_data)
    # If a date was passed and isn't already saved, add it to the list.
if date is not None and date not in processed_dates:
processed_dates.append(date)
wiki_page.edit(
content=str(processed_dates), reason="Updating with new date `{}`.".format(date)
)
logger.info("Monitor Wiki Access: Updated monitor log with date {}.".format(date))
return processed_dates
def monitor_last_log_checker():
"""This is a function of "last resort" - the monitor accesses the
mod log using the main account and checks to see when the last mod
action was performed. This is in case the widgets aren't being
updated regularly, so the monitor checks to make sure the bot is
actually doing things.
:return: `None`.
"""
item_elapsed = None
log_entry = None
for log in reddit.subreddit("mod").mod.log(limit=1, mod=INFO.username[:12]):
item_elapsed = round(time.time() - log.created_utc, 2)
log_entry = " * **Last main action**: `{}` performed on " "r/{}.".format(
log.action, log.subreddit, item_elapsed
)
return item_elapsed, log_entry
def monitor_main():
"""This is the main monitor function. Hosted separately, it
primarily checks r/AssistantBOT's modlog to see if there are any
recent items in the mod log by the main routine. Since the main
routine regularly updates a status widget, it's a way of verifying
that the bot is still active and running. If it has been longer than
`monitor_time_interval` without any log items, the bot will check
the account itself to see if it has conducted any actions, and if it
    hasn't, the monitor will send the creator a message.
:return: `None`.
"""
current_time = int(time.time())
current_utc = timekeeping.time_convert_to_string(current_time)
current_date = datetime.datetime.utcfromtimestamp(current_time).strftime("%Y-%m-%d")
# Fetch the dates on which notifications have already been sent
# about down times.
done_dates = monitor_wiki_access()
# Access the bot subreddit and check for the most recent mod log.
most_recent_item_time = None
for item in reddit.subreddit(INFO.username[:12]).mod.log(limit=1, mod=INFO.username[:12]):
most_recent_item_time = int(item.created_utc)
# If the log is inaccessible, return.
if most_recent_item_time is None:
logger.info("Monitor: Unable to retrieve anything from the moderation log.")
return
else:
time_diff_mins = round((current_time - most_recent_item_time) / 60, 2)
logger.info(
"Monitor: Time difference since " "last log entry: {} minutes".format(time_diff_mins)
)
if time_diff_mins > SETTINGS.monitor_time_interval:
logger.info(
"Monitor: Time interval exceeded. It's been over {} minutes since the last "
"r/{} update.".format(time_diff_mins, INFO.username[:12])
)
logger.info(
"Monitor: The current time interval to check is "
"{} minutes.".format(SETTINGS.monitor_time_interval)
)
# Get the operational status widget.
operational_widget = None
for widget in reddit.subreddit(INFO.username[:12]).widgets.sidebar:
if isinstance(widget, praw.models.TextArea):
if widget.id == SETTINGS.widget_operational_status:
operational_widget = widget
break
# Check to see if this particular date was already accounted
# for in the wiki and recorded.
if current_date not in done_dates:
# We conduct a final check by also consulting the main
# mod log to see when the last action was. `last_elapsed`
# is the number of seconds since the last action.
last_elapsed, last_main_log_msg = monitor_last_log_checker()
# Message my creator if the date is not recorded.
msg = (
"* **Last [log entry](https://www.reddit.com/r/AssistantBOT/about/log)"
" in r/AssistantBOT**: Recorded {} minutes ago on {}. Current "
"minimum interval is {} minutes.".format(
time_diff_mins, current_date, SETTINGS.monitor_time_interval
)
)
            # If the last actual mod action is also older than our
            # interval, message my creator.
            if last_elapsed is not None and last_elapsed >= SETTINGS.monitor_time_interval * 60:
main_log_chunk = (
"\n\n* **Last main action**: Recorded {:,.2f} minutes ago."
"\n\n{}".format(last_elapsed / 60, last_main_log_msg)
)
reddit.redditor(INFO.creator).message("Artemis may be down.", msg + main_log_chunk)
logger.info("Monitor: Messaged creator about possible downtime.")
# Add the current date to the wiki.
monitor_wiki_access(current_date)
else:
# The mod log indicates that the bot is not actually
# down, so exit early.
return
if operational_widget is not None:
operational_status = "# ❎ {}".format(current_utc)
operational_widget.mod.update(
text=operational_status,
styles={"backgroundColor": "#ed1c24", "headerColor": "#222222"},
)
logger.info(
"Monitor: Updated operational status widget with "
"down notice at {} UTC.".format(current_utc)
)
return
"""TESTING / EXTERNAL FUNCTIONS"""
def external_backup_daily():
"""This function backs up the database files to a secure Box account
and a local target. It does not back up the credentials file or the
main Artemis file itself. This is called by a cron job daily.
:return: Nothing.
"""
current_day = timekeeping.convert_to_string(time.time())
# Iterate over the backup paths that are listed.
for backup_path in [INFO.backup_folder, INFO.backup_folder_2]:
if not os.path.isdir(backup_path):
# If the web disk or the physical disk is not mounted,
# record an error.
logger.error(
"Backup: It appears that the backup disk "
"at {} is not mounted.".format(backup_path)
)
else:
# Mounted successfully. Create a new folder in the
# YYYY-MM-DD format.
new_folder_path = "{}/{}".format(backup_path, current_day)
# If there already is a folder with today's date, do not
# do anything. Otherwise, start the backup process.
if os.path.isdir(new_folder_path):
logger.info(
"Backup: Backup folder for {} "
"already exists at {}.".format(current_day, backup_path)
)
else:
# Create the new target folder and get the list of files
# from the home folder.
os.makedirs(new_folder_path)
source_files = os.listdir(SOURCE_FOLDER)
                # We don't need to back up files containing these
                # extensions or keywords. Exclude them from the backup.
                excluded_keywords = ["journal", ".json", ".out", ".py", ".yaml"]
                source_files = [
                    x
                    for x in source_files
                    if not any(keyword in x for keyword in excluded_keywords)
                ]
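                # For example, a database file such as "_data_main.db"
                # would be kept, while "settings.yaml" would be excluded
                # (file names here are illustrative).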
# Iterate over each file and back it up.
for file_name in source_files:
# Ignore period-prefixed temporary files.
if file_name.startswith("."):
continue
# Get the full path of the file.
full_file_name = os.path.join(SOURCE_FOLDER, file_name)
# If the file exists, try backing it up. If there
# happens to be a copying error, skip the file.
if os.path.isfile(full_file_name):
try:
copy(full_file_name, new_folder_path)
except OSError:
pass
logger.info("Backup: Completed for {}.".format(current_day))
return
def external_random_test(query):
"""Fetch initialization information for a random selection of
non-local subreddits. This is used to test the process and procedure
of adding new subreddits to the bot's monitored database.
"""
already_monitored = database.monitored_subreddits_retrieve()
if query == "random":
        # Choose a number of random subreddits to test. If a randomly
        # chosen subreddit is already being monitored, try once more
        # to get an alternative.
random_subs = []
num_initialize = int(input("\nEnter the number of random subreddits to initialize: "))
for _ in range(num_initialize):
# noinspection PyUnboundLocalVariable
first_retrieve = reddit.random_subreddit().display_name.lower()
if first_retrieve not in already_monitored:
random_subs.append(first_retrieve)
else:
random_subs.append(reddit.random_subreddit().display_name.lower())
random_subs.sort()
print("\n\n### Now testing: r/{}.\n".format(", r/".join(random_subs)))
init_times = []
for test_sub in random_subs:
print("\n\n### Initializing data for r/{}...\n".format(test_sub))
starting = time.time()
artemis_stats.initialization(test_sub, create_wiki=False)
generated_text = artemis_stats.wikipage_collater(test_sub)
artemis_stats.wikipage_editor_local(test_sub, generated_text)
elapsed = (time.time() - starting) / 60
init_times.append(elapsed)
print(
"\n\n# r/{} data ({:.2f} mins):\n\n{}\n\n---".format(
test_sub, elapsed, generated_text
)
)
print(
"\n\n### All {} initialization tests complete. "
"Average initialization time: {:.2f} mins".format(
num_initialize, sum(init_times) / len(init_times)
)
)
else:
# Initialize the data for the sub.
logger.info("Manually intializing data for r/{}.".format(query))
time_initialize_start = time.time()
artemis_stats.initialization(query, create_wiki=False)
initialized = time.time() - time_initialize_start
print("\n---\n\nInitialization time: {:.2f} minutes".format(initialized / 60))
# Generate and print the collated data just as the wiki page
# would look like.
print(artemis_stats.wikipage_collater(query))
elapsed = time.time() - time_initialize_start
print("\nTotal elapsed time: {:.2f} minutes".format(elapsed / 60))
return
def external_local_test(query):
"""Fetch initialization information for a random selection of
locally stored subeddits.
"""
# Now begin to test the collation by running the
# function, making sure there are no errors.
if query == "random":
# Fetch all the subreddits we monitor and ask for
# the number to test.
number_to_test = int(input("\nEnter the number of tests to conduct: "))
random_subs = sample(database.monitored_subreddits_retrieve(), number_to_test)
random_subs.sort()
print("\n\n### Now testing: r/{}.\n".format(", r/".join(random_subs)))
init_times = []
for test_sub in random_subs:
time_initialize_start = time.time()
print("\n---\n\n> Testing r/{}...\n".format(test_sub))
            # If the generated text is longer than a certain length,
            # the test is considered passed.
tested_data = artemis_stats.wikipage_collater(test_sub)
if len(tested_data) > 1000:
total_time = time.time() - time_initialize_start
artemis_stats.wikipage_editor_local(test_sub, tested_data)
print(
"\n> Test complete for r/{} in {:.2f} seconds.\n".format(test_sub, total_time)
)
init_times.append(total_time)
print(
"\n\n# All {} wikipage collater tests complete. "
"Average initialization time: {:.2f} secs".format(
number_to_test, sum(init_times) / len(init_times)
)
)
else:
logger.info("Testing data for r/{}.".format(query))
print(artemis_stats.wikipage_collater(query))
return
def external_artemis_monthly_statistics(month_string):
"""This function collects various statistics on the bot's actions
over a certain month and returns them as a Markdown segment.
:param month_string: A month later than December 2019, expressed as
YYYY-MM.
:return: A Markdown segment of text.
"""
list_of_days = []
list_of_actions = []
list_of_lines = []
list_of_posts = []
added_subreddits = {}
formatted_subreddits = []
actions = {}
actions_s_master = {}
actions_m_master = {}
actions_total = {}
actions_flaired = {}
posts = {}
# Omit these actions from the chart.
omit_actions = ["Removed as moderator"]
# Get the UNIX times that bound our month.
year, month = month_string.split("-")
first_day = month_string + "-01"
first_day_unix = timekeeping.convert_to_unix(first_day)
    start_time = first_day_unix
end_time = "{}-{}".format(month_string, monthrange(int(year), int(month))[1])
end_time = timekeeping.convert_to_unix(end_time) + 86399
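    # Worked example: for month_string "2020-06", monthrange(2020, 6)[1]
    # is 30, so the bounds run from "2020-06-01" 00:00:00 through
    # "2020-06-30" 23:59:59 (the last day's timestamp plus 86,399 seconds).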
# Get the subreddits that were added during this month.
for post in reddit_helper.redditor(INFO.username + "1").submissions.new(limit=100):
if "accepted" in post.title.lower() and end_time >= post.created_utc >= start_time:
new_sub = post.title.split("r/")[1]
added_subreddits[new_sub] = post.over_18
for subreddit in added_subreddits:
# Make an exception for banned subreddits, particularly.
try:
subreddit_object = reddit.subreddit(subreddit)
subreddit_type = subreddit_object.subreddit_type
except prawcore.exceptions.NotFound:
logger.info("Subreddit r/{} appears to be banned.".format(subreddit))
continue
except prawcore.exceptions.Forbidden:
logger.info("Subreddit r/{} appears to be private.".format(subreddit))
continue
if subreddit_type not in ["public", "restricted"]:
continue
else:
is_nsfw = added_subreddits[subreddit]
if is_nsfw:
formatted_subreddits.append(subreddit + " (NSFW)")
else:
formatted_subreddits.append(subreddit)
formatted_subreddits.sort(key=lambda y: y.lower())
    # The heading doubles as the title of the eventual statistics post.
    added_section = title = "\n# Artemis Overall Statistics — {}".format(month_string)
added_section += "\n\n### Added Subreddits\n\n" "* r/{}".format(
"\n* r/".join(formatted_subreddits)
)
added_section += "\n* **Total**: {} public subreddits".format(len(formatted_subreddits))
# Get the actions from during this time period.
database.CURSOR_STATS.execute("SELECT * FROM subreddit_actions WHERE subreddit == ?", ("all",))
actions_s = literal_eval(database.CURSOR_STATS.fetchone()[1])
database.CURSOR_MAIN.execute("SELECT * FROM subreddit_actions WHERE subreddit == ?", ("all",))
actions_m = literal_eval(database.CURSOR_MAIN.fetchone()[1])
# Get the actions from the other instance during this time period.
CURSOR_STATS_1.execute("SELECT * FROM subreddit_actions WHERE subreddit == ?", ("all",))
actions_s_1 = literal_eval(CURSOR_STATS_1.fetchone()[1])
CURSOR_MAIN_1.execute("SELECT * FROM subreddit_actions WHERE subreddit == ?", ("all",))
actions_m_1 = literal_eval(CURSOR_MAIN_1.fetchone()[1])
# Combine the two database instances together.
for date in actions_s:
# Exclude entries for dates prior to our search parameters.
if timekeeping.convert_to_unix(date) < first_day_unix:
continue
if date in actions_s_1:
try:
actions_s_master[date] = dict(
Counter(actions_s[date]) + Counter(actions_s_1[date])
)
except TypeError:
logger.info(f"Error in stats dictionary encountered on {date}.")
if date in actions_m_1:
try:
actions_m_master[date] = dict(
Counter(actions_m[date]) + Counter(actions_m_1[date])
)
except TypeError:
logger.info(f"Error in main dictionary encountered on {date}.")
# Combine the actions together in a single dictionary.
all_days = list(set(list(actions_s_master.keys()) + list(actions_m_master.keys())))
all_days.sort()
    # Restrict to days from the first day of the month onward. Dates in
    # YYYY-MM-DD format sort lexicographically, so string comparison is
    # safe here.
    subset_days = [day for day in all_days if day >= first_day]
for day in subset_days:
        if day in actions_s_master:
            actions[day] = dict(
                Counter(actions_s_master[day]) + Counter(actions_m_master.get(day, {}))
            )
        elif day in actions_m_master:
            actions[day] = dict(actions_m_master[day])
# Iterate over the days and actions in the actions dictionaries.
for day in actions:
if end_time >= timekeeping.convert_to_unix(day) >= start_time:
list_of_days.append(day)
for action in actions[day]:
if action not in list_of_actions and action not in omit_actions:
list_of_actions.append(action)
# Sort and form the header.
list_of_actions.sort()
list_of_days.sort()
for action in list_of_actions:
actions_total[action] = 0
header = "Date | " + " | ".join(list_of_actions)
divider = "----|---" * len(list_of_actions)
# Iterate over the days and form line-by-line actions.
for day in list_of_days:
day_data = actions[day]
formatted_components = []
for action in list_of_actions:
if action in day_data:
formatted_components.append("{:,}".format(day_data[action]))
actions_total[action] += day_data[action]
if action.startswith("Flaired"):
actions_flaired[day] = day_data[action]
else:
formatted_components.append("---")
day_line = "| {} | {} ".format(day, " | ".join(formatted_components))
list_of_lines.append(day_line)
# Sum up the total number of actions as a final line.
formatted_components = []
for action in list_of_actions:
formatted_components.append("{:,}".format(actions_total[action]))
total_line = "| **Total** | {} ".format(" | ".join(formatted_components))
list_of_lines.append(total_line)
# Calculate the result of actions upon messages. This will also
# calculate the percentage of each action per day.
messages_dict = {}
messages_lines = []
messages_header = (
"### Daily Flairing Messages\n\n"
"| Date | Total messages | Directly flaired | Fuzzed | Matched | Passed |\n"
"|------|----------------|------------------|--------|---------|--------|\n"
)
with open(FILE_ADDRESS.messages, "r", encoding="utf-8") as f:
messages_data = f.read()
messages_list = messages_data.split("\n")[2:] # Skip table headers.
# Process the messages list into a dictionary indexed by date.
# Within each date entry is a dictionary with actions.
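    # Each entry is assumed to be a Markdown table row whose second
    # cell is the date and whose sixth cell is the action, e.g.
    # "| 2020-06-01 | ... | ... | ... | Fuzzed |" (a hypothetical row;
    # the middle columns are not used here).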
for entry in messages_list:
message_date = entry.split("|")[1].strip()
message_action = entry.split("|")[5].strip()
if message_date in messages_dict:
if message_action in messages_dict[message_date]:
messages_dict[message_date][message_action] += 1
else:
messages_dict[message_date][message_action] = 1
else:
messages_dict[message_date] = {message_action: 1}
message_line = (
"| {} | {} | **{}** ({:.0%}) | **{}** ({:.0%}) " "| **{}** ({:.0%}) | **{}** ({:.0%}) |"
)
    for day in list_of_days:
        successful_count = actions_flaired.get(day, 0)
        if day in messages_dict:
            fuzzed_count = messages_dict[day].get("Fuzzed", 0)
            matched_count = messages_dict[day].get("Matched", 0)
            passed_count = messages_dict[day].get("None", 0)
            total = successful_count + fuzzed_count + matched_count + passed_count
        else:
            fuzzed_count = matched_count = passed_count = 0
            total = int(successful_count)
        # Skip days with no recorded actions to avoid division by zero.
        if not total:
            continue
line = message_line.format(
day,
total,
successful_count,
successful_count / total,
fuzzed_count,
fuzzed_count / total,
matched_count,
matched_count / total,
passed_count,
passed_count / total,
)
messages_lines.append(line)
messages_body = messages_header + "\n".join(messages_lines)
# Collect the number of posts across ALL subreddits.
posts_total = 0
database.CURSOR_STATS.execute("SELECT * FROM subreddit_stats_posts")
stats_results = database.CURSOR_STATS.fetchall()
for entry in stats_results:
sub_data = literal_eval(entry[1])
for day in list_of_days:
if day not in sub_data:
continue
day_amount = sum(sub_data[day].values())
if day in posts:
posts[day] += day_amount
else:
posts[day] = day_amount
posts_total += day_amount
# Collect it across the second instance.
CURSOR_STATS_1.execute("SELECT * FROM subreddit_stats_posts")
stats_results_1 = CURSOR_STATS_1.fetchall()
for entry in stats_results_1:
sub_data = literal_eval(entry[1])
for day in list_of_days:
if day not in sub_data:
continue
day_amount = sum(sub_data[day].values())
if day in posts:
posts[day] += day_amount
else:
posts[day] = day_amount
posts_total += day_amount
    # Format the daily post counts and add a final summary line.
for day in list(sorted(posts.keys())):
line = "| {} | {:,} |".format(day, posts[day])
list_of_posts.append(line)
list_of_posts.append("| **Total** | {:,} |".format(posts_total))
posts_data = (
"### Daily Processed Posts\n\n| Date | Number of Posts |"
"\n|------|-----------------|\n{}".format("\n".join(list_of_posts))
)
# Finalize the text to return.
body = "{}\n\n{}\n\n### Daily Actions\n\n".format(added_section, posts_data)
body += "{}\n{}\n{}".format(header, divider, "\n".join(list_of_lines))
title = title[2:].strip()
return body, title
def external_mail_alert():
"""Function to mail moderators of subreddits that use the flair
enforcement function to let them know about downtime or any other
such issues. To be rarely used.
"""
flair_enforced_subreddits = database.monitored_subreddits_retrieve(True)
flair_enforced_subreddits.sort()
# Retrieve the message to send.
subject = input("\nPlease enter the subject of the message: ").strip()
subject = "[Artemis Alert] {}".format(subject)
message = input(
"\nPlease enter the message you wish to send "
"to {} subreddits: ".format(len(flair_enforced_subreddits))
).strip()
# Send the message to moderators.
for subreddit in flair_enforced_subreddits:
reddit.subreddit(subreddit).message(subject, message)
logger.info("External Mail: Sent a message to the moderators of r/{}.".format(subreddit))
return
def external_database_splitter():
"""This function splits a monolithic `_data.db` Artemis Classic
    database into two separate ones for use in v2.0 Juniper. This is a
    local function that will eventually be deprecated but is included
    for compatibility purposes.
:return: `None`.
"""
logger.info("External: Beginning database intercision...")
# Define the location of the donor pre-v2.0 database file to split.
donor_db_address = FILE_ADDRESS.data_main.replace("data_main", "data")
conn_donor = sqlite3.connect(donor_db_address)
cursor_donor = conn_donor.cursor()
# Fetch the subreddit actions for saving and to be processed.
# This is because subreddit actions have to be parsed out between
# the two databases, in accordance with their action type.
cursor_donor.execute("SELECT * FROM subreddit_actions WHERE subreddit != ?", ("all",))
actions = cursor_donor.fetchall()
cursor_donor.execute("SELECT * FROM subreddit_actions WHERE subreddit = ?", ("all",))
all_actions = literal_eval(cursor_donor.fetchone()[1])
# Create the database tables if they do not already exist.
database.table_creator()
main_tables = ["monitored", "posts_filtered", "posts_operations", "posts_processed"]
stats_tables = [
"subreddit_activity",
"subreddit_stats_posts",
"subreddit_subscribers_new",
"subreddit_traffic",
"subreddit_updated",
]
# Start the copying for both databases. Subreddit actions is dealt
# with later.
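    # SQLite's ATTACH makes the donor file queryable under the `donor`
    # schema name, so each table can be copied with a single statement,
    # e.g. "INSERT INTO monitored SELECT * FROM donor.monitored".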
database.CURSOR_MAIN.execute("ATTACH ? AS donor", (donor_db_address,))
for table in main_tables:
database.CURSOR_MAIN.execute("SELECT * FROM {}".format(table))
result = database.CURSOR_MAIN.fetchone()
if result:
logger.info("Data already exists in main table `{}`. Skipping...".format(table))
continue
command = "INSERT INTO {0} SELECT * from donor.{0}".format(table)
database.CURSOR_MAIN.execute(command)
database.CONN_MAIN.commit()
logger.info("Completed copying main database table `{}`.".format(table))
database.CURSOR_STATS.execute("ATTACH ? AS donor", (donor_db_address,))
for table in stats_tables:
database.CURSOR_STATS.execute("SELECT * FROM {}".format(table))
result = database.CURSOR_STATS.fetchone()
if result:
logger.info("Data already exists in stats table `{}`. Skipping...".format(table))
continue
command = "INSERT INTO {0} SELECT * from donor.{0}".format(table)
database.CURSOR_STATS.execute(command)
database.CONN_STATS.commit()
logger.info("Completed copying statistics database table `{}`.".format(table))
# Deal with subreddit actions.
actions_main = [
"Exported takeout data",
"Flaired post",
"Removed as moderator",
"Removed post",
"Restored post",
"Retrieved query data",
"Reverted configuration",
"Sent flair reminder",
"Updated configuration",
]
actions_stats = ["Updated statistics", "Updated userflair statistics"]
for entry in actions:
subreddit_main_actions = {}
subreddit_stats_actions = {}
# Sort out the actions by their respective databases.
subreddit = entry[0]
actions_data = literal_eval(entry[1])
for action in actions_data:
if action in actions_main:
subreddit_main_actions[action] = actions_data[action]
elif action in actions_stats:
subreddit_stats_actions[action] = actions_data[action]
            else:
logger.info("Error: Action `{}` on r/{} is not listed.".format(action, subreddit))
# Insert the actions into their respective tables.
if subreddit_main_actions:
database.CURSOR_MAIN.execute(
"INSERT INTO subreddit_actions VALUES (?, ?)",
(subreddit, str(subreddit_main_actions)),
)
database.CONN_MAIN.commit()
if subreddit_stats_actions:
database.CURSOR_STATS.execute(
"INSERT INTO subreddit_actions VALUES (?, ?)",
(subreddit, str(subreddit_stats_actions)),
)
database.CONN_STATS.commit()
logger.info("External: Completed actions transfer.")
    # Now deal with the 'all' actions table. We duplicate this in the
    # sense that each `subreddit_actions` table gets its own respective
    # 'all' entry, which can be conjoined later.
all_main_actions = {}
all_stats_actions = {}
    for date in sorted(all_actions):
        all_stats_actions[date] = {
            k: all_actions[date][k] for k in actions_stats if k in all_actions[date]
        }
        all_main_actions[date] = {
            k: all_actions[date][k] for k in actions_main if k in all_actions[date]
        }
if all_main_actions:
database.CURSOR_MAIN.execute(
"INSERT INTO subreddit_actions VALUES (?, ?)", ("all", str(all_main_actions))
)
database.CONN_MAIN.commit()
if all_stats_actions:
database.CURSOR_STATS.execute(
"INSERT INTO subreddit_actions VALUES (?, ?)", ("all", str(all_stats_actions))
)
database.CONN_STATS.commit()
logger.info("External: Completed 'all' actions transfer.")
return
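# This script is invoked directly with a mode keyword as its first
# argument, for example (invocation shown is illustrative):
#
#     python3 external.py monitor
#
# The modes handled below are 'start', 'test', 'userflair', 'alert',
# 'split', 'stats'/'statistics', 'setup', 'monitor', and 'backup';
# the last two are intended to be run as cron jobs.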
if len(sys.argv) > 1:
REGULAR_MODE = False
# Get the mode keyword that's accepted after the script path.
specific_mode = sys.argv[1].strip().lower()
# noinspection PyUnboundLocalVariable
logger.info("LOCAL MODE: Launching Artemis in '{}' mode.".format(specific_mode))
if specific_mode == "start": # We want to fetch specific information for a sub.
l_mode = input("\n====\n\nEnter 'random', name of a new sub, or 'x' to exit: ")
l_mode = l_mode.lower().strip()
# Exit the routine if the value is x.
if l_mode == "x":
sys.exit()
else:
external_random_test(l_mode)
elif specific_mode == "test":
# This runs the wikipage generator through randomly selected
# subreddits that have already saved data.
l_mode = input("\n====\n\nEnter 'random', name of a sub, or 'x' to exit: ").lower().strip()
# Exit the routine if the value is x.
if l_mode == "x":
sys.exit()
else:
external_local_test(l_mode)
elif specific_mode == "userflair":
userflair_subs = input("\n====\n\nEnter a list of subreddits for userflair statistics: ")
userflair_subs_list = userflair_subs.split(",")
userflair_subs_list = [x.strip() for x in userflair_subs_list]
artemis_stats.wikipage_userflair_editor(userflair_subs_list)
elif specific_mode == "alert":
external_mail_alert()
elif specific_mode == "split":
external_database_splitter()
elif specific_mode == "stats" or specific_mode == "statistics":
month_stats = input(
"\n====\n\nEnter the month in YYYY-MM format, "
"'p' for the previous month, or 'x' to exit: "
).strip()
# database.define_database(1)
if month_stats == "p":
month_stats = timekeeping.previous_month()
print(f"> Getting statistics for {month_stats}.")
elif month_stats == "x":
sys.exit()
month_data = external_artemis_monthly_statistics(month_stats)
print(month_data[0])
permission_to_post = input("\n\n> Should I post this to the subreddit? (y/n) ").strip()
if permission_to_post.lower() == "y":
post_note = input("\n\n>> Type a note to include: ").strip()
post_note += "\n\n{}".format(month_data[0])
reddit.subreddit(INFO.username).submit(
title=month_data[1], selftext=post_note, send_replies=False, resubmit=False
)
print(">> Successfully posted statistics post. {}".format(month_data[1]))
elif specific_mode == "setup":
        instance_to_set = input(
            "\n====\n\nEnter the number of an instance to set up its databases: "
        )
instance_to_set = int(instance_to_set)
database.define_database(instance_to_set)
# The last two are cron jobs.
elif specific_mode == "monitor":
# noinspection PyBroadException
try:
monitor_main()
        except Exception as e:
            logger.error("Monitor: Encountered an error: {}".format(e))
elif specific_mode == "backup":
external_backup_daily()