Added improved verbose logging

This commit is contained in:
"J-onasJones" 2023-12-29 01:41:03 +01:00
parent 44b92ee354
commit 5cdd2b93e2

View file

@@ -208,7 +208,7 @@ def convert_monthly_content_to_json(content, year, month):
print("[IGNORED] Error parsing line: '" + line + "'") print("[IGNORED] Error parsing line: '" + line + "'")
print(e) print(e)
print(f" ==>Found {len(json_data)} entries in {year}-{month}.") print(f"[{progress}%] Found and parsed {len(json_data)} entries in {year}-{month}." + " "*17)
return json_data return json_data
@@ -225,7 +225,7 @@ def fetch_monthly_page(wiki_link, subreddit_name):
#wiki_page = wiki_page[:wiki_page.find("\n\n")] #wiki_page = wiki_page[:wiki_page.find("\n\n")]
year = wiki_link.split('/')[1] year = wiki_link.split('/')[1]
month = wiki_link.split('/')[2] month = wiki_link.split('/')[2].lower()
month = month.replace("january", "01") month = month.replace("january", "01")
month = month.replace("february", "02") month = month.replace("february", "02")
@@ -262,6 +262,8 @@ reddit = praw.Reddit(
) )
# fetch subreddit # fetch subreddit
print("Fetching Months...")
try: try:
subreddit = reddit.subreddit(subreddit_name) subreddit = reddit.subreddit(subreddit_name)
except praw.exceptions.PRAWException as e: except praw.exceptions.PRAWException as e:
@@ -270,20 +272,27 @@ except praw.exceptions.PRAWException as e:
# fetch wiki page # fetch wiki page
content = fetch_main_reddit_wiki_page(subreddit_name, wiki_page_name) content = fetch_main_reddit_wiki_page(subreddit_name, wiki_page_name)
print("Done!")
if content: if content:
json_data = [] json_data = []
for wiki_link in content[::-1]: for wiki_link in content[::-1]:
progress = int(content[::-1].index(wiki_link)/len(content)*100) progress = int(content[::-1].index(wiki_link)+1/len(content)*100)
if progress < 10: if progress < 10:
progress = " " + str(progress) progress = " " + str(progress)
elif progress < 100: elif progress < 100:
progress = " " + str(progress) progress = " " + str(progress)
print(f"[{progress}%]Fetching monthly page: " + wiki_link) #print(" ==>", end="\n")
print(f"[{progress}%] Fetching monthly page: " + wiki_link, end="\r")
# sleep for 2 seconds to avoid getting rate limited
# reddit api is awful
time.sleep(2)
try: try:
# fetch the monthly page and parse it # fetch the monthly page and parse it
@@ -296,11 +305,7 @@ if content:
print(e) print(e)
exit(1) exit(1)
print(f"[{progress}%]Parsed monthly page: " + wiki_link) #print(f"[{progress}%] Parsed monthly page: " + wiki_link + " ", end="\r")
# sleep for 2 seconds to avoid getting rate limited
# reddit api is awful
time.sleep(2)
# add a first element to the list that holds the date of the last update # add a first element to the list that holds the date of the last update
json_data.insert(0, {"last_update": time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime()) + " UTC"}) json_data.insert(0, {"last_update": time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime()) + " UTC"})
@@ -315,6 +320,8 @@ if content:
cdn_upload_cmd = "rclone copy rkpop_data.json cdn:cdn/api/kcomebacks/" cdn_upload_cmd = "rclone copy rkpop_data.json cdn:cdn/api/kcomebacks/"
if UPLOAD_TO_CDN: if UPLOAD_TO_CDN:
print("Uploading...")
os.system(cdn_upload_cmd) os.system(cdn_upload_cmd)
elif input("Upload to cdn? [Y/n]") in ["Y", "y", ""]: elif input("Upload to cdn? [Y/n]") in ["Y", "y", ""]:
print("Uploading...")
os.system(cdn_upload_cmd) os.system(cdn_upload_cmd)