Implement per-entry patches

Instead of one temporary JSON file, contributors can now submit patches as separate files, one per entry. `merge_out.py` then merges them into the main `atlas.json` without the risk of merge conflicts.
Hans5958 2023-06-15 21:22:35 +07:00
parent e336fe452d
commit 978757ce8f
5 changed files with 185 additions and 132 deletions
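For illustration, a single patch file in data/patches/ could look like the line below. All values are made up, and a real entry carries the usual atlas fields such as path and center. The underscore-prefixed keys are merge metadata: merge_out.py strips _author and _reddit_id before writing atlas.json, and an id of 0 or -1 marks a new entry that receives the next numeric ID.

{"id": 0, "name": "Example Artwork", "description": "A hypothetical example entry.", "_author": "gh:octocat"}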

tools/create_patch.py (new file)

@@ -0,0 +1,36 @@
import json
import os
import secrets
from pathlib import Path

patches_dir = "../data/patches/"
Path(patches_dir).mkdir(parents=True, exist_ok=True)

entry = None
entry_input = ""

print("Write your submission entry here.")

# Keep reading lines until the accumulated input parses as valid JSON.
while entry is None:
    entry_input += input("> ")
    try:
        entry = json.loads(entry_input)
    except json.JSONDecodeError:
        pass

print()
print("Entry received!")
print()
print("Enter your username as the attribution to be shown on the about page.")
print("Leave it empty if you don't want to.")
print("You can use your Reddit username. Do not include the \"u/\" part.")
print("You can also use your GitHub username, but add \"gh:\" before your username (e.g. \"gh:octocat\")")

author = input("Author: ")

if author:
    entry['_author'] = author

# File name: gh-<4 random hex digits>-<entry name, lowercased and hyphenated>.json
with open(f'{patches_dir}gh-{secrets.token_hex(2)}-{"-".join(entry["name"].split()).lower()}.json', 'w', encoding='utf-8') as out_file:
    out_file.write(json.dumps(entry, ensure_ascii=False))

print("Patch created!")
print("You can commit this file directly, then push and create a pull request.")

tools/merge_out.py

@@ -5,87 +5,98 @@ import scale_back
from scale_back import ScaleConfig
out_ids = []
atlas_ids = {}
authors = []

with open('../web/all-authors.txt', 'r') as authors_file:
    authors = authors_file.read().strip().split()

with open('../data/read-ids.txt', 'r') as ids_file:
    out_ids = ids_file.read().strip().split()

with open('../web/atlas.json', 'r', encoding='utf-8') as atlas_file:
    atlas_data = json.loads(atlas_file.read())

format_all_entries(atlas_data)

base_image_path = os.path.join('..', 'web', '_img', 'canvas', 'place30')
ScaleConfig.image1 = os.path.join(base_image_path, '159.png')
scale_back.swap_source_dest('164', '165', os.path.join(base_image_path, '163_159.png'))
scale_back.scale_back_entries(atlas_data)
scale_back.swap_source_dest('165', '166', os.path.join(base_image_path, '164_159.png'))
scale_back.scale_back_entries(atlas_data)
scale_back.swap_source_dest('166', '167', os.path.join(base_image_path, '165_159.png'))
scale_back.scale_back_entries(atlas_data)

last_id = 0

# Index atlas entries by ID and find the highest numeric ID currently in use.
for i, entry in enumerate(atlas_data):
    atlas_ids[entry['id']] = i
    id = entry['id']
    if id.isnumeric() and int(id) > last_id and int(id) - last_id < 100:
        last_id = int(id)

patches_dir = "../data/patches/"

if not os.path.exists(patches_dir):
    print("Patches folder not found. Exiting.")
    exit()

for filename in os.listdir(patches_dir):
    f = os.path.join(patches_dir, filename)

    print(f"{filename}: Processing...")

    if not os.path.isfile(f) or not f.endswith('json'):
        continue

    with open(f, 'r', encoding='utf-8') as entry_file:
        entry = json.loads(entry_file.read())

    # Patches from the Reddit crawler carry the submission ID for deduplication.
    if '_reddit_id' in entry:
        reddit_id = entry['_reddit_id']
        if reddit_id in out_ids:
            print(f"{filename}: Submission from {entry['id']} has been included! This will be ignored from the merge.")
            continue
        out_ids.append(reddit_id)
        del entry['_reddit_id']

    if '_author' in entry:
        author = entry['_author']
        if author not in authors:
            authors.append(author)
        del entry['_author']

    if entry['id'] in out_ids:
        print(f"{filename}: Submission from {entry['id']} has been included! This will be ignored from the merge.")
        continue

    # New entries use a placeholder ID of 0 or -1; assign the next numeric ID.
    if entry['id'] in (0, -1, '0', '-1'):
        last_id += 1
        print(f"{filename}: Entry is new, assigned ID {last_id}")
        entry['id'] = str(last_id)
    else:
        # Normalize to string so lookups match the string IDs in atlas.json.
        entry['id'] = str(entry['id'])
        out_ids.append(entry['id'])

    if entry['id'] in atlas_ids:
        index = atlas_ids[entry['id']]
        print(f"{filename}: Edited {atlas_data[index]['id']}.")
        atlas_data[index] = entry
    else:
        print(f"{filename}: Added {entry['id']}.")
        atlas_data.append(entry)

    os.remove(f)

print('Writing...')

with open('../web/atlas.json', 'w', encoding='utf-8') as atlas_file:
    per_line_entries(atlas_data, atlas_file)

with open('../data/read-ids.txt', 'w', encoding='utf-8') as ids_file:
    ids_file.write("\n".join(out_ids) + "\n")

with open('../web/all-authors.txt', 'w', encoding='utf-8') as authors_file:
    authors_file.write("\n".join(authors) + "\n")

print('All done.')
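A merge run over two example patch files might print something like this (file names and IDs are illustrative):

$ python merge_out.py
gh-1a2b-example-artwork.json: Processing...
gh-1a2b-example-artwork.json: Entry is new, assigned ID 2301
gh-1a2b-example-artwork.json: Added 2301.
reddit-abc123-other-artwork.json: Processing...
reddit-abc123-other-artwork.json: Edited 1975.
Writing...
All done.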

Reddit crawler script

@@ -17,74 +17,90 @@ Running:
1. Run the script
2. Input the next ID to use
3. Manually resolve errors in temp-atlas-manual.txt
4. a. Use merge_out.py, or...
   b. a. Copy temp-atlas.json entries into web/_js/atlas.js (mind the edits!)
      b. Copy temp-read-ids.txt IDs into data/read-ids.txt
5. Create a pull request
"""
from praw import Reddit
from praw.models import Submission
import json
import time
import re
import traceback
from aformatter import format_all, validate
from pathlib import Path
import humanize
from datetime import datetime
import secrets

patches_dir = "../data/patches/"
Path(patches_dir).mkdir(parents=True, exist_ok=True)

with open('credentials', 'r') as file:
    credentials = file.readlines()
    client_id = credentials[0].strip()
    client_secret = credentials[1].strip()
    username = credentials[2].strip() if len(credentials) > 3 else ""
    password = credentials[3].strip() if len(credentials) > 3 else ""

reddit = Reddit(
    client_id=client_id,
    client_secret=client_secret,
    username=username,
    password=password,
    user_agent='atlas_bot'
)

existing_ids = []

has_write_access = not reddit.read_only
if not has_write_access:
    print("Warning: No write access. Post flairs will not be updated. Waiting 5 seconds...")
    time.sleep(5)

with open('../data/read-ids.txt', 'r') as edit_ids_file:
    for id in [x.strip() for x in edit_ids_file.readlines()]:
        existing_ids.append(id)

print("Running...")

def set_flair(submission, flair):
    if has_write_access and submission.link_flair_text != flair:
        flair_choices = submission.flair.choices()
        flair = next(x for x in flair_choices if x["flair_text_editable"] and flair == x["flair_text"])
        submission.flair.select(flair["flair_template_id"])

total_all_flairs = 0
count_dupe = 0
count_fail = 0
count_success = 0
count_total = 0

with open('temp-atlas-manual.txt', 'w', encoding='utf-8') as FAIL_FILE:

    submission: Submission
    for submission in reddit.subreddit('placeAtlas2').new(limit=2000):
        total_all_flairs += 1

        print(f"{submission.id}: Submitted {humanize.naturaltime(datetime.utcnow() - datetime.utcfromtimestamp(submission.created_utc))}.")

        # Skip submissions that are already merged or already have a patch file waiting.
        if submission.id in existing_ids or any(Path(patches_dir).glob(f'reddit-{submission.id}-*.json')):
            set_flair(submission, "Processed Entry")
            print(f"{submission.id}: Submission is a duplicate! Skipped.")
            if (count_dupe == 1):
                print(f"{submission.id}: Second duplicate. Stopped!")
                break
            print(f"{submission.id}: First duplicate. Continue running.")
            count_dupe += 1
            continue

        print(f"{submission.id}: Processing...")

        if submission.link_flair_text == "New Entry" or submission.link_flair_text == "Edit Entry":
            try:
@@ -102,16 +118,11 @@
                if submission_json:
                    if submission.link_flair_text == "Edit Entry":
                        assert submission_json["id"] > 0, "Edit invalid because ID is tampered, it must not be 0 or -1!"
                    else:
                        assert submission_json["id"] <= 0, "Addition invalid because ID is tampered, it must be 0 or -1!"

                    # Keep the Reddit ID and author so merge_out.py can dedupe and credit.
                    submission_json_dummy = {"id": submission_json["id"], "_reddit_id": submission.id, "_author": submission.author.name}
                    for key in submission_json:
                        if not key in submission_json_dummy:
@@ -121,13 +132,11 @@
                    assert validation_status < 3, \
                        "Submission invalid after validation. This may be caused by not enough points on the path."

                    with open(f'{patches_dir}reddit-{submission.id}-{"-".join(submission_json["name"].split()).lower()}.json', 'w', encoding='utf-8') as out_file:
                        out_file.write(json.dumps(submission_json, ensure_ascii=False))

                    count_success += 1
                    set_flair(submission, "Processed Entry")
                except Exception as e:
@@ -140,12 +149,11 @@
"==== CLEAN ====" + "\n\n" +
text + "\n\n"
)
failcount += 1
count_fail += 1
set_flair(submission, "Rejected Entry")
print(f"{submission.id}: Something went wrong! Rejected.")
print("Wrote " + submission.id + ", submitted " + str(round(time.time()-submission.created_utc)) + " seconds ago")
totalcount += 1
count_total += 1
print(f"{submission.id}: Processed!")
OUT_FILE.writelines(OUT_FILE_LINES)
print(f"\n\nTotal all flairs: {total_all_flairs}\nSuccess: {successcount}/{totalcount}\nFail: {failcount}/{totalcount}\nPlease check temp-atlas-manual.txt for failed entries to manually resolve.")
print(f"\n\nTotal all flairs: {total_all_flairs}\nSuccess: {count_success}/{count_total}\nFail: {count_fail}/{count_total}\nPlease check temp-atlas-manual.txt for failed entries to manually resolve.")

requirements.txt

@@ -1,2 +1,3 @@
praw
tqdm
humanize
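Assuming this is the requirements file for the tools, the new humanize dependency installs with the rest via:

pip install -r requirements.txt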

web/all-authors.txt

@@ -5075,7 +5075,6 @@ JohnnyHotshot
-robotic
olly
Shadox
Ericbazinga
MingCate
SlipsSC_
carlyc999
@@ -5128,7 +5127,7 @@ p1terdeN
IncestSimulator2016
zephyr12345
Blizhazard
Fishes_Glubs & GamerKingFaiz
GamerKingFaiz
Wodgam
TheNomad
VinsElBins
@@ -5160,7 +5159,6 @@ neurospex
soopimus_
SporekidX
ForsenPlace
scorpion24100 / ThePizzaMuncher
Vapku
BouchonEnPlastique
SailorElei
@@ -5298,7 +5296,6 @@ HappyMerlin
YummyGummyDrops
Forcoy
RookeMistake
slanterns
raudrin
AriaNoire
evaroussel
@@ -5407,4 +5404,4 @@ Hellmustang0226
tiny2ooons
duroki66
Aloxite
Polygonboy0