Merge pull request #1373 from placeAtlas/cleanup

Cleanup->Master
This commit is contained in:
Stefano 2022-05-22 19:14:36 +02:00 committed by GitHub
commit aa87d37827
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
9 changed files with 8446 additions and 8222 deletions

View file

@ -9715,9 +9715,35 @@ ui1nsg
uhwzjt
uhw2h0
uj1yb5
up28w9
up208s
up7z2o
up58ba
up3yfl
up3ty4
up28w9
up208s
up7z2o
up58ba
up3yfl
up3ty4
uqixxl
uqgttl
uqg3it
upzg49
urripm
usn581
usn14a
usmv6w
usmfke
usmdg8
usm0g8
uslwi8
usoilo
uu388f
uu34f8
uu32gh
uu30uk
uu2y6x
uu2w70
uu2uas
uu2rnm
uu2p0n
uui38k
uv0679
uuy3sn
uuy1e5

View file

@ -2,9 +2,16 @@
import re
import json
import math
import traceback
from typing import List
END_NORMAL_IMAGE = "164"
END_WHITEOUT_IMAGE = "166"
NORMAL_IMAGE_SUFFIX = "-" + END_NORMAL_IMAGE
WHITEOUT_IMAGE_SUFFIX = "-" + END_WHITEOUT_IMAGE
"""
Examples:
1. - /r/place
@ -172,7 +179,7 @@ def fix_no_protocol_urls(entry: dict):
if "links" in entry and "website" in entry['links']:
for i in range(len(entry["links"]["website"])):
if entry["links"]["website"][i] and not entry["links"]["website"][i].startswith("http"):
entry["links"]["website"][i] = "https://" + entry["website"]
entry["links"]["website"][i] = "https://" + entry["links"]["website"][i]
return entry
@ -278,28 +285,24 @@ def sort_image_keys(entry: dict):
return entry
def extend_entries_to_whiteout(entry: dict):
def floor_points(entry: dict):
"""
If an entry ends on the final non-whiteout image, extends the image to the last whiteout image where entries can still be made out.
Floors points on path and center, removing the decimal count.
"""
END_NORMAL_IMAGE = "164"
END_WHITEOUT_IMAGE = "166"
NORMAL_IMAGE_SUFFIX = "-" + END_NORMAL_IMAGE
WHITEOUT_IMAGE_SUFFIX = "-" + END_WHITEOUT_IMAGE
for outer_key in ["path", "center"]:
image_keys: List[str] = list(entry[outer_key].keys())
for image_key in image_keys:
new_key = None
if NORMAL_IMAGE_SUFFIX in image_key:
new_key = image_key.replace(NORMAL_IMAGE_SUFFIX, WHITEOUT_IMAGE_SUFFIX)
elif image_key == END_NORMAL_IMAGE:
new_key = END_NORMAL_IMAGE + WHITEOUT_IMAGE_SUFFIX
entry[outer_key][new_key] = entry[outer_key][image_key]
del(entry[outer_key][image_key])
for period in entry["path"]:
for points in entry["path"][period]:
points[0] = math.floor(points[0])
points[1] = math.floor(points[1])
for period in entry["center"]:
points = entry["center"][period]
points[0] = math.floor(points[0])
points[1] = math.floor(points[1])
return entry
def validate(entry: dict):
"""
Validates the entry. Catch errors and tell warnings related to the entry.
@ -350,17 +353,11 @@ def per_line_entries(entries: list):
def format_all(entry: dict, silent=False):
"""
Format using all the available formatters.
Outputs a tuple containing the entry and the validation status code.
Status code key:
0: All valid, no problems
1: Informational logs that may be ignored
2: Warnings that may effect user experience when interacting with the entry
3: Errors that make the entry inaccessible or broken.
"""
def print_(*args, **kwargs):
if not silent:
print(*args, **kwargs)
print_("Fixing r/ capitalization...")
entry = fix_r_caps(entry)
print_("Fix formatting of subreddit...")
@ -383,41 +380,42 @@ def format_all(entry: dict, silent=False):
entry = remove_empty_and_similar(entry)
print_("Sorting image keys...")
entry = sort_image_keys(entry)
print_("Extending entries to whiteout...")
entry = extend_entries_to_whiteout(entry)
print_("Validating...")
status_code = validate(entry)
print_("Flooring points...")
entry = floor_points(entry)
print_("Completed!")
return ( entry, status_code )
return entry
def go(path):
print(f"Formatting {path}...")
with open(path, "r+", encoding='UTF-8') as f1:
entries = json.loads(f1.read())
for i in range(len(entries)):
try:
entry_formatted = format_all(entries[i], True)
validation_status = validate(entries[i])
if validation_status > 2:
print(f"Entry {entry_formatted['id']} will be removed! {json.dumps(entry_formatted)}")
entries[i] = None
else:
entries[i] = entry_formatted
except Exception:
print(f"Exception occured when formatting ID {entries[i]['id']}")
print(traceback.format_exc())
if not (i % 200):
print(f"{i} checked.")
print(f"{len(entries)} checked. Writing...")
with open(path, "w", encoding='utf-8', newline='\n') as f2:
f2.write(per_line_entries(entries))
print("Writing completed. All done.")
if __name__ == '__main__':
def go(path):
print(f"Formatting {path}...")
with open(path, "r+", encoding='UTF-8') as f1:
entries = json.loads(f1.read())
for i in range(len(entries)):
try:
entry_formatted, validation_status = format_all(entries[i], True)
if validation_status > 2:
print(f"Entry {entry_formatted['id']} will be removed! {json.dumps(entry_formatted)}")
entries[i] = None
else:
entries[i] = entry_formatted
except Exception:
print(f"Exception occured when formatting ID {entries[i]['id']}")
print(traceback.format_exc())
if not (i % 200):
print(f"{i} checked.")
print(f"{len(entries)} checked. Writing...")
with open(path, "w", encoding='utf-8', newline='\n') as f2:
f2.write(per_line_entries(entries))
print("Writing completed. All done.")
go("../web/atlas.json")

View file

@ -1,11 +1,28 @@
import json
from formatter import per_line_entries
import os
import formatter
import scale_back
from scale_back import ScaleConfig
merge_source_file = 'temp_atlas.json'
formatter.go(merge_source_file)
base_image_path = os.path.join('..', 'web', '_img', 'canvas', 'place30')
ScaleConfig.image1 = os.path.join(base_image_path, '159.png')
scale_back.swap_source_dest('164', '165', os.path.join(base_image_path, '163_159.png'))
scale_back.go(merge_source_file)
scale_back.swap_source_dest('165', '166', os.path.join(base_image_path, '164_159.png'))
scale_back.go(merge_source_file)
scale_back.swap_source_dest('166', '167', os.path.join(base_image_path, '165_159.png'))
scale_back.go(merge_source_file)
out_ids = set()
out_dupe_ids = set()
atlas_ids = {}
with open('temp_atlas.json', 'r', encoding='utf-8') as out_file:
with open(merge_source_file, 'r', encoding='utf-8') as out_file:
out_json = json.loads(out_file.read())
with open('../web/atlas.json', 'r', encoding='utf-8') as atlas_file:
@ -43,7 +60,7 @@ for entry in out_json:
print('Writing...')
with open('../web/atlas.json', 'w', encoding='utf-8') as atlas_file:
atlas_file.write(per_line_entries(atlas_json))
atlas_file.write(formatter.per_line_entries(atlas_json))
with open('../data/read-ids.txt', 'a', encoding='utf-8') as read_ids_file:
with open('read-ids-temp.txt', 'r', encoding='utf-8') as read_ids_temp_file:

View file

@ -28,8 +28,7 @@ import json
import time
import re
import traceback
from formatter import format_all
from migrate_atlas_format import migrate_atlas_format
from formatter import format_all, validate
OUT_FILE = open('temp_atlas.json', 'w', encoding='utf-8')
READ_IDS_FILE = open('read-ids-temp.txt', 'w')
@ -45,7 +44,7 @@ with open('credentials', 'r') as file:
password = credentials[3].strip() if len(credentials) > 3 else ""
reddit = praw.Reddit(
client_id=client_id,
client_id=client_id,
client_secret=client_secret,
username=username,
password=password,
@ -86,7 +85,7 @@ for submission in reddit.subreddit('placeAtlas2').new(limit=2000):
break
else:
continue
if submission.link_flair_text == "New Entry" or submission.link_flair_text == "Edit Entry":
try:
@ -113,19 +112,18 @@ for submission in reddit.subreddit('placeAtlas2').new(limit=2000):
else:
assert submission_json["id"] == 0, "Edit invalid because ID is tampered, it must be 0!"
submission_json_dummy = {"id": submission.id}
for key in submission_json:
if not key in submission_json_dummy:
submission_json_dummy[key] = submission_json[key];
(submission_json, validation_status) = format_all(submission_json_dummy, True)
submission_json = format_all(submission_json_dummy, True)
validation_status = validate(submission_json)
assert validation_status < 3, \
"Submission invalid after validation. This may be caused by not enough points on the path."
submission_json = migrate_atlas_format(submission_json)
add_comma_line = len(OUT_FILE_LINES) - 2
if len(OUT_FILE_LINES[add_comma_line]) > 2:
OUT_FILE_LINES[add_comma_line] = OUT_FILE_LINES[add_comma_line].replace('\n', ',\n')

205
tools/scale_back.py Normal file
View file

@ -0,0 +1,205 @@
#!/usr/bin/python
import json
import traceback
import numpy
from PIL import Image, ImageDraw
import gc
"""
# 166 to 164 with reference of 165
shrink
166
164
20
web\_img\canvas\place30\159.png
web\_img\canvas\place30\163_159.png
# 166 to 165 with reference of 166
shrink
166
165
20
web\_img\canvas\place30\159.png
web\_img\canvas\place30\164_159.png
# 164 to 165 with reference of 165
shrink
164
165
20
web\_img\canvas\place30\159.png
web\_img\canvas\place30\163_159.png
# 166 to 167 with reference of 167
expand
166
167
20
web\_img\canvas\place30\159.png
web\_img\canvas\place30\165_159.png
"""
class ScaleConfig:
    """Module-level configuration for the scale-back pass.

    Mutated in place by `swap_source_dest` and by the `__main__` prompt;
    read by `remove_white`.
    """
    # 'shrink' or 'expand': whether entries are renamed when their colored
    # pixel percentage is below ('shrink') or above ('expand') `threshold`.
    type = 'expand'
    # Canvas-period suffix to read from, e.g. '164' (matched as "-164").
    source = ''
    # Canvas-period suffix that matching entries are renamed to.
    destination = ''
    # Colored-pixel percentage cut-off used by remove_white.
    threshold = 20
    # Path to the base reference canvas image.
    image1 = ''
    # Optional second canvas image pasted on top of image1 (falsy to skip).
    image2 = ''
def swap_source_dest(source, destination, image2):
    """Retarget the scale-back pass at a new source/destination layer pair.

    Updates the shared ScaleConfig in place; `image2` is the overlay
    reference canvas used for the new pair.
    """
    ScaleConfig.image2 = image2
    ScaleConfig.destination = destination
    ScaleConfig.source = source
def remove_white(entry: dict):
    """Rename an entry's ScaleConfig.source period to ScaleConfig.destination
    based on how "colored" (non-white, non-transparent) its polygon area is
    on the reference canvas.

    NOTE(review): indentation was reconstructed from an unindented paste;
    the nesting of the trailing `break` statements should be confirmed
    against the original file.
    """
    # Composite the reference canvas: image1 as base, image2 (if set) on top.
    canvas_ref = Image.new('RGBA', (2000,2000))
    with Image.open(ScaleConfig.image1).convert('RGBA') as image1:
        if ScaleConfig.image2:
            with Image.open(ScaleConfig.image2).convert('RGBA') as image2:
                canvas_ref.paste(image1, (0, 0), image1)
                canvas_ref.paste(image2, (0, 0), image2)
                canvas_ref  # no-op expression; kept verbatim from the original
        else:
            canvas_ref.paste(image1, (0, 0), image1)
    # uncomment when you need to see the source canvas
    # canvas_ref.show()
    # print(entry['path'])
    for (period, polygonList) in entry['path'].items():
        # Only the period keyed with the configured source suffix is examined.
        if not f"-{ScaleConfig.source}" in period: continue
        # Get bounding rectangle (padded by 1.5 px, clamped to the 2000x2000
        # canvas) and a list of tuples for the polygon.
        polygon = []
        x_box = 2000
        y_box = 2000
        x_box2 = 0
        y_box2 = 0
        for point in polygonList:
            x_box = min(x_box, max(point[0] - 1.5, 0))
            y_box = min(y_box, max(point[1] - 1.5, 0))
            x_box2 = max(x_box2, min(point[0] + 1.5, 2000))
            y_box2 = max(y_box2, min(point[1] + 1.5, 2000))
            polygon.append(tuple(point))
        x_box = int(x_box)
        y_box = int(y_box)
        x_box2 = int(x_box2)
        y_box2 = int(y_box2)
        # Crop the image based on polygon: draw the polygon into an L-mode
        # mask, use it as the alpha channel, then slice the bounding box.
        # https://stackoverflow.com/questions/22588074/
        imArray = numpy.asarray(canvas_ref)
        with Image.new('L', (imArray.shape[1], imArray.shape[0]), 0) as maskIm:
            ImageDraw.Draw(maskIm).polygon(polygon, outline=1, fill=1)
            mask = numpy.array(maskIm)
        newImArray = numpy.empty(imArray.shape,dtype='uint8')
        newImArray[:,:,:3] = imArray[:,:,:3]
        newImArray[:,:,3] = mask*255
        imArray = newImArray[y_box:y_box2,x_box:x_box2,:]
        # points = numpy.array([polygon])
        # print(points)
        # print(cv2.boundingRect(points[0]))
        # print(1)
        # print(imArray)
        colored_pixel_count: int = 0
        all_pixel_count: int = 0
        # Read the area based on bounding box: count opaque pixels, and how
        # many of them are colored. NOTE(review): "white" is detected as
        # G == 255 and B == 255 only — the R channel is not checked; confirm
        # this is intentional.
        for x in imArray:
            for pixel in x:
                if pixel[3] == 0: continue
                all_pixel_count += 1
                if (pixel[1] == 255 and pixel[2] == 255): continue
                colored_pixel_count += 1
        # Nothing of the entry is visible inside the polygon; leave it alone.
        if all_pixel_count == 0: break
        colorness = (100 * colored_pixel_count)/all_pixel_count
        # shrink: rename when mostly white; expand: rename when mostly colored.
        if (ScaleConfig.type == "shrink" and colorness < ScaleConfig.threshold) or (ScaleConfig.type == "expand" and colorness > ScaleConfig.threshold):
            print(f"[{entry['id']} {period}] {colored_pixel_count}/{all_pixel_count} ({colorness}%)")
            new_period = period.replace(f'-{ScaleConfig.source}', f'-{ScaleConfig.destination}')
            # Move path and center to the new period key. Safe despite
            # mutating during iteration because we break immediately after.
            entry['path'][new_period] = entry['path'][period]
            del entry['path'][period]
            entry['center'][new_period] = entry['center'][period]
            del entry['center'][period]
            break
        # newIm = Image.fromarray(newImArray, "RGBA")
        # newIm.show()
        # Only the first matching period is ever processed.
        break
    return entry
def per_line_entries(entries: list):
    """
    Returns a string of all the entries, with every entry in one line.

    Falsy entries (e.g. None placeholders for removed entries) are skipped.
    """
    # Serialize each kept entry followed by ",\n", then strip the final
    # separator and close the JSON array.
    body = "[\n" + "".join(
        json.dumps(entry, ensure_ascii=False) + ",\n"
        for entry in entries
        if entry
    )
    return body[:-2] + "\n]"
def format_all(entry: dict, silent=False):
    """Run every scale-back formatter on *entry* and return it.

    Currently the only step is `remove_white`. Pass silent=True to
    suppress progress output.
    """
    def print_(*args, **kwargs):
        # Forward to print unless the caller asked for silence.
        if silent:
            return
        print(*args, **kwargs)
    entry = remove_white(entry)
    print_("Completed!")
    return entry
def go(path):
    """Load the atlas JSON at *path*, scale back every entry, and rewrite
    the file in place (one entry per line).

    NOTE(review): indentation reconstructed from an unindented paste —
    confirm loop nesting against the original file.
    """
    print(f"Scaling whiteout for {path}...")
    with open(path, "r+", encoding='UTF-8') as f1:
        entries = json.loads(f1.read())
    for i in range(len(entries)):
        try:
            entry_formatted = format_all(entries[i], True)
            entries[i] = entry_formatted
        except Exception:
            # Best-effort: log the failure and keep going so one bad entry
            # doesn't abort the whole batch.
            print(f"Exception occured when formatting ID {entries[i]['id']}")
            print(traceback.format_exc())
        if not (i % 50):
            # Progress marker every 50 entries; collect to bound the memory
            # held by per-entry PIL/numpy temporaries.
            print(f"{i} checked.")
            gc.collect()
    print(f"{len(entries)} checked. Writing...")
    # Rewrite with \n line endings regardless of platform.
    with open(path, "w", encoding='utf-8', newline='\n') as f2:
        f2.write(per_line_entries(entries))
    print("Writing completed. All done.")
if __name__ == '__main__':
    # Interactive CLI: prompt for the scaling configuration, then process
    # the live atlas file in place.
    ScaleConfig.type = input("Type (shrink/expand): ")
    ScaleConfig.source = input("Source: ")
    ScaleConfig.destination = input("Destination: ")
    # NOTE(review): int() raises ValueError on non-numeric input — no guard.
    ScaleConfig.threshold = int(input("Threshold (%): "))
    ScaleConfig.image1 = input("Reference canvas layer 1: ")
    ScaleConfig.image2 = input("Reference canvas layer 2: ")
    go("web/atlas.json")

View file

@ -270,8 +270,8 @@ function initDraw() {
pathWithPeriodsTemp.forEach(([key, value]) => {
// TODO: Compress periods on something like 0-13, 14.
exportObject.path[key] = value
exportObject.center[key] = calculateCenter(value)
exportObject.path[key] = value.map(point => point.map(int => int - 0.5))
exportObject.center[key] = calculateCenter(value).map(int => int - 0.5)
})
const inputWebsite = websiteGroupElements.map(element => element.value.trim()).filter(element => element)

View file

@ -516,39 +516,24 @@ async function init() {
function updateAtlasAll(atlas = atlasAll) {
for (const atlasIndex in atlas) {
if (Array.isArray(atlas[atlasIndex].path)) {
const currentPath = atlas[atlasIndex].path
atlas[atlasIndex].path = {}
atlas[atlasIndex].path[defaultPeriod] = currentPath
const currentLinks = atlas[atlasIndex].links
atlas[atlasIndex].links = {
website: [],
subreddit: [],
discord: [],
wiki: [],
...currentLinks
}
if (Array.isArray(atlas[atlasIndex].center)) {
const currentCenter = atlas[atlasIndex].center
atlas[atlasIndex].center = {}
atlas[atlasIndex].center[defaultPeriod] = currentCenter
const currentPath = atlas[atlasIndex].path
const currentCenter = atlas[atlasIndex].center
for (const key in currentPath) {
currentPath[key] = currentPath[key].map(point => point.map(int => int + 0.5))
}
if (atlas[atlasIndex].links) {
const currentLinks = atlas[atlasIndex].links
atlas[atlasIndex].links = {
website: [],
subreddit: [],
discord: [],
wiki: [],
...currentLinks
}
} else {
atlas[atlasIndex].links = {
website: [],
subreddit: [],
discord: [],
wiki: []
}
if (atlas[atlasIndex].website) atlas[atlasIndex].links.website = [atlas[atlasIndex].website]
if (atlas[atlasIndex].subreddit) atlas[atlasIndex].links.subreddit = atlas[atlasIndex].subreddit.split(',').map(subreddit => subreddit.trim().replace(/^\/r\//, ''))
delete atlas[atlasIndex].website
delete atlas[atlasIndex].subreddit
for (const key in currentCenter) {
currentCenter[key] = currentCenter[key].map(int => int + 0.5)
}
atlas[atlasIndex].path = currentPath
atlas[atlasIndex].center = currentCenter
}
return atlas
}

File diff suppressed because one or more lines are too long

View file

@ -358,7 +358,7 @@
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body d-flex flex-column">
<div class="period-status alert alert-warning small py-2" role="alert">We've just released our "remaster" of the new atlas, expect some bugs to show after submitting. If something happens, join our discord!</div>
<div class="period-status alert alert-warning small py-2" role="alert">We've just released our "remaster" of the Atlas. Bugs may happen after submitting. If something happens, tell us on Discord!</div>
<p>Use the Post Direct to Reddit button or manually copy the text below and submit it as a new text post to <a href="https://www.reddit.com/r/placeAtlas2/" target="_blank" rel="noopener noreferrer">r/placeAtlas2</a> on Reddit.</p>
<p>Don't forget to flair it with the <span class="badge rounded-pill bg-primary"><i class="bi bi-tag" aria-hidden="true"></i> <span id="redditFlair">New Entry</span></span> tag.</p>
<p>We will then check it and add it to the atlas.</p>