Mirror of https://github.com/placeAtlas/atlas.git, synced 2025-01-23 10:10:04 +01:00

Merge branch 'cleanup' of github.com:GeoDash897/place-atlas into cleanup

Commit d98f4ace13
6 changed files with 3791 additions and 3542 deletions
@@ -9733,3 +9733,27 @@ usmfke
 usmdg8
 usm0g8
 uslwi8
+usoilo
+uu388f
+uu34f8
+uu32gh
+uu30uk
+uu2y6x
+uu2w70
+uu2uas
+uu2rnm
+uu2p0n
+uui38k
+uv0679
+uuy3sn
+uuy1e5
+uxegrh
+uwuon8
+uw90m9
+uvnhvv
+uvm3w1
+uvfo1y
+uv39r0
+uyhbui
+uz6kxi
+uywxxt
@@ -179,7 +179,7 @@ def fix_no_protocol_urls(entry: dict):
     if "links" in entry and "website" in entry['links']:
         for i in range(len(entry["links"]["website"])):
             if entry["links"]["website"][i] and not entry["links"]["website"][i].startswith("http"):
-                entry["links"]["website"][i] = "https://" + entry["website"]
+                entry["links"]["website"][i] = "https://" + entry["links"]["website"][i]
 
     return entry
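The old right-hand side read entry["website"], a top-level key rather than the list element being iterated, so a protocol-less URL would either be replaced with the wrong value or raise a KeyError when that key is absent; the fix prefixes the element itself. A minimal sketch of the corrected behavior, using a made-up entry:

    entry = {"links": {"website": ["example.com", "https://already.fine"]}}

    if "links" in entry and "website" in entry["links"]:
        for i in range(len(entry["links"]["website"])):
            if entry["links"]["website"][i] and not entry["links"]["website"][i].startswith("http"):
                entry["links"]["website"][i] = "https://" + entry["links"]["website"][i]

    print(entry["links"]["website"])  # ['https://example.com', 'https://already.fine']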
@@ -285,23 +285,6 @@ def sort_image_keys(entry: dict):
 
     return entry
 
-def extend_entries_to_whiteout(entry: dict):
-    """
-    If an entry ends on the final non-whiteout image, extends the image to the last whiteout image where entries can still be made out.
-    """
-    for outer_key in ["path", "center"]:
-        image_keys: List[str] = list(entry[outer_key].keys())
-        for image_key in image_keys:
-            new_key = None
-            if NORMAL_IMAGE_SUFFIX in image_key:
-                new_key = image_key.replace(NORMAL_IMAGE_SUFFIX, WHITEOUT_IMAGE_SUFFIX)
-            elif image_key == END_NORMAL_IMAGE:
-                new_key = END_NORMAL_IMAGE + WHITEOUT_IMAGE_SUFFIX
-            entry[outer_key][new_key] = entry[outer_key][image_key]
-            del(entry[outer_key][image_key])
-
-    return entry
-
 def floor_points(entry: dict):
     """
     Floors points on path and center, removing the decimal count.
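One detail of the removed helper is worth keeping in mind: it snapshots the keys with list(entry[outer_key].keys()) before mutating, because inserting and deleting keys while iterating the live dict view raises RuntimeError: dictionary changed size during iteration. A minimal sketch of the same rename-in-place pattern, with hypothetical suffix values standing in for the module constants:

    NORMAL_IMAGE_SUFFIX = "-n"    # hypothetical stand-ins; the real values
    WHITEOUT_IMAGE_SUFFIX = "-w"  # live in the module's constants

    d = {"164-n": [1, 2], "165-n": [3, 4]}

    for key in list(d.keys()):    # snapshot, so mutating d below is safe
        new_key = key.replace(NORMAL_IMAGE_SUFFIX, WHITEOUT_IMAGE_SUFFIX)
        d[new_key] = d.pop(key)   # rename = insert under new key, delete old

    print(d)                      # {'164-w': [1, 2], '165-w': [3, 4]}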
@@ -370,17 +353,11 @@ def per_line_entries(entries: list):
 def format_all(entry: dict, silent=False):
     """
     Format using all the available formatters.
-    Outputs a tuple containing the entry and the validation status code.
-
-    Status code key:
-    0: All valid, no problems
-    1: Informational logs that may be ignored
-    2: Warnings that may affect user experience when interacting with the entry
-    3: Errors that make the entry inaccessible or broken.
     """
     def print_(*args, **kwargs):
         if not silent:
             print(*args, **kwargs)
 
     print_("Fixing r/ capitalization...")
     entry = fix_r_caps(entry)
     print_("Fix formatting of subreddit...")
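The deleted lines were the only place the 0-3 status scale was documented; the scale itself survives in validate(entry), which callers now invoke separately (see the next hunk, where anything scoring above 2 is dropped). A sketch of the new caller-side contract, mirroring format_all_entries below:

    from formatter import format_all, validate

    def keep_if_valid(entry: dict):
        # format_all now returns just the entry; validate returns the 0-3 code
        # that the old format_all bundled into its tuple.
        entry = format_all(entry, True)
        if validate(entry) > 2:   # 3 means errors that make the entry broken
            return None           # callers discard such entries
        return entry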
@@ -403,44 +380,44 @@ def format_all(entry: dict, silent=False):
     entry = remove_empty_and_similar(entry)
     print_("Sorting image keys...")
     entry = sort_image_keys(entry)
-    print_("Extending entries to whiteout...")
-    entry = extend_entries_to_whiteout(entry)
     print_("Flooring points...")
     entry = floor_points(entry)
 
-    print_("Validating...")
-    status_code = validate(entry)
     print_("Completed!")
-    return ( entry, status_code )
+    return entry
 
+def format_all_entries(entries):
+    for i in range(len(entries)):
+        try:
+            entry_formatted = format_all(entries[i], True)
+            validation_status = validate(entries[i])
+            if validation_status > 2:
+                print(f"Entry {entry_formatted['id']} will be removed! {json.dumps(entry_formatted)}")
+                entries[i] = None
+            else:
+                entries[i] = entry_formatted
+        except Exception:
+            print(f"Exception occurred when formatting ID {entries[i]['id']}")
+            print(traceback.format_exc())
+        if not (i % 200):
+            print(f"{i} checked.")
+
+def go(path):
+
+    print(f"Formatting {path}...")
+
+    with open(path, "r+", encoding='UTF-8') as f1:
+        entries = json.loads(f1.read())
+
+    format_all_entries(entries)
+
+    print(f"{len(entries)} checked. Writing...")
+
+    with open(path, "w", encoding='utf-8', newline='\n') as f2:
+        f2.write(per_line_entries(entries))
+
+    print("Writing completed. All done.")
+
 if __name__ == '__main__':
 
-    def go(path):
-
-        print(f"Formatting {path}...")
-
-        with open(path, "r+", encoding='UTF-8') as f1:
-            entries = json.loads(f1.read())
-
-        for i in range(len(entries)):
-            try:
-                entry_formatted, validation_status = format_all(entries[i], True)
-                if validation_status > 2:
-                    print(f"Entry {entry_formatted['id']} will be removed! {json.dumps(entry_formatted)}")
-                    entries[i] = None
-                else:
-                    entries[i] = entry_formatted
-            except Exception:
-                print(f"Exception occurred when formatting ID {entries[i]['id']}")
-                print(traceback.format_exc())
-            if not (i % 200):
-                print(f"{i} checked.")
-
-        print(f"{len(entries)} checked. Writing...")
-
-        with open(path, "w", encoding='utf-8', newline='\n') as f2:
-            f2.write(per_line_entries(entries))
-
-        print("Writing completed. All done.")
-
     go("../web/atlas.json")
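With format_all_entries and go moved to module level instead of living under the __main__ guard, other tools can import and reuse them; merge_out.py in the next hunk does exactly that. A minimal reuse sketch:

    import json
    import formatter

    with open("../web/atlas.json", "r", encoding="UTF-8") as f:
        entries = json.loads(f.read())

    formatter.format_all_entries(entries)  # formats and validates the list in place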
@@ -1,13 +1,30 @@
 import json
-from formatter import per_line_entries
+import os
+import formatter
+import scale_back
+
+from scale_back import ScaleConfig
+
+merge_source_file = 'temp_atlas.json'
+
+with open(merge_source_file, 'r', encoding='UTF-8') as f1:
+    out_json = json.loads(f1.read())
+
+formatter.format_all_entries(out_json)
+
+base_image_path = os.path.join('..', 'web', '_img', 'canvas', 'place30')
+ScaleConfig.image1 = os.path.join(base_image_path, '159.png')
+scale_back.swap_source_dest('164', '165', os.path.join(base_image_path, '163_159.png'))
+scale_back.scale_back_entries(out_json)
+scale_back.swap_source_dest('165', '166', os.path.join(base_image_path, '164_159.png'))
+scale_back.scale_back_entries(out_json)
+scale_back.swap_source_dest('166', '167', os.path.join(base_image_path, '165_159.png'))
+scale_back.scale_back_entries(out_json)
 
 out_ids = set()
 out_dupe_ids = set()
 atlas_ids = {}
 
 with open('temp_atlas.json', 'r', encoding='utf-8') as out_file:
     out_json = json.loads(out_file.read())
 
 with open('../web/atlas.json', 'r', encoding='utf-8') as atlas_file:
     atlas_json = json.loads(atlas_file.read())
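swap_source_dest (defined in tools/scale_back.py, shown later in this diff) only reassigns three ScaleConfig fields, so each call above is shorthand for assignments like these; under the code shown here, ScaleConfig.type keeps its class default of 'expand' for all three passes:

    # Equivalent of scale_back.swap_source_dest('164', '165', ...):
    ScaleConfig.source = '164'
    ScaleConfig.destination = '165'
    ScaleConfig.image2 = os.path.join(base_image_path, '163_159.png')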
@@ -43,7 +60,7 @@ for entry in out_json:
 
 print('Writing...')
 with open('../web/atlas.json', 'w', encoding='utf-8') as atlas_file:
-    atlas_file.write(per_line_entries(atlas_json))
+    atlas_file.write(formatter.per_line_entries(atlas_json))
 
 with open('../data/read-ids.txt', 'a', encoding='utf-8') as read_ids_file:
     with open('read-ids-temp.txt', 'r', encoding='utf-8') as read_ids_temp_file:
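The merge loop between these two hunks is elided by the diff. As a plausible sketch only (the real loop body is not shown here), the three trackers declared earlier could be maintained like this:

    for entry in out_json:
        if entry['id'] in out_ids:
            out_dupe_ids.add(entry['id'])  # ID seen more than once in this batch
        else:
            out_ids.add(entry['id'])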
@@ -28,8 +28,7 @@ import json
 import time
 import re
 import traceback
-from formatter import format_all
 from migrate_atlas_format import migrate_atlas_format
+from formatter import format_all, validate
-
 OUT_FILE = open('temp_atlas.json', 'w', encoding='utf-8')
 READ_IDS_FILE = open('read-ids-temp.txt', 'w')
@@ -45,7 +44,7 @@ with open('credentials', 'r') as file:
     password = credentials[3].strip() if len(credentials) > 3 else ""
 
 reddit = praw.Reddit(
     client_id=client_id,
-    client_id=client_id,
     client_secret=client_secret,
     username=username,
     password=password,
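The deleted line is more than a tidy-up: Python rejects a repeated keyword argument at compile time, so the module would fail before the crawler could run at all:

    >>> praw.Reddit(client_id="x", client_id="x")
    SyntaxError: keyword argument repeated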
@@ -86,7 +85,7 @@ for submission in reddit.subreddit('placeAtlas2').new(limit=2000):
             break
     else:
         continue
 
-
     if submission.link_flair_text == "New Entry" or submission.link_flair_text == "Edit Entry":
 
         try:
@@ -113,19 +112,18 @@ for submission in reddit.subreddit('placeAtlas2').new(limit=2000):
             else:
 
                 assert submission_json["id"] == 0, "Edit invalid because ID is tampered, it must be 0!"
 
-
                 submission_json_dummy = {"id": submission.id}
                 for key in submission_json:
                     if not key in submission_json_dummy:
                         submission_json_dummy[key] = submission_json[key];
-                (submission_json, validation_status) = format_all(submission_json_dummy, True)
-
+                submission_json = format_all(submission_json_dummy, True)
+                validation_status = validate(submission_json)
 
             assert validation_status < 3, \
                 "Submission invalid after validation. This may be caused by not enough points on the path."
 
             submission_json = migrate_atlas_format(submission_json)
 
             add_comma_line = len(OUT_FILE_LINES) - 2
             if len(OUT_FILE_LINES[add_comma_line]) > 2:
                 OUT_FILE_LINES[add_comma_line] = OUT_FILE_LINES[add_comma_line].replace('\n', ',\n')
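For context, OUT_FILE_LINES accumulates temp_atlas.json as a one-entry-per-line JSON array, so before a new entry is appended, the previous entry's line needs a trailing comma. A small sketch of that bookkeeping, with a hypothetical buffer state (the surrounding assembly code is not part of this hunk):

    OUT_FILE_LINES = ['[\n', '{"id": "abc123"}\n', ']\n']  # hypothetical state

    add_comma_line = len(OUT_FILE_LINES) - 2     # line holding the previous entry
    if len(OUT_FILE_LINES[add_comma_line]) > 2:  # skip the bare '[' or empty stubs
        OUT_FILE_LINES[add_comma_line] = OUT_FILE_LINES[add_comma_line].replace('\n', ',\n')
    OUT_FILE_LINES.insert(len(OUT_FILE_LINES) - 1, '{"id": "def456"}\n')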
tools/scale_back.py (new file, 208 lines)
@@ -0,0 +1,208 @@
+#!/usr/bin/python
+
+import json
+import traceback
+import numpy
+from PIL import Image, ImageDraw
+import gc
+
+"""
+# 166 to 164 with reference of 165
+shrink
+166
+164
+20
+web\_img\canvas\place30\159.png
+web\_img\canvas\place30\163_159.png
+
+# 166 to 165 with reference of 166
+shrink
+166
+165
+20
+web\_img\canvas\place30\159.png
+web\_img\canvas\place30\164_159.png
+
+# 164 to 165 with reference of 165
+shrink
+164
+165
+20
+web\_img\canvas\place30\159.png
+web\_img\canvas\place30\163_159.png
+
+# 166 to 167 with reference of 167
+expand
+166
+167
+20
+web\_img\canvas\place30\159.png
+web\_img\canvas\place30\165_159.png
+"""
+
+class ScaleConfig:
+    type = 'expand'
+    source = ''
+    destination = ''
+    threshold = 20
+    image1 = ''
+    image2 = ''
+
+def swap_source_dest(source, destination, image2):
+    ScaleConfig.source = source
+    ScaleConfig.destination = destination
+    ScaleConfig.image2 = image2
+
+def remove_white(entry: dict):
+
+    canvas_ref = Image.new('RGBA', (2000, 2000))
+
+    with Image.open(ScaleConfig.image1).convert('RGBA') as image1:
+        if ScaleConfig.image2:
+            with Image.open(ScaleConfig.image2).convert('RGBA') as image2:
+                canvas_ref.paste(image1, (0, 0), image1)
+                canvas_ref.paste(image2, (0, 0), image2)
+        else:
+            canvas_ref.paste(image1, (0, 0), image1)
+
+    # uncomment when you need to see the source canvas
+    # canvas_ref.show()
+
+    # print(entry['path'])
+
+    for (period, polygonList) in entry['path'].items():
+
+        if not f"-{ScaleConfig.source}" in period: continue
+
+        # Get bounding rectangle and have a list of tuples for polygon
+
+        polygon = []
+        x_box = 2000
+        y_box = 2000
+        x_box2 = 0
+        y_box2 = 0
+
+        for point in polygonList:
+            x_box = min(x_box, max(point[0] - 1.5, 0))
+            y_box = min(y_box, max(point[1] - 1.5, 0))
+            x_box2 = max(x_box2, min(point[0] + 1.5, 2000))
+            y_box2 = max(y_box2, min(point[1] + 1.5, 2000))
+            polygon.append(tuple(point))
+
+        x_box = int(x_box)
+        y_box = int(y_box)
+        x_box2 = int(x_box2)
+        y_box2 = int(y_box2)
+
+        # Crop the image based on polygon
+        # https://stackoverflow.com/questions/22588074/
+
+        imArray = numpy.asarray(canvas_ref)
+
+        with Image.new('L', (imArray.shape[1], imArray.shape[0]), 0) as maskIm:
+            ImageDraw.Draw(maskIm).polygon(polygon, outline=1, fill=1)
+            mask = numpy.array(maskIm)
+            newImArray = numpy.empty(imArray.shape, dtype='uint8')
+
+            newImArray[:, :, :3] = imArray[:, :, :3]
+            newImArray[:, :, 3] = mask * 255
+
+        imArray = newImArray[y_box:y_box2, x_box:x_box2, :]
+
+        # points = numpy.array([polygon])
+        # print(points)
+        # print(cv2.boundingRect(points[0]))
+        # print(1)
+        # print(imArray)
+
+        colored_pixel_count: int = 0
+        all_pixel_count: int = 0
+
+        # Read the area based on bounding box
+
+        for x in imArray:
+            for pixel in x:
+                if pixel[3] == 0: continue
+                all_pixel_count += 1
+                if (pixel[1] == 255 and pixel[2] == 255): continue
+                colored_pixel_count += 1
+
+        if all_pixel_count == 0: break
+
+        colorness = (100 * colored_pixel_count)/all_pixel_count
+
+        if (ScaleConfig.type == "shrink" and colorness < ScaleConfig.threshold) or (ScaleConfig.type == "expand" and colorness > ScaleConfig.threshold):
+            print(f"[{entry['id']} {period}] {colored_pixel_count}/{all_pixel_count} ({colorness}%)")
+            new_period = period.replace(f'-{ScaleConfig.source}', f'-{ScaleConfig.destination}')
+            entry['path'][new_period] = entry['path'][period]
+            del entry['path'][period]
+            entry['center'][new_period] = entry['center'][period]
+            del entry['center'][period]
+            break
+        # newIm = Image.fromarray(newImArray, "RGBA")
+        # newIm.show()
+
+        break
+
+    return entry
+
+def per_line_entries(entries: list):
+    """
+    Returns a string of all the entries, with every entry in one line.
+    """
+    out = "[\n"
+    for entry in entries:
+        if entry:
+            out += json.dumps(entry, ensure_ascii=False) + ",\n"
+    out = out[:-2] + "\n]"
+    return out
+
+def format_all(entry: dict, silent=False):
+    def print_(*args, **kwargs):
+        if not silent:
+            print(*args, **kwargs)
+
+    entry = remove_white(entry)
+    print_("Completed!")
+    return entry
+
+def scale_back_entries(entries):
+    for i in range(len(entries)):
+        try:
+            entry_formatted = format_all(entries[i], True)
+            entries[i] = entry_formatted
+        except Exception:
+            print(f"Exception occurred when formatting ID {entries[i]['id']}")
+            print(traceback.format_exc())
+        if not (i % 50):
+            print(f"{i} checked.")
+            gc.collect()
+
+def go(path):
+
+    print(f"Scaling whiteout for {path}...")
+
+    with open(path, "r+", encoding='UTF-8') as f1:
+        entries = json.loads(f1.read())
+
+    scale_back_entries(entries)
+
+    print(f"{len(entries)} checked. Writing...")
+
+    with open(path, "w", encoding='utf-8', newline='\n') as f2:
+        f2.write(per_line_entries(entries))
+
+    print("Writing completed. All done.")
+
+if __name__ == '__main__':
+
+    ScaleConfig.type = input("Type (shrink/expand): ")
+    ScaleConfig.source = input("Source: ")
+    ScaleConfig.destination = input("Destination: ")
+    ScaleConfig.threshold = int(input("Threshold (%): "))
+    ScaleConfig.image1 = input("Reference canvas layer 1: ")
+    ScaleConfig.image2 = input("Reference canvas layer 2: ")
+
+    go("web/atlas.json")
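remove_white is built on a well-known PIL + NumPy recipe (the Stack Overflow link in the code): rasterize the polygon into a grayscale mask, splice the mask in as the alpha channel, then count the opaque pixels. A self-contained sketch of the same recipe on a hypothetical 10x10 canvas:

    import numpy
    from PIL import Image, ImageDraw

    # Hypothetical solid-red canvas and triangle; the real code uses the
    # 2000x2000 canvas composited from ScaleConfig.image1/image2.
    canvas = Image.new('RGBA', (10, 10), (255, 0, 0, 255))
    polygon = [(1, 1), (8, 1), (4, 8)]

    im_array = numpy.asarray(canvas)
    mask_im = Image.new('L', (im_array.shape[1], im_array.shape[0]), 0)
    ImageDraw.Draw(mask_im).polygon(polygon, outline=1, fill=1)
    mask = numpy.array(mask_im)

    cut = numpy.empty(im_array.shape, dtype='uint8')
    cut[:, :, :3] = im_array[:, :, :3]
    cut[:, :, 3] = mask * 255              # alpha 255 inside the polygon, 0 outside

    opaque = cut[cut[:, :, 3] != 0]        # (N, 4) pixels inside the polygon
    # "colored" as in remove_white: green or blue channel below 255
    colored = opaque[(opaque[:, 1] != 255) | (opaque[:, 2] != 255)]
    print(f"{len(colored)}/{len(opaque)} colored ({100 * len(colored) / len(opaque):.1f}%)")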
web/atlas.json (6965 lines changed)

File diff suppressed because one or more lines are too long