Integrate whiteout scaling with merging

AnonymousRandomPerson 2022-05-21 16:57:19 -04:00
parent 88fb2ade3b
commit 9437f4cc7c
6 changed files with 99 additions and 101 deletions

View file

@@ -9733,13 +9733,14 @@ usmfke
usmdg8
usm0g8
uslwi8
usoilo
uu388f
uu34f8
uu32gh
uu30uk
uu2y6x
uu2w70
uu2uas
uu2rnm
uu2p0n
uui38k

View file

@@ -285,23 +285,6 @@ def sort_image_keys(entry: dict):
return entry
def extend_entries_to_whiteout(entry: dict):
"""
If an entry ends on the final non-whiteout image, extends the image to the last whiteout image where entries can still be made out.
"""
for outer_key in ["path", "center"]:
image_keys: List[str] = list(entry[outer_key].keys())
for image_key in image_keys:
new_key = None
if NORMAL_IMAGE_SUFFIX in image_key:
new_key = image_key.replace(NORMAL_IMAGE_SUFFIX, WHITEOUT_IMAGE_SUFFIX)
elif image_key == END_NORMAL_IMAGE:
new_key = END_NORMAL_IMAGE + WHITEOUT_IMAGE_SUFFIX
entry[outer_key][new_key] = entry[outer_key][image_key]
del(entry[outer_key][image_key])
return entry
def floor_points(entry: dict):
"""
Floors points on path and center, removing the decimals.
@@ -403,21 +386,6 @@ def print_(*args, **kwargs):
print_("Completed!")
return entry
def format_all_crawl(entry: dict, silent=False):
"""
Format using all the available formatters.
"""
def print_(*args, **kwargs):
if not silent:
print(*args, **kwargs)
format_all(entry, silent)
print_("Extending entries to whiteout...")
entry = extend_entries_to_whiteout(entry)
print_("Completed!")
return entry
if __name__ == '__main__':
def go(path):
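For context on what this file drops: extend_entries_to_whiteout rewrote the period keys inside an entry's path and center dicts so that entries ending on the final non-whiteout image carried over to the whiteout images; per the rest of this commit, that responsibility appears to move into the merge step via scale_back. A minimal sketch of the removed rename, using placeholder suffix values (the real NORMAL_IMAGE_SUFFIX, WHITEOUT_IMAGE_SUFFIX and END_NORMAL_IMAGE constants live in formatter.py and differ); illustrative only, not part of the commit:

# Placeholder values; the real constants are defined in formatter.py.
NORMAL_IMAGE_SUFFIX = "-normal"
WHITEOUT_IMAGE_SUFFIX = "-whiteout"
END_NORMAL_IMAGE = "166"

def extend_entries_to_whiteout(entry: dict) -> dict:
    # Rename image-suffixed keys in "path" and "center" to their whiteout counterparts.
    for outer_key in ("path", "center"):
        for image_key in list(entry[outer_key]):
            new_key = None
            if NORMAL_IMAGE_SUFFIX in image_key:
                new_key = image_key.replace(NORMAL_IMAGE_SUFFIX, WHITEOUT_IMAGE_SUFFIX)
            elif image_key == END_NORMAL_IMAGE:
                new_key = END_NORMAL_IMAGE + WHITEOUT_IMAGE_SUFFIX
            if new_key is not None:  # guard added in this sketch; the removed helper assumed every key matched
                entry[outer_key][new_key] = entry[outer_key].pop(image_key)
    return entry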

View file

@@ -1,11 +1,25 @@
import json
import os
import scale_back
from formatter import per_line_entries
from scale_back import ScaleConfig
merge_source_file = 'temp_atlas.json'
base_image_path = os.path.join('..', 'web', '_img', 'canvas', 'place30')
ScaleConfig.image1 = os.path.join(base_image_path, '159.png')
scale_back.swap_source_dest('164', '165', os.path.join(base_image_path, '163_159.png'))
scale_back.go(merge_source_file)
scale_back.swap_source_dest('165', '166', os.path.join(base_image_path, '164_159.png'))
scale_back.go(merge_source_file)
scale_back.swap_source_dest('166', '167', os.path.join(base_image_path, '165_159.png'))
scale_back.go(merge_source_file)
out_ids = set()
out_dupe_ids = set()
atlas_ids = {}
with open('temp_atlas.json', 'r', encoding='utf-8') as out_file:
with open(merge_source_file, 'r', encoding='utf-8') as out_file:
out_json = json.loads(out_file.read())
with open('../web/atlas.json', 'r', encoding='utf-8') as atlas_file:
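Read together, the three swap_source_dest/go pairs above walk consecutive whiteout canvases: each pass takes a source layer, its successor as destination, and the previous layer's reference image, then rescans temp_atlas.json. A hedged sketch of that pattern as a loop; only swap_source_dest, go and ScaleConfig.image1 come from the diff, the wrapper itself is illustrative:

import os
import scale_back
from scale_back import ScaleConfig

def scale_whiteout_layers(merge_source_file, base_image_path, first_layer=164, last_layer=166):
    # Generalises the explicit 164->165, 165->166, 166->167 passes above (illustrative only).
    ScaleConfig.image1 = os.path.join(base_image_path, '159.png')
    for layer in range(first_layer, last_layer + 1):
        reference = os.path.join(base_image_path, f'{layer - 1}_159.png')  # e.g. 163_159.png for layer 164
        scale_back.swap_source_dest(str(layer), str(layer + 1), reference)
        scale_back.go(merge_source_file)

# scale_whiteout_layers('temp_atlas.json', os.path.join('..', 'web', '_img', 'canvas', 'place30'))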

View file

@@ -28,7 +28,7 @@
import time
import re
import traceback
from formatter import format_all_crawl, validate
from formatter import format_all, validate
from migrate_atlas_format import migrate_atlas_format
OUT_FILE = open('temp_atlas.json', 'w', encoding='utf-8')
@@ -45,7 +45,7 @@
password = credentials[3].strip() if len(credentials) > 3 else ""
reddit = praw.Reddit(
client_id=client_id,
client_secret=client_secret,
username=username,
password=password,
@@ -86,7 +86,7 @@ def set_flair(submission, flair):
break
else:
continue
if submission.link_flair_text == "New Entry" or submission.link_flair_text == "Edit Entry":
try:
@@ -113,20 +113,20 @@ def set_flair(submission, flair):
else:
assert submission_json["id"] == 0, "Edit invalid because ID is tampered, it must be 0!"
submission_json_dummy = {"id": submission.id}
for key in submission_json:
if not key in submission_json_dummy:
submission_json_dummy[key] = submission_json[key];
submission_json = format_all_crawl(submission_json_dummy, True)
submission_json = format_all(submission_json_dummy, True)
validation_status = validate(submission_json)
assert validation_status < 3, \
"Submission invalid after validation. This may be caused by not enough points on the path."
submission_json = migrate_atlas_format(submission_json)
add_comma_line = len(OUT_FILE_LINES) - 2
if len(OUT_FILE_LINES[add_comma_line]) > 2:
OUT_FILE_LINES[add_comma_line] = OUT_FILE_LINES[add_comma_line].replace('\n', ',\n')
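With format_all_crawl removed, the per-submission flow above becomes: pin the real Reddit id, copy the remaining submitted fields, run format_all silently, then validate and migrate. A compact sketch of that flow; format_all, validate and migrate_atlas_format are the imports shown in this file, while the wrapper function itself is illustrative:

from formatter import format_all, validate
from migrate_atlas_format import migrate_atlas_format

def process_submission_json(reddit_id, submission_json):
    # Illustrative wrapper around the inline logic above; the submitted "id" must be 0,
    # so the real Reddit id takes its place here.
    entry = {"id": reddit_id}
    for key, value in submission_json.items():
        entry.setdefault(key, value)
    entry = format_all(entry, True)  # silent pass; whiteout extension now happens in scale_back
    assert validate(entry) < 3, \
        "Submission invalid after validation. This may be caused by not enough points on the path."
    return migrate_atlas_format(entry)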

View file

@@ -40,32 +40,40 @@
web\_img\canvas\place30\165_159.png
"""
type = input("Type (shrink/expand): ")
source = input("Source: ")
destination = input("Destination: ")
threshold = int(input("Threshold (%): "))
image1 = input("Reference canvas layer 1: ")
image2 = input("Reference canvas layer 2: ")
canvas_ref = Image.new('RGBA', (2000,2000))
class ScaleConfig:
type = 'expand'
source = ''
destination = ''
threshold = 20
image1 = ''
image2 = ''
with Image.open(image1).convert('RGBA') as image1:
if image2:
with Image.open(image2).convert('RGBA') as image2:
canvas_ref.paste(image1, (0, 0), image1)
canvas_ref.paste(image2, (0, 0), image2)
canvas_ref
else:
canvas_ref.paste(image1, (0, 0), image1)
# uncomment when you need to see the source canvas
# canvas_ref.show()
def swap_source_dest(source, destination, image2):
ScaleConfig.source = source
ScaleConfig.destination = destination
ScaleConfig.image2 = image2
def remove_white(entry: dict):
canvas_ref = Image.new('RGBA', (2000,2000))
with Image.open(ScaleConfig.image1).convert('RGBA') as image1:
if ScaleConfig.image2:
with Image.open(ScaleConfig.image2).convert('RGBA') as image2:
canvas_ref.paste(image1, (0, 0), image1)
canvas_ref.paste(image2, (0, 0), image2)
canvas_ref
else:
canvas_ref.paste(image1, (0, 0), image1)
# uncomment when you need to see the source canvas
# canvas_ref.show()
# print(entry['path'])
for (period, polygonList) in entry['path'].items():
if not f"-{source}" in period: continue
if not f"-{ScaleConfig.source}" in period: continue
# Get bounding rectangle and have a list of tuples for polygon
@@ -97,7 +105,7 @@ def remove_white(entry: dict):
mask = numpy.array(maskIm)
newImArray = numpy.empty(imArray.shape,dtype='uint8')
newImArray[:,:,:3] = imArray[:,:,:3]
newImArray[:,:,3] = mask*255
@@ -118,16 +126,16 @@ def remove_white(entry: dict):
for pixel in x:
if pixel[3] == 0: continue
all_pixel_count += 1
if (pixel[1] == 255 and pixel[2] == 255): continue
colored_pixel_count += 1
if all_pixel_count == 0: break
colorness = (100 * colored_pixel_count)/all_pixel_count
if (type == "shrink" and colorness < threshold) or (type == "expand" and colorness > threshold):
if (ScaleConfig.type == "shrink" and colorness < ScaleConfig.threshold) or (ScaleConfig.type == "expand" and colorness > ScaleConfig.threshold):
print(f"[{entry['id']} {period}] {colored_pixel_count}/{all_pixel_count} ({colorness}%)")
new_period = period.replace(f'-{source}', f'-{destination}')
new_period = period.replace(f'-{ScaleConfig.source}', f'-{ScaleConfig.destination}')
entry['path'][new_period] = entry['path'][period]
del entry['path'][period]
entry['center'][new_period] = entry['center'][period]
@@ -135,7 +143,7 @@ def remove_white(entry: dict):
break
# newIm = Image.fromarray(newImArray, "RGBA")
# newIm.show()
break
return entry
@@ -155,36 +163,43 @@ def format_all(entry: dict, silent=False):
def print_(*args, **kwargs):
if not silent:
print(*args, **kwargs)
entry = remove_white(entry)
print_("Completed!")
return entry
def go(path):
print(f"Scaling whiteout for {path}...")
with open(path, "r+", encoding='UTF-8') as f1:
entries = json.loads(f1.read())
for i in range(len(entries)):
try:
entry_formatted = format_all(entries[i], True)
entries[i] = entry_formatted
except Exception:
print(f"Exception occurred when formatting ID {entries[i]['id']}")
print(traceback.format_exc())
if not (i % 50):
print(f"{i} checked.")
gc.collect()
print(f"{len(entries)} checked. Writing...")
with open(path, "w", encoding='utf-8', newline='\n') as f2:
f2.write(per_line_entries(entries))
print("Writing completed. All done.")
if __name__ == '__main__':
def go(path):
print(f"Formatting {path}...")
with open(path, "r+", encoding='UTF-8') as f1:
entries = json.loads(f1.read())
for i in range(len(entries)):
try:
entry_formatted = format_all(entries[i], True)
entries[i] = entry_formatted
except Exception:
print(f"Exception occurred when formatting ID {entries[i]['id']}")
print(traceback.format_exc())
if not (i % 50):
print(f"{i} checked.")
gc.collect()
print(f"{len(entries)} checked. Writing...")
with open(path, "w", encoding='utf-8', newline='\n') as f2:
f2.write(per_line_entries(entries))
print("Writing completed. All done.")
ScaleConfig.type = input("Type (shrink/expand): ")
ScaleConfig.source = input("Source: ")
ScaleConfig.destination = input("Destination: ")
ScaleConfig.threshold = int(input("Threshold (%): "))
ScaleConfig.image1 = input("Reference canvas layer 1: ")
ScaleConfig.image2 = input("Reference canvas layer 2: ")
go("web/atlas.json")
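The per-period decision in remove_white comes down to a coloured-pixel ratio measured against ScaleConfig.threshold, followed by renaming the period key from the source canvas to the destination. A standalone sketch of just that decision and rename; the function names are illustrative, while the formula and the key rewrite mirror the hunk above:

def should_rescale(colored_pixel_count, all_pixel_count, mode, threshold):
    # colorness: percentage of non-transparent pixels that the loop above counts as coloured
    if all_pixel_count == 0:
        return False
    colorness = (100 * colored_pixel_count) / all_pixel_count
    return (mode == "shrink" and colorness < threshold) or \
           (mode == "expand" and colorness > threshold)

def rename_period(entry, period, source, destination):
    # e.g. "1-164, T" becomes "1-165, T" when source='164' and destination='165'
    new_period = period.replace(f'-{source}', f'-{destination}')
    entry['path'][new_period] = entry['path'].pop(period)
    entry['center'][new_period] = entry['center'].pop(period)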

View file

@@ -2120,7 +2120,7 @@
{"id": "twvtf9", "name": "Venezuela flag", "description": "Venezuelan flag with typical venezuelan food, the national tree and the Salto Angel, one of the 7 wonders of the world", "links": {"subreddit": ["vzla"]}, "path": {"56-166, T": [[1201, 775], [1281, 774], [1281, 805], [1202, 804], [1201, 791]]}, "center": {"56-166, T": [1242, 789]}},
{"id": "twvta9", "name": "Greggs", "description": "A British fast food bakery shop. Best known for its Sausage Rolls. Many Brits consider this an essential part of the nation.", "links": {"website": ["https://www.greggs.co.uk/"], "subreddit": ["greggs"]}, "path": {"1-164, T": [[701, 476], [701, 482], [753, 482], [753, 476], [721, 476]]}, "center": {"1-164, T": [727, 479]}},
{"id": "twvt8j", "name": "Flag of Trinidad and Tobago", "description": "The national flag of the twin island Republic of Trinidad and Tobago in the Caribbean.", "links": {"website": ["https://en.wikipedia.org/wiki/Trinidad_and_Tobago"], "subreddit": ["TrinidadandTobago"]}, "path": {"109-166, T": [[1373, 1188], [1394, 1188], [1394, 1199], [1373, 1199]]}, "center": {"109-166, T": [1384, 1194]}},
{"id": "twvt8g", "name": "Don't Toy with me, Miss Nagatoro", "description": "From the manga and anime series Don't Toy with me, Miss Nagatoro, created by 774 (Nanashi). At the time of writing this, it ranks as the 50th most popular manga on My Anime List. It is a romantic comedy story where Nagatoro, a dominant, raunchy, and slightly sadistic girl, flirtatiously teases her senpai, a timid and well-mannered guy who is very shy. The series is beloved for its character writing, and extra expressive facial expressions. This art features both of the main characters.", "links": {"website": ["https://www.crunchyroll.com/dont-toy-with-me-miss-nagatoro/episode-1-senpai-is-a-bit-senpai-dont-you-ever-get-angry-811065"], "subreddit": ["nagatoro"]}, "path": {"109-166, T": [[843, 1520], [843, 1564], [880, 1564], [879, 1520], [857, 1522]]}, "center": {"109-166, T": [861, 1543]}},
{"id": "twvt8g", "name": "Don't Toy with me, Miss Nagatoro", "description": "The main characters from the romantic comedy manga/ anime series, Don't Toy With Me, Miss Nagatoro, Created by Nanashi.\nThe story centers around Nagatoro, a slightly sadistic girl, teasing her overly shy senpai.", "links": {"website": ["https://www.crunchyroll.com/dont-toy-with-me-miss-nagatoro/episode-1-senpai-is-a-bit-senpai-dont-you-ever-get-angry-811065"], "subreddit": ["nagatoro"]}, "path": {"109-166, T": [[843, 1520], [843, 1564], [880, 1564], [879, 1520], [857, 1522]]}, "center": {"109-166, T": [861, 1543]}},
{"id": "twvt57", "name": "Bigtoe", "description": "Original character in the My Little Pony universe.", "links": {}, "path": {"125-166, T": [[24, 1093], [31, 1093], [31, 1090], [41, 1090], [41, 1107], [34, 1107], [34, 1104], [24, 1104]]}, "center": {"125-166, T": [35, 1098]}},
{"id": "twvt2n", "name": "Terezi Pyrope", "description": "A popular character from the webcomic Homestuck and the phrase 'BL1ND JUST1C3', one of her soundtracks.", "links": {"website": ["https://mspaintadventures.fandom.com/wiki/Terezi_Pyrope"], "subreddit": ["homestuck"]}, "path": {"129-166, T": [[61, 1096], [92, 1096], [92, 1121], [61, 1121]]}, "center": {"129-166, T": [77, 1109]}},
{"id": "twvt0g", "name": "Peppah", "description": "Yorkshire Terrier pet of the streamer Forsen and his partner and ex-streamer Nani.\n\nAlso known as bibi this adorable dog frequently appeared on stream becoming part of numerous memes due to her apparent similarity with a rat.\n\nPeppah and Nani suddenly disappeared under unclear circumstances in early April 2020.\n\n2 years later Forsen's community homages their furry friend with an adapted version of Forsen's stream intro rendition of Peppah, originally drawn by Nani.", "links": {"website": ["https://www.instagram.com/commonpepper"], "subreddit": ["forsen"]}, "path": {"1-165, T": [[719, 944], [732, 944], [732, 954], [719, 954]]}, "center": {"1-165, T": [726, 949]}},