mirror of https://github.com/placeAtlas/atlas.git
synced 2024-11-15 14:33:36 +01:00

Commit 9437f4cc7c: Integrate whiteout scaling with merging
Parent commit: 88fb2ade3b
6 changed files with 99 additions and 101 deletions
@@ -9743,3 +9743,4 @@ uu2w70
 uu2uas
 uu2rnm
 uu2p0n
+uui38k
@@ -285,23 +285,6 @@ def sort_image_keys(entry: dict):
 
     return entry
 
-def extend_entries_to_whiteout(entry: dict):
-    """
-    If an entry ends on the final non-whiteout image, extends the image to the last whiteout image where entries can still be made out.
-    """
-    for outer_key in ["path", "center"]:
-        image_keys: List[str] = list(entry[outer_key].keys())
-        for image_key in image_keys:
-            new_key = None
-            if NORMAL_IMAGE_SUFFIX in image_key:
-                new_key = image_key.replace(NORMAL_IMAGE_SUFFIX, WHITEOUT_IMAGE_SUFFIX)
-            elif image_key == END_NORMAL_IMAGE:
-                new_key = END_NORMAL_IMAGE + WHITEOUT_IMAGE_SUFFIX
-            entry[outer_key][new_key] = entry[outer_key][image_key]
-            del(entry[outer_key][image_key])
-
-    return entry
-
 def floor_points(entry: dict):
     """
     Floors points on path and center, removing the decimal count.
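
For reference, a minimal runnable sketch of what the removed helper does to a single entry. The three suffix constants below are illustrative placeholders; their real values are defined elsewhere in formatter.py and are not part of this diff:

    from typing import List

    # Illustrative placeholder values; the real constants live in formatter.py.
    NORMAL_IMAGE_SUFFIX = "-166"
    WHITEOUT_IMAGE_SUFFIX = "-167"
    END_NORMAL_IMAGE = "166"

    def extend_entries_to_whiteout(entry: dict):
        # Rename every period key that ends on the final non-whiteout image so the entry
        # also covers the whiteout images (keys are assumed to hit one of the two branches).
        for outer_key in ["path", "center"]:
            image_keys: List[str] = list(entry[outer_key].keys())
            for image_key in image_keys:
                new_key = None
                if NORMAL_IMAGE_SUFFIX in image_key:
                    new_key = image_key.replace(NORMAL_IMAGE_SUFFIX, WHITEOUT_IMAGE_SUFFIX)
                elif image_key == END_NORMAL_IMAGE:
                    new_key = END_NORMAL_IMAGE + WHITEOUT_IMAGE_SUFFIX
                entry[outer_key][new_key] = entry[outer_key][image_key]
                del entry[outer_key][image_key]
        return entry

    entry = {"path": {"150-166": [[100, 100], [120, 100], [120, 120]]},
             "center": {"150-166": [110, 110]}}
    print(extend_entries_to_whiteout(entry))
    # {'path': {'150-167': [[100, 100], [120, 100], [120, 120]]}, 'center': {'150-167': [110, 110]}}

After this commit the whiteout extension is handled instead by the scale_back passes shown below, which promote period keys only when enough of the entry survives on the reference canvas.
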
@@ -403,21 +386,6 @@ def print_(*args, **kwargs):
     print_("Completed!")
     return entry
 
-def format_all_crawl(entry: dict, silent=False):
-    """
-    Format using all the available formatters.
-    """
-    def print_(*args, **kwargs):
-        if not silent:
-            print(*args, **kwargs)
-
-    format_all(entry, silent)
-    print_("Extending entries to whiteout...")
-    entry = extend_entries_to_whiteout(entry)
-
-    print_("Completed!")
-    return entry
-
 if __name__ == '__main__':
 
     def go(path):
@@ -1,11 +1,25 @@
 import json
+import os
+import scale_back
 from formatter import per_line_entries
 
+from scale_back import ScaleConfig
+
+merge_source_file = 'temp_atlas.json'
+base_image_path = os.path.join('..', 'web', '_img', 'canvas', 'place30')
+ScaleConfig.image1 = os.path.join(base_image_path, '159.png')
+scale_back.swap_source_dest('164', '165', os.path.join(base_image_path, '163_159.png'))
+scale_back.go(merge_source_file)
+scale_back.swap_source_dest('165', '166', os.path.join(base_image_path, '164_159.png'))
+scale_back.go(merge_source_file)
+scale_back.swap_source_dest('166', '167', os.path.join(base_image_path, '165_159.png'))
+scale_back.go(merge_source_file)
+
 out_ids = set()
 out_dupe_ids = set()
 atlas_ids = {}
 
-with open('temp_atlas.json', 'r', encoding='utf-8') as out_file:
+with open(merge_source_file, 'r', encoding='utf-8') as out_file:
     out_json = json.loads(out_file.read())
 
 with open('../web/atlas.json', 'r', encoding='utf-8') as atlas_file:
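
The three back-to-back swap_source_dest/go pairs above run one whiteout-scaling pass per reference canvas image before the merge reads temp_atlas.json. A minimal sketch of the same sequence driven from a list, assuming nothing else depends on the calls being written out one by one:

    import os
    import scale_back
    from scale_back import ScaleConfig

    merge_source_file = 'temp_atlas.json'
    base_image_path = os.path.join('..', 'web', '_img', 'canvas', 'place30')
    ScaleConfig.image1 = os.path.join(base_image_path, '159.png')

    # (source canvas, destination canvas, second reference layer) for each whiteout pass
    passes = [
        ('164', '165', '163_159.png'),
        ('165', '166', '164_159.png'),
        ('166', '167', '165_159.png'),
    ]
    for source, destination, reference in passes:
        scale_back.swap_source_dest(source, destination, os.path.join(base_image_path, reference))
        scale_back.go(merge_source_file)

Each go() pass rewrites temp_atlas.json in place, so later passes see the keys promoted by earlier ones; that is presumably why the passes run in ascending canvas order.
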
@@ -28,7 +28,7 @@
 import time
 import re
 import traceback
-from formatter import format_all_crawl, validate
+from formatter import format_all, validate
 from migrate_atlas_format import migrate_atlas_format
 
 OUT_FILE = open('temp_atlas.json', 'w', encoding='utf-8')
@@ -119,7 +119,7 @@ def set_flair(submission, flair):
         for key in submission_json:
             if not key in submission_json_dummy:
                 submission_json_dummy[key] = submission_json[key];
-        submission_json = format_all_crawl(submission_json_dummy, True)
+        submission_json = format_all(submission_json_dummy, True)
         validation_status = validate(submission_json)
 
         assert validation_status < 3, \
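
The True passed here is the formatter's silent flag (the removed format_all_crawl above had the same signature). A tiny self-contained sketch of that convention, with a stub standing in for the real format_all:

    def format_all(entry: dict, silent=False):
        # Stub with the same calling convention as the real formatter in formatter.py.
        def print_(*args, **kwargs):
            if not silent:
                print(*args, **kwargs)
        print_("Formatting", entry.get("id"), "...")
        return entry

    submission_json = format_all({"id": "twvt8g"}, True)   # True suppresses the per-entry progress output
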
@@ -40,17 +40,26 @@
 web\_img\canvas\place30\165_159.png
 """
 
-type = input("Type (shrink/expand): ")
-source = input("Source: ")
-destination = input("Destination: ")
-threshold = int(input("Threshold (%): "))
-image1 = input("Reference canvas layer 1: ")
-image2 = input("Reference canvas layer 2: ")
+class ScaleConfig:
+    type = 'expand'
+    source = ''
+    destination = ''
+    threshold = 20
+    image1 = ''
+    image2 = ''
+
+def swap_source_dest(source, destination, image2):
+    ScaleConfig.source = source
+    ScaleConfig.destination = destination
+    ScaleConfig.image2 = image2
+
+def remove_white(entry: dict):
+
     canvas_ref = Image.new('RGBA', (2000,2000))
 
-with Image.open(image1).convert('RGBA') as image1:
-    if image2:
-        with Image.open(image2).convert('RGBA') as image2:
+    with Image.open(ScaleConfig.image1).convert('RGBA') as image1:
+        if ScaleConfig.image2:
+            with Image.open(ScaleConfig.image2).convert('RGBA') as image2:
                 canvas_ref.paste(image1, (0, 0), image1)
                 canvas_ref.paste(image2, (0, 0), image2)
         canvas_ref
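
With the interactive prompts gone from module scope, scale_back can now be imported and configured from another script. A minimal sketch, assuming scale_back.py (and its Pillow dependency) is importable from the working directory; note that swap_source_dest only updates source, destination and image2, so type and threshold keep their class defaults unless set explicitly:

    import scale_back
    from scale_back import ScaleConfig

    ScaleConfig.image1 = '159.png'                             # base reference layer (illustrative path)
    scale_back.swap_source_dest('164', '165', '163_159.png')  # source, destination, second reference layer

    print(ScaleConfig.type, ScaleConfig.threshold)    # expand 20 -- defaults, untouched by swap_source_dest
    print(ScaleConfig.source, ScaleConfig.destination, ScaleConfig.image2)   # 164 165 163_159.png

    # scale_back.go('temp_atlas.json') would then apply the pass to that file.

Running python scale_back.py directly still prompts for the same six values, since the input() calls now live under the __main__ guard shown in a later hunk.
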
@@ -60,12 +69,11 @@
     # uncomment when you need to see the source canvas
     # canvas_ref.show()
 
-def remove_white(entry: dict):
     # print(entry['path'])
 
     for (period, polygonList) in entry['path'].items():
 
-        if not f"-{source}" in period: continue
+        if not f"-{ScaleConfig.source}" in period: continue
 
         # Get bounding rectangle and have a list of tuples for polygon
 
@@ -125,9 +133,9 @@ def remove_white(entry: dict):
 
         colorness = (100 * colored_pixel_count)/all_pixel_count
 
-        if (type == "shrink" and colorness < threshold) or (type == "expand" and colorness > threshold):
+        if (ScaleConfig.type == "shrink" and colorness < ScaleConfig.threshold) or (ScaleConfig.type == "expand" and colorness > ScaleConfig.threshold):
             print(f"[{entry['id']} {period}] {colored_pixel_count}/{all_pixel_count} ({colorness}%)")
-            new_period = period.replace(f'-{source}', f'-{destination}')
+            new_period = period.replace(f'-{ScaleConfig.source}', f'-{ScaleConfig.destination}')
             entry['path'][new_period] = entry['path'][period]
             del entry['path'][period]
             entry['center'][new_period] = entry['center'][period]
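
A worked numeric example of the promotion test above, using the ScaleConfig defaults for type and threshold and the source/destination pair from the last pass in the merge script; the pixel counts are made up:

    type = 'expand'              # ScaleConfig.type default
    threshold = 20               # ScaleConfig.threshold default (percent)
    source, destination = '166', '167'   # as in the merge script's third pass

    colored_pixel_count = 300    # illustrative; computed from the reference canvas in remove_white
    all_pixel_count = 1000       # illustrative; pixels inside the entry's polygon

    colorness = (100 * colored_pixel_count) / all_pixel_count   # 30.0

    if (type == "shrink" and colorness < threshold) or (type == "expand" and colorness > threshold):
        # 30.0 > 20, so the entry is kept and its period key is promoted to the destination image:
        period = "56-166, T"
        new_period = period.replace(f'-{source}', f'-{destination}')
        print(new_period)   # 56-167, T
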
@@ -160,11 +168,9 @@ def print_(*args, **kwargs):
     print_("Completed!")
     return entry
 
-if __name__ == '__main__':
-
 def go(path):
 
-    print(f"Formatting {path}...")
+    print(f"Scaling whiteout for {path}...")
 
     with open(path, "r+", encoding='UTF-8') as f1:
         entries = json.loads(f1.read())
@@ -187,4 +193,13 @@ def go(path):
 
     print("Writing completed. All done.")
 
+if __name__ == '__main__':
+
+    ScaleConfig.type = input("Type (shrink/expand): ")
+    ScaleConfig.source = input("Source: ")
+    ScaleConfig.destination = input("Destination: ")
+    ScaleConfig.threshold = int(input("Threshold (%): "))
+    ScaleConfig.image1 = input("Reference canvas layer 1: ")
+    ScaleConfig.image2 = input("Reference canvas layer 2: ")
+
     go("web/atlas.json")
@@ -2120,7 +2120,7 @@
 {"id": "twvtf9", "name": "Venezuela flag", "description": "Venezuelan flag with typical venezuelan food, the national tree and the Salto Angel, on the of 7 wonders of the world", "links": {"subreddit": ["vzla"]}, "path": {"56-166, T": [[1201, 775], [1281, 774], [1281, 805], [1202, 804], [1201, 791]]}, "center": {"56-166, T": [1242, 789]}},
 {"id": "twvta9", "name": "Greggs", "description": "A British fast food bakery shop. Best known for its Sausage Rolls. Many Brits consider this an essential part of the nation.", "links": {"website": ["https://www.greggs.co.uk/"], "subreddit": ["greggs"]}, "path": {"1-164, T": [[701, 476], [701, 482], [753, 482], [753, 476], [721, 476]]}, "center": {"1-164, T": [727, 479]}},
 {"id": "twvt8j", "name": "Flag of Trinidad and Tobago", "description": "The national flag of the twin island Republic of Trinidad and Tobago in the Caribbean.", "links": {"website": ["https://en.wikipedia.org/wiki/Trinidad_and_Tobago"], "subreddit": ["TrinidadandTobago"]}, "path": {"109-166, T": [[1373, 1188], [1394, 1188], [1394, 1199], [1373, 1199]]}, "center": {"109-166, T": [1384, 1194]}},
-{"id": "twvt8g", "name": "Don't Toy with me, Miss Nagatoro", "description": "From the manga and anime series Don't Toy with me, Miss Nagatoro, created by 774 (Nanashi). At the time of writing this, it ranks as the 50th most popular manga on My Anime List. It is a romantic comedy story where Nagatoro, a dominant, raunchy, and slightly sadistic girl, flirtatiously teases her senpai, a timid and well-mannered guy who is very shy. The series is beloved for its character writing, and extra expressive facial expressions. This art features both of the main characters.", "links": {"website": ["https://www.crunchyroll.com/dont-toy-with-me-miss-nagatoro/episode-1-senpai-is-a-bit-senpai-dont-you-ever-get-angry-811065"], "subreddit": ["nagatoro"]}, "path": {"109-166, T": [[843, 1520], [843, 1564], [880, 1564], [879, 1520], [857, 1522]]}, "center": {"109-166, T": [861, 1543]}},
+{"id": "twvt8g", "name": "Don't Toy with me, Miss Nagatoro", "description": "The main characters from the romantic comedy manga/ anime series, Don't Toy With Me, Miss Nagatoro, Created by Nanashi.\nThe story centers around Nagatoro, a slightly sadistic girl, teasing her overly shy senpai.", "links": {"website": ["https://www.crunchyroll.com/dont-toy-with-me-miss-nagatoro/episode-1-senpai-is-a-bit-senpai-dont-you-ever-get-angry-811065"], "subreddit": ["nagatoro"]}, "path": {"109-166, T": [[843, 1520], [843, 1564], [880, 1564], [879, 1520], [857, 1522]]}, "center": {"109-166, T": [861, 1543]}},
 {"id": "twvt57", "name": "Bigtoe", "description": "Original character in the My Little Pony universe.", "links": {}, "path": {"125-166, T": [[24, 1093], [31, 1093], [31, 1090], [41, 1090], [41, 1107], [34, 1107], [34, 1104], [24, 1104]]}, "center": {"125-166, T": [35, 1098]}},
 {"id": "twvt2n", "name": "Terezi Pyrope", "description": "A popular character from the webcomic Homestuck and the phrase 'BL1ND JUST1C3', one of her soundtracks.", "links": {"website": ["https://mspaintadventures.fandom.com/wiki/Terezi_Pyrope"], "subreddit": ["homestuck"]}, "path": {"129-166, T": [[61, 1096], [92, 1096], [92, 1121], [61, 1121]]}, "center": {"129-166, T": [77, 1109]}},
 {"id": "twvt0g", "name": "Peppah", "description": "Yorkshire Terrier pet of the streamer Forsen and his partner and ex-streamer Nani.\n\nAlso known as bibi this adorable dog frequently appeared on stream becoming part of numerous memes due to her apparent similarity with a rat.\n\nPeppah and Nani suddenly disappeared under unclear circumstances in early April 2020.\n\n2 years later Forsen's community homages their furry friend with an adapted version of Forsen's stream intro rendition of Peppah, originally drawn by Nani.", "links": {"website": ["https://www.instagram.com/commonpepper"], "subreddit": ["forsen"]}, "path": {"1-165, T": [[719, 944], [732, 944], [732, 954], [719, 954]]}, "center": {"1-165, T": [726, 949]}},