From bfb99e155da9a5b81a8c84ac72d59a8790ec69c8 Mon Sep 17 00:00:00 2001 From: Hans5958 Date: Tue, 25 Apr 2023 13:19:52 +0700 Subject: [PATCH 01/21] Add schema-based validation --- .github/workflows/validate-json.yml | 4 +- tools/atlas.schema.json | 115 ++++++++++++++++++++++++++++ tools/ci/requirements.txt | 1 + tools/ci/validate_json.py | 16 +++- 4 files changed, 132 insertions(+), 4 deletions(-) create mode 100644 tools/atlas.schema.json create mode 100644 tools/ci/requirements.txt diff --git a/.github/workflows/validate-json.yml b/.github/workflows/validate-json.yml index b92b01fa..d5a8ef20 100644 --- a/.github/workflows/validate-json.yml +++ b/.github/workflows/validate-json.yml @@ -14,4 +14,6 @@ jobs: - name: Checkout code uses: actions/checkout@v3 - name: Validate JSON - run: python3 tools/ci/validate_json.py web/atlas.json \ No newline at end of file + run: | + pip3 install -r tools/ci/requirements.txt + python3 tools/ci/validate_json.py web/atlas.json tools/atlas.schema.json \ No newline at end of file diff --git a/tools/atlas.schema.json b/tools/atlas.schema.json new file mode 100644 index 00000000..e2d9aa04 --- /dev/null +++ b/tools/atlas.schema.json @@ -0,0 +1,115 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "integer" + } + ], + "description": "The ID of the entry. Usually this is the post ID of the new entry submission." + }, + "name": { + "type": "string", + "description": "The short, descriptive name of the entry." + }, + "description": { + "type": "string", + "description": "The description of the entry. that will also be understood by somebody not familiar with the topic. Usually, the first sentence on Wikipedia is a good example." + }, + "links": { + "type": "object", + "description": "The links related to the entry.", + "properties": { + "subreddit": { + "type": "array", + "description": "Subreddits that's either most relevant to the topic, or that was responsible for creating the artwork, excluding the r/.", + "items": { + "type": "string", + "description": "A subreddit that's either most relevant to the topic, or that was responsible for creating the artwork.", + "pattern": "^[A-Za-z0-9][A-Za-z0-9_]{1,20}$", + "minItems": 1 + } + }, + "website": { + "type": "array", + "description": "URL to websites related to the entry, including the http/https protocol. 
If you're describing a project, the project's main website would be suitable here.", + "items": { + "type": "string", + "description": "The URL to a website related to the entry.", + "pattern": "^https?://[^\\s/$.?#].[^\\s]*$", + "minItems": 1 + } + }, + "discord": { + "type": "array", + "description": "Invite codes of Discord servers related to the entry (excluding discord.gg/)", + "items": { + "type": "string", + "description": "The invite code of a Discord server related to the entry.", + "minItems": 1 + } + }, + "wiki": { + "type": "array", + "description": "Wiki pages related to the entry.", + "items": { + "type": "string", + "description": "The title of the wiki page related to the entry.", + "minItems": 1 + } + } + }, + "additionalProperties": false + }, + "path": { + "type": "object", + "description": "The path of the entry.", + "patternProperties": { + "^(\\d+(-\\d+)?|\\w+(:\\d+(-\\d+)?)?)(, (\\d+(-\\d+)?|\\w+(:\\d+(-\\d+)?)?))*$": { + "type": "array", + "description": "A period containing the path points.", + "items": { + "type": "array", + "description": "A point.", + "items": { + "type": "number" + }, + "minItems": 2, + "maxItems": 2 + }, + "minItems": 3 + } + }, + "additionalProperties": false, + "minProperties": 1 + }, + "center": { + "type": "object", + "description": "The center of the entry.", + "patternProperties": { + "^(\\d+(-\\d+)?|\\w+(:\\d+(-\\d+)?)?)(, (\\d+(-\\d+)?|\\w+(:\\d+(-\\d+)?)?))*$": { + "type": "array", + "description": "A period containing the center point.", + "items": { + "type": "number", + "description": "A point." + }, + "minItems": 2, + "maxItems": 2 + } + }, + "additionalProperties": false, + "minProperties": 1 + } + }, + "required": ["id", "name", "description", "links", "path", "center"], + "additionalItems": true + } +} \ No newline at end of file diff --git a/tools/ci/requirements.txt b/tools/ci/requirements.txt new file mode 100644 index 00000000..7b8f0158 --- /dev/null +++ b/tools/ci/requirements.txt @@ -0,0 +1 @@ +jsonschema \ No newline at end of file diff --git a/tools/ci/validate_json.py b/tools/ci/validate_json.py index 9f2bcfdf..a9fc25d5 100644 --- a/tools/ci/validate_json.py +++ b/tools/ci/validate_json.py @@ -2,13 +2,23 @@ import sys import json +from jsonschema import validate -path = "./../../web/atlas.json" +atlasPath = "./../../web/atlas.json" # path override as 1st param: validate_json.py path_to_file.json if (len(sys.argv) > 1): - path = sys.argv[1] + atlasPath = sys.argv[1] -json.load(open(path, "r", encoding='utf-8')) +schemaPath = "./../atlas.schema.json" + +# schema override as 2nd param: validate_json.py [...] 
path_to_schema.json
+if (len(sys.argv) > 2):
+	schemaPath = sys.argv[2]
+
+atlas = json.load(open(atlasPath, "r", encoding='utf-8'))
+schema = json.load(open(schemaPath, "r", encoding='utf-8'))
+
+validate(atlas, schema)
 
 print("JSON is valid")
\ No newline at end of file

From c38cf611cd37536aa347a8f8a8d662f6b54c8937 Mon Sep 17 00:00:00 2001
From: Hans5958
Date: Tue, 25 Apr 2023 13:20:36 +0700
Subject: [PATCH 02/21] Write directly, use tqdm

---
 tools/aformatter.py                  | 30 +++++++++++++++-------------
 tools/merge_out.py                   |  2 +-
 tools/oneoff/migrate_atlas_format.py | 26 ++++++++++++------------
 tools/requirements.txt               |  3 ++-
 tools/scale_back.py                  | 23 +++++++++++----------
 5 files changed, 45 insertions(+), 39 deletions(-)

diff --git a/tools/aformatter.py b/tools/aformatter.py
index 365a68c0..01913c9b 100644
--- a/tools/aformatter.py
+++ b/tools/aformatter.py
@@ -1,10 +1,12 @@
 #!/usr/bin/python
 
+from io import TextIOWrapper
+from typing import List
 import re
 import json
 import math
 import traceback
-from typing import List
+import tqdm
 
 END_NORMAL_IMAGE = "164"
 END_WHITEOUT_IMAGE = "166"
@@ -302,7 +304,6 @@ def floor_points(entry: dict):
 
 	return entry
 
-
 def validate(entry: dict):
 	"""
 	Validates the entry. Catch errors and tell warnings related to the entry.
@@ -339,16 +340,17 @@ def validate(entry: dict):
 			print(f"{key} of entry {entry['id']} is still invalid! {entry[key]}")
 	return return_status
 
-def per_line_entries(entries: list):
+def per_line_entries(entries: list, file: TextIOWrapper):
 	"""
-	Returns a string of all the entries, with every entry in one line.
+	Writes all the entries to the given file, with every entry on one line.
 	"""
-	out = "[\n"
-	for entry in entries:
-		if entry:
-			out += json.dumps(entry, ensure_ascii=False) + ",\n"
-	out = out[:-2] + "\n]"
-	return out
+	file.write("[\n")
+	line_temp = ""
+	for entry in tqdm.tqdm(entries):
+		if line_temp:
+			file.write(line_temp + ",\n")
+		line_temp = json.dumps(entry, ensure_ascii=False)
+	file.write(line_temp + "\n]")
 
 def format_all(entry: dict, silent=False):
 	"""
@@ -387,7 +389,7 @@ def print_(*args, **kwargs):
 	return entry
 
 def format_all_entries(entries):
-	for i in range(len(entries)):
+	for i in tqdm.trange(len(entries)):
 		try:
 			entry_formatted = format_all(entries[i], True)
 			validation_status = validate(entries[i])
@@ -399,8 +401,6 @@ def format_all_entries(entries):
 		except Exception:
 			print(f"Exception occured when formatting ID {entries[i]['id']}")
 			print(traceback.format_exc())
-		if not (i % 200):
-			print(f"{i} checked.")
 
 
 def go(path):
@@ -411,10 +411,10 @@ def go(path):
 
 	format_all_entries(entries)
 
-	print(f"{len(entries)} checked. Writing...")
+	print("Writing...")
 
 	with open(path, "w", encoding='utf-8', newline='\n') as f2:
-		f2.write(per_line_entries(entries))
+		per_line_entries(entries, f2)
 
 	print("Writing completed. 
All done.")
 
 
diff --git a/tools/merge_out.py b/tools/merge_out.py
index 63181220..97417651 100644
--- a/tools/merge_out.py
+++ b/tools/merge_out.py
@@ -81,7 +81,7 @@
 
 print('Writing...')
 with open('../web/atlas.json', 'w', encoding='utf-8') as atlas_file:
-	atlas_file.write(per_line_entries(atlas_json))
+	per_line_entries(atlas_json, atlas_file)
 
 with open('../data/read-ids.txt', 'a', encoding='utf-8') as read_ids_file:
 	with open('temp-read-ids.txt', 'r+', encoding='utf-8') as read_ids_temp_file:
diff --git a/tools/oneoff/migrate_atlas_format.py b/tools/oneoff/migrate_atlas_format.py
index 7ec4e157..efdb1c58 100644
--- a/tools/oneoff/migrate_atlas_format.py
+++ b/tools/oneoff/migrate_atlas_format.py
@@ -7,9 +7,12 @@
 - submitted_by removed
 """
 
+from io import TextIOWrapper
 import re
 import json
 
+import tqdm
+
 END_IMAGE = 166
 INIT_CANVAS_RANGE = (1, END_IMAGE)
 EXPANSION_1_RANGE = (56, END_IMAGE)
@@ -73,16 +76,17 @@ def migrate_atlas_format(entry: dict):
 
 	return toreturn
 
-def per_line_entries(entries: list):
+def per_line_entries(entries: list, file: TextIOWrapper):
 	"""
-	Returns a string of all the entries, with every entry in one line.
+	Writes all the entries to the given file, with every entry on one line.
 	"""
-	out = "[\n"
-	for entry in entries:
-		if entry:
-			out += json.dumps(entry, ensure_ascii=False) + ",\n"
-	out = out[:-2] + "\n]"
-	return out
+	file.write("[\n")
+	line_temp = ""
+	for entry in tqdm.tqdm(entries):
+		if line_temp:
+			file.write(line_temp + ",\n")
+		line_temp = json.dumps(entry, ensure_ascii=False)
+	file.write(line_temp + "\n]")
 
 
 if __name__ == '__main__':
@@ -93,16 +97,14 @@ def go(path):
 
 	with open(path, "r+", encoding='UTF-8') as f1:
 		entries = json.loads(f1.read())
 
-	for i in range(len(entries)):
+	for i in tqdm.trange(len(entries)):
 		entry_formatted = migrate_atlas_format(entries[i])
 		entries[i] = entry_formatted
-		if not (i % 1000):
-			print(f"{i} checked.")
 
 	print(f"{len(entries)} checked. Writing...")
 
 	with open(path, "w", encoding='utf-8', newline='\n') as f2:
-		f2.write(per_line_entries(entries))
+		per_line_entries(entries, f2)
 
 	print("Writing completed. All done.")
 
diff --git a/tools/requirements.txt b/tools/requirements.txt
index 9d9d90a4..a4bb482c 100644
--- a/tools/requirements.txt
+++ b/tools/requirements.txt
@@ -1 +1,2 @@
-praw
\ No newline at end of file
+praw
+tqdm
\ No newline at end of file
diff --git a/tools/scale_back.py b/tools/scale_back.py
index 75cd0bf4..9cbccd2e 100644
--- a/tools/scale_back.py
+++ b/tools/scale_back.py
@@ -1,10 +1,12 @@
 #!/usr/bin/python
 
+from io import TextIOWrapper
 import json
 import traceback
 import numpy
 from PIL import Image, ImageDraw
 import gc
+import tqdm
 
 """
 # 166 to 164 with reference of 165
@@ -147,16 +149,17 @@ def remove_white(entry: dict):
 
 	return entry
 
-def per_line_entries(entries: list):
+def per_line_entries(entries: list, file: TextIOWrapper):
 	"""
-	Returns a string of all the entries, with every entry in one line.
+	Writes all the entries to the given file, with every entry on one line.
 	"""
-	out = "[\n"
-	for entry in entries:
-		if entry:
-			out += json.dumps(entry, ensure_ascii=False) + ",\n"
-	out = out[:-2] + "\n]"
-	return out
+	file.write("[\n")
+	line_temp = ""
+	for entry in tqdm.tqdm(entries):
+		if line_temp:
+			file.write(line_temp + ",\n")
+		line_temp = json.dumps(entry, ensure_ascii=False)
+	file.write(line_temp + "\n]")
 
 def format_all(entry: dict, silent=False):
 	def print_(*args, **kwargs):
@@ -168,7 +171,7 @@ def print_(*args, **kwargs):
 	return entry
 
 def scale_back_entries(entries):
-	for i in range(len(entries)):
+	for i in tqdm.trange(len(entries)):
 		try:
 			entry_formatted = format_all(entries[i], True)
 			entries[i] = entry_formatted
@@ -191,7 +194,7 @@ def go(path):
 
 	print(f"{len(entries)} checked. Writing...")
 
 	with open(path, "w", encoding='utf-8', newline='\n') as f2:
-		f2.write(per_line_entries(entries))
+		per_line_entries(entries, f2)
 
 	print("Writing completed. All done.")
 

From f1fa8a104f34d610d8cd5e4e387865e51b6a5d6f Mon Sep 17 00:00:00 2001
From: Hans5958
Date: Tue, 25 Apr 2023 13:42:18 +0700
Subject: [PATCH 03/21] Use -1 instead of 0 for new entries

This prevents the potential risk of having 0 as the first-ever entry
ID, while -1 clearly means none
---
 tools/atlas.schema.json | 3 ++-
 tools/redditcrawl.py    | 4 ++--
 web/_js/main/draw.js    | 2 +-
 3 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/tools/atlas.schema.json b/tools/atlas.schema.json
index e2d9aa04..cf4f764d 100644
--- a/tools/atlas.schema.json
+++ b/tools/atlas.schema.json
@@ -10,7 +10,8 @@
 					"type": "string"
 				},
 				{
-					"type": "integer"
+					"type": "integer",
+					"minimum": 0
 				}
 			],
 			"description": "The ID of the entry. Usually this is the post ID of the new entry submission."
diff --git a/tools/redditcrawl.py b/tools/redditcrawl.py
index 9f5a4103..f64f1cb7 100755
--- a/tools/redditcrawl.py
+++ b/tools/redditcrawl.py
@@ -103,13 +103,13 @@ def set_flair(submission, flair):
 
 		if submission.link_flair_text == "Edit Entry":
 
-			assert submission_json["id"] != 0, "Edit invalid because ID is tampered, it must not be 0!"
+			assert submission_json["id"] > 0, "Edit invalid because ID is tampered, it must not be 0 or -1!"
 
 			submission_json_dummy = {"id": submission_json["id"], "edit": submission.id}
 		else:
 
-			assert submission_json["id"] == 0, "Edit invalid because ID is tampered, it must be 0!"
+			assert submission_json["id"] <= 0, "Addition invalid because ID is tampered, it must be 0 or -1!"
 
 			submission_json_dummy = {"id": submission.id}
diff --git a/web/_js/main/draw.js b/web/_js/main/draw.js
index 5cc07c07..cd1c0763 100644
--- a/web/_js/main/draw.js
+++ b/web/_js/main/draw.js
@@ -260,7 +260,7 @@ function initDraw() {
 
 	function generateExportObject() {
 		const exportObject = {
-			id: entryId ?? 0,
+			id: entryId ?? 
-1, name: nameField.value, description: descriptionField.value, links: {}, From a3a64f7fc28885490d8ac58c76530ca236b72c8c Mon Sep 17 00:00:00 2001 From: Hans5958 Date: Wed, 17 May 2023 22:45:12 +0700 Subject: [PATCH 04/21] Rename workflow name, add caching --- .github/workflows/validate-json.yml | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/.github/workflows/validate-json.yml b/.github/workflows/validate-json.yml index d5a8ef20..36674e66 100644 --- a/.github/workflows/validate-json.yml +++ b/.github/workflows/validate-json.yml @@ -1,4 +1,4 @@ -name: Validate JSON +name: Validate Atlas data on: push: paths: @@ -8,12 +8,19 @@ on: - web/atlas.json jobs: validate: - name: Validate JSON + name: Validate runs-on: ubuntu-latest steps: - name: Checkout code uses: actions/checkout@v3 - - name: Validate JSON + - name: Cache dependencies + uses: actions/cache@v3 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Validate run: | pip3 install -r tools/ci/requirements.txt python3 tools/ci/validate_json.py web/atlas.json tools/atlas.schema.json \ No newline at end of file From 21da09fa77f0024e7f95993ea72742cdaf8d65ca Mon Sep 17 00:00:00 2001 From: Hans5958 Date: Thu, 1 Jun 2023 10:16:30 +0700 Subject: [PATCH 05/21] Get image layers in parallel --- web/_js/main/time.js | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/web/_js/main/time.js b/web/_js/main/time.js index 04af6408..f7911f68 100644 --- a/web/_js/main/time.js +++ b/web/_js/main/time.js @@ -106,18 +106,20 @@ async function updateBackground(newPeriod = currentPeriod, newVariation = curren } const canvas = document.createElement('canvas') const context = canvas.getContext('2d') - for await (const url of layerUrls) { + + layers.length = layerUrls.length + await Promise.all(layerUrls.map(async (url, i) => { const imageLayer = new Image() await new Promise(resolve => { imageLayer.onload = () => { context.canvas.width = Math.max(imageLayer.width, context.canvas.width) context.canvas.height = Math.max(imageLayer.height, context.canvas.height) - layers.push(imageLayer) + layers[i] = imageLayer resolve() } imageLayer.src = url }) - } + })) for (const imageLayer of layers) { context.drawImage(imageLayer, 0, 0) From dd469291d60650475c9c904535eb4a3ee4c00bbb Mon Sep 17 00:00:00 2001 From: Hans5958 Date: Thu, 1 Jun 2023 13:46:38 +0700 Subject: [PATCH 06/21] Format atlas data For testing --- web/atlas.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web/atlas.json b/web/atlas.json index abd06e00..b4ce160b 100644 --- a/web/atlas.json +++ b/web/atlas.json @@ -1495,7 +1495,7 @@ {"id": "twtde8", "name": "Fattypillow", "description": "Popular czech YouTuber and streamer!", "links": {"subreddit": ["Fattypillow"]}, "path": {"109-165, T:0-1": [[1536, 1233], [1599, 1233], [1599, 1249], [1536, 1249]]}, "center": {"109-165, T:0-1": [1568, 1241]}}, {"id": "twtday", "name": "Mizutsune", "description": "This is the icon of Mizutsune, a popular monster from the Monster Hunter series. It has appeared in several Monster Hunter games and is beloved by its fans for its colourful apperance and the ability to produce bubbles, which led to the nickname \"bubble fox\". In the top-right corner, The initials of the game can be seen. They are written in the color scheme of the France flag, in memoriam of the small neighboring France flag, which was destroyed towards the end. 
This artwork was allied with the Destiny artwork above, The vegan banner to the left and with the BazzaGazza logo on the right. The whole story can be read here: https://www.reddit.com/r/monster_hunter_place/comments/twx9yq/looking_back_on_rplace_2022_the_full_timeline/", "links": {"website": ["https://monsterhunter.fandom.com/wiki/Mizutsune"], "subreddit": ["MonsterHunter"]}, "path": {"109-166, T:0-1": [[1908, 1562], [1908, 1600], [1874, 1600], [1875, 1562]]}, "center": {"109-166, T:0-1": [1891, 1581]}}, {"id": "twtd6i", "name": "Star Academy (reste du logo)", "description": "Les restes du logo Star Academy (rip petitanj) créé le 04/04/2022 par les doux dingues sur la chaîne Flonflon_musique", "links": {"website": ["https://www.twitch.tv/flonflon_musique"]}, "path": {"109-165, T:0-1": [[1040, 1315], [1038, 1331], [1053, 1332], [1055, 1302]]}, "center": {"109-165, T:0-1": [1047, 1320]}}, -{"id": "twtd4j", "name": "Shiny Chatot", "description": "Chatot is a Normal/Flying-type parrot Pokémon from the Generation 4 Pokémon games (Pokémon Diamond/Pearl). This Chatot has a rare Shiny color, with pink wings instead of the usual blue.\n\nThis art was drawn by Chatot Dungeon, a spinoff group from the Twitch channel Twitch Plays Pokémon that formed in 2014 after getting timed out in chat for saying \"I like Chatot\". The Chatot was later turned Shiny by French Pokémon YouTuber (PokéTuber) Sneaze, whose mascot is a Shiny Chatot named Mastouffe. ", "links": {"website": ["https://www.youtube.com/channel/UCQjurXV2DUU1LU2FiSWamIg", "https://bulbapedia.bulbagarden.net/wiki/Chatot_(Pok%C3%A9mon)"], "subreddit": ["pokemon"]}, "path": {"3-15": [[80, 742], [76, 744], [74, 746], [73, 749], [73, 752], [75, 754], [78, 754], [78, 755], [80, 757], [87, 758], [87, 755], [90, 752], [90, 749], [92, 749], [92, 744], [89, 744], [89, 742]], "16-166, T:0-1": [[82, 743], [80, 745], [80, 748], [76, 750], [74, 752], [74, 754], [73, 755], [73, 758], [75, 760], [78, 760], [78, 761], [80, 763], [85, 764], [87, 762], [87, 761], [90, 758], [90, 755], [92, 755], [92, 750], [89, 750], [89, 748], [86, 745]]}, "center": {"3-15": [83, 749], "16-166, T:0-1": [83, 755]}}, +{"id": "twtd4j", "name": "Shiny Chatot", "description": "Chatot is a Normal/Flying-type parrot Pokémon from the Generation 4 Pokémon games (Pokémon Diamond/Pearl). This Chatot has a rare Shiny color, with pink wings instead of the usual blue.\n\nThis art was drawn by Chatot Dungeon, a spinoff group from the Twitch channel Twitch Plays Pokémon that formed in 2014 after getting timed out in chat for saying \"I like Chatot\". 
The Chatot was later turned Shiny by French Pokémon YouTuber (PokéTuber) Sneaze, whose mascot is a Shiny Chatot named Mastouffe.", "links": {"website": ["https://www.youtube.com/channel/UCQjurXV2DUU1LU2FiSWamIg", "https://bulbapedia.bulbagarden.net/wiki/Chatot_(Pok%C3%A9mon)"], "subreddit": ["pokemon"]}, "path": {"3-15": [[80, 742], [76, 744], [74, 746], [73, 749], [73, 752], [75, 754], [78, 754], [78, 755], [80, 757], [87, 758], [87, 755], [90, 752], [90, 749], [92, 749], [92, 744], [89, 744], [89, 742]], "16-166, T:0-1": [[82, 743], [80, 745], [80, 748], [76, 750], [74, 752], [74, 754], [73, 755], [73, 758], [75, 760], [78, 760], [78, 761], [80, 763], [85, 764], [87, 762], [87, 761], [90, 758], [90, 755], [92, 755], [92, 750], [89, 750], [89, 748], [86, 745]]}, "center": {"3-15": [83, 749], "16-166, T:0-1": [83, 755]}}, {"id": "twtd3k", "name": "Girls' Frontline", "description": "Girls' Frontline is a turn-based strategy gacha game for mobile, developed by China-based studio MICA Team. In the game, players control androids known as \"T-Dolls\" who all carry versions of real-life firearms. The logo is the acronym of the game with a silhouette of the game's protagonist, M4A1. To the lower right of the logo is a pixelated version of HK416, one of the game's main characters and a member of Squad 404.", "links": {"website": ["https://gf.sunborngame.com/", "https://en.wikipedia.org/wiki/Girls%27_Frontline"], "subreddit": ["girlsfrontline"]}, "path": {"8-20": [[300, 935], [300, 942], [310, 942], [310, 935]], "62-166, T:0-1": [[1765, 725], [1769, 725], [1771, 722], [1768, 720], [1768, 717], [1769, 717], [1769, 715], [1768, 715], [1768, 711], [1769, 711], [1770, 710], [1771, 710], [1772, 707], [1772, 708], [1775, 705], [1776, 705], [1780, 709], [1780, 711], [1779, 711], [1779, 713], [1781, 716], [1783, 717], [1784, 720], [1785, 723], [1787, 726], [1790, 725], [1790, 737], [1790, 742], [1790, 747], [1788, 747], [1787, 748], [1783, 747], [1782, 746], [1780, 745], [1776, 745], [1774, 744], [1772, 743], [1764, 743], [1762, 740], [1762, 739], [1764, 737], [1765, 737]]}, "center": {"8-20": [305, 939], "62-166, T:0-1": [1778, 733]}}, {"id": "twtd2d", "name": "Together Through Time", "description": "The logo for the 2018 album Together Through Time, The first full-length studio album and sixth record by Canadian 80's future space band TWRP. This art was drawn by fan communities across Reddit and Discord.", "links": {"website": ["https://twrp.fandom.com/wiki/Together_Through_Time"], "subreddit": ["TWRP"]}, "path": {"109-166, T:0-1": [[1035, 1774], [1033, 1770], [1032, 1765], [1034, 1761], [1036, 1757], [1040, 1754], [1042, 1753], [1052, 1753], [1055, 1755], [1059, 1760], [1061, 1764], [1061, 1772], [1058, 1776], [1053, 1779], [1039, 1779], [1036, 1777], [1035, 1775]]}, "center": {"109-166, T:0-1": [1047, 1766]}}, {"id": "twtcu7", "name": "Flag of Kenya", "description": "Kenya is a country in East Africa. 
The flag of Kenya displays its signature Maasai shield.", "links": {"website": ["https://en.wikipedia.org/wiki/Kenya", "https://en.wikipedia.org/wiki/Flag_of_Kenya"], "subreddit": ["Kenya"]}, "path": {"56-108": [[781, 981], [781, 1000], [796, 1000], [796, 981]], "31-49": [[697, 984], [697, 999], [728, 999], [728, 984]], "5-30": [[681, 984], [681, 999], [728, 999], [728, 984]], "109-165, T:0-1": [[781, 979], [781, 1017], [796, 1017], [796, 982], [794, 980], [787, 980], [787, 979]]}, "center": {"56-108": [789, 991], "31-49": [713, 992], "5-30": [705, 992], "109-165, T:0-1": [789, 998]}}, @@ -7231,7 +7231,7 @@ {"id": "u8fwzq", "name": "Lucas", "description": "Lucas is the player character and protagonist of Mother 3. This sprite is from his appearance in Snowcap Mountain.", "links": {"website": ["https://earthbound.fandom.com/wiki/Lucas"], "subreddit": ["earthbound"]}, "path": {"160-167, T:0-1": [[1971, 348], [1968, 351], [1968, 352], [1969, 353], [1968, 354], [1970, 356], [1972, 356], [1974, 354], [1973, 353], [1974, 352], [1974, 350], [1972, 348]]}, "center": {"160-167, T:0-1": [1971, 351]}}, {"id": "u8fpy5", "name": "Flanders poppy", "description": "The Flanders poppy, also called the Anzac poppy, is a red flower in the poppy family. They are a symbol of Anzacs (soldiers of the Australian and New Zealand Army Corps), and are worn on November 11 (Remembrance Day) and April 25 (Anzac Day) to commemorate Australian and New Zealander soldiers who died in World War I.\n\nThis art was made by a small Discord.", "links": {"website": ["https://nzhistory.govt.nz/war/anzac-day/poppies", "https://en.wikipedia.org/wiki/Papaver_rhoeas", "https://en.wikipedia.org/wiki/Anzac_Day"]}, "path": {"149-166, T:0-1": [[352, 720], [352, 732], [363, 732], [363, 720]]}, "center": {"149-166, T:0-1": [358, 726]}}, {"id": "u8f1si", "name": "Marisad", "description": "Marisa Kirisame is a character from Touhou Project. In the fan-made anime Fantasy Kaleidoscope ~The Memories of Phantasm~, she is seen crying during one scene. Her funny expression quickly became a popular meme inside the Touhou fandom after being popularized by Touhou YouTuber Chiruno, and was dubbed \"Marisad\" (Marisa + sad). Additionally the emote of Marisa crying spread across many Touhou-related Discord servers, further adding to its popularity.\n\nSmol Marisad was established by r/marisad as a second project on the canvas. It unfortunately had to destroy the old Rick Astley pixel art that used to cover both Tomoko and the Tani logo. Shortly after Smol Marisad started to take shape, r/watamote began drawing Tomoko. After a short period of bickering between the two groups they finally agreed to borders, and an alliance. In the end both were destroyed before the archiving by the streamer Tanizen, who ordered his followers to draw a dog in their place.", "links": {"website": ["https://en.touhouwiki.net/wiki/Marisa_Kirisame"], "subreddit": ["Marisad", "touhou"], "discord": ["UVkWNdhQ"]}, "path": {"109-166, T:0-1": [[1724, 1199], [1724, 1218], [1740, 1218], [1740, 1199]]}, "center": {"109-166, T:0-1": [1732, 1209]}}, -{"id": "u8emqw", "name": "Purple hearts", "description": "A purple heart background. 
Purple represents love and trust between BTS and their fans, the ARMY.", "links": {"website": [], "subreddit": ["bangtan"]}, "path": {"97-104": [[1950, 175], [1943, 182], [1943, 238], [2000, 238], [2000, 194], [1982, 194], [1982, 195], [1989, 203], [1989, 215], [1976, 229], [1976, 238], [1970, 238], [1970, 229], [1958, 217], [1943, 217], [1943, 199], [1961, 199], [1965, 195], [1964, 194], [1960, 194], [1960, 175]], "69-94": [[1943, 194], [1943, 238], [2000, 238], [2000, 194], [1982, 194], [1982, 195], [1989, 203], [1989, 216], [1976, 229], [1976, 238], [1969, 238], [1969, 228], [1958, 217], [1943, 217], [1943, 199], [1962, 199], [1966, 194]], "111-166, T:0-1": [[1966, 194], [1966, 197], [1961, 197], [1959, 199], [1960, 238], [1976, 238], [1977, 238], [1977, 232], [1978, 231], [1977, 230], [1977, 229], [1969, 221], [1969, 220], [1967, 218], [1967, 216], [1966, 215], [1966, 205], [1968, 203], [1968, 202], [1970, 198], [1973, 195], [1992, 195], [1996, 199], [1996, 201], [1998, 203], [1998, 217], [1996, 219], [1996, 220], [1987, 229], [1987, 230], [1986, 231], [1987, 232], [1987, 238], [1999, 238], [1999, 194]]}, "center": {"97-104": [1953, 227], "69-94": [1954, 227], "111-166, T:0-1": [1967, 231]}}, +{"id": "u8emqw", "name": "Purple hearts", "description": "A purple heart background. Purple represents love and trust between BTS and their fans, the ARMY.", "links": {"subreddit": ["bangtan"]}, "path": {"97-104": [[1950, 175], [1943, 182], [1943, 238], [2000, 238], [2000, 194], [1982, 194], [1982, 195], [1989, 203], [1989, 215], [1976, 229], [1976, 238], [1970, 238], [1970, 229], [1958, 217], [1943, 217], [1943, 199], [1961, 199], [1965, 195], [1964, 194], [1960, 194], [1960, 175]], "69-94": [[1943, 194], [1943, 238], [2000, 238], [2000, 194], [1982, 194], [1982, 195], [1989, 203], [1989, 216], [1976, 229], [1976, 238], [1969, 238], [1969, 228], [1958, 217], [1943, 217], [1943, 199], [1962, 199], [1966, 194]], "111-166, T:0-1": [[1966, 194], [1966, 197], [1961, 197], [1959, 199], [1960, 238], [1976, 238], [1977, 238], [1977, 232], [1978, 231], [1977, 230], [1977, 229], [1969, 221], [1969, 220], [1967, 218], [1967, 216], [1966, 215], [1966, 205], [1968, 203], [1968, 202], [1970, 198], [1973, 195], [1992, 195], [1996, 199], [1996, 201], [1998, 203], [1998, 217], [1996, 219], [1996, 220], [1987, 229], [1987, 230], [1986, 231], [1987, 232], [1987, 238], [1999, 238], [1999, 194]]}, "center": {"97-104": [1953, 227], "69-94": [1954, 227], "111-166, T:0-1": [1967, 231]}}, {"id": "u8el5a", "name": "BTS logo", "description": "A tricolor version of the BTS double trapezoid logo.", "links": {"website": ["https://en.wikipedia.org/wiki/BTS"], "subreddit": ["bangtan"]}, "path": {"113-165, T:0-1": [[1942, 207], [1942, 235], [1943, 235], [1948, 230], [1950, 230], [1955, 235], [1956, 234], [1956, 208], [1955, 207], [1950, 212], [1948, 212], [1943, 207]]}, "center": {"113-165, T:0-1": [1949, 221]}}, {"id": "u8ek3b", "name": "방탄", "description": "Korean for \"bangtan\", the first word of the band Bangtan Sonyeondan (BTS).", "links": {"website": ["https://en.wikipedia.org/wiki/BTS"], "subreddit": ["bangtan"]}, "path": {"111-153": [[1958, 226], [1958, 236], [1973, 236], [1973, 226]], "71-104": [[1983, 228], [1983, 238], [1997, 238], [1997, 228]], "154-166, T:0-1": [[1958, 226], [1958, 235], [1973, 235], [1973, 226]]}, "center": {"111-153": [1966, 231], "71-104": [1990, 233], "154-166, T:0-1": [1966, 231]}}, {"id": "u8eivo", "name": "보라해", "description": "The Korean text \"보라해\" (borahae) is a portmanteau of bora 
(violet) and saranghae (I love you), and means \"I purple you\". This is a symbol of love between BTS and their fandom, The ARMY, who often associate the color purple with love.", "links": {"website": ["https://www.urbandictionary.com/define.php?term=I%20Purple%20You"], "subreddit": ["bangtan"]}, "path": {"62-104": [[1944, 230], [1944, 238], [1967, 238], [1967, 230]], "112-166, T:0-1": [[1943, 197], [1943, 205], [1965, 205], [1965, 197]]}, "center": {"62-104": [1956, 234], "112-166, T:0-1": [1954, 201]}},

From e336fe452dd697616f1997827eab163cb277eb92 Mon Sep 17 00:00:00 2001
From: Hans5958
Date: Thu, 8 Jun 2023 11:41:22 +0700
Subject: [PATCH 07/21] Tidy up code

---
 tools/oneoff/migrate_atlas_format.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/tools/oneoff/migrate_atlas_format.py b/tools/oneoff/migrate_atlas_format.py
index efdb1c58..b507c39c 100644
--- a/tools/oneoff/migrate_atlas_format.py
+++ b/tools/oneoff/migrate_atlas_format.py
@@ -10,7 +10,6 @@
 from io import TextIOWrapper
 import re
 import json
-
 import tqdm
 
 END_IMAGE = 166

From 978757ce8fa48cea0d9716557a5f012cca80f123 Mon Sep 17 00:00:00 2001
From: Hans5958
Date: Thu, 15 Jun 2023 21:22:35 +0700
Subject: [PATCH 08/21] Implement per-entry patches

Instead of one temporary JSON file, contributors can now submit patches
in the form of separate files per entry, which will be merged by
`merge_out.py` without causing conflicts in the main `atlas.json`.
---
 tools/create_patch.py  |  36 +++++++++++
 tools/merge_out.py     | 131 ++++++++++++------------
 tools/redditcrawl.py   | 139 ++++++++++++++++++++++-------------
 tools/requirements.txt |   3 ++-
 web/all-authors.txt    |   7 +---
 5 files changed, 184 insertions(+), 132 deletions(-)
 create mode 100644 tools/create_patch.py

diff --git a/tools/create_patch.py b/tools/create_patch.py
new file mode 100644
index 00000000..8639c076
--- /dev/null
+++ b/tools/create_patch.py
@@ -0,0 +1,36 @@
import json
import os
import secrets
from pathlib import Path

patches_dir = "../data/patches/"
Path(patches_dir).mkdir(parents=True, exist_ok=True)

entry = None
entry_input = ""

print("Write your submission entry here.")
while entry is None:

	entry_input += input("> ")
	try:
		entry = json.loads(entry_input)
	except:
		pass
print()
print("Entry received!")
print()
print("Enter your username as the attribution to be shown on the about page.")
print("Leave it empty if you don't want to.")
print("You can use your Reddit username. Do not include the \"u/\" part.")
print("You can also use your GitHub username, but add \"gh:\" before your username (e.g. 
\"gh:octocat\")") +author = input("Author: ") + +if author: + entry['_author'] = author + +with open(f'{patches_dir}gh-{secrets.token_hex(2)}-{"-".join(entry["name"].split()).lower()}.json', 'w', encoding='utf-8') as out_file: + out_file.write(json.dumps(entry, ensure_ascii=False)) + +print("Patch created!") +print("You can commit this file directory, after that you can push and create a pull request.") \ No newline at end of file diff --git a/tools/merge_out.py b/tools/merge_out.py index 97417651..ab14b3e5 100644 --- a/tools/merge_out.py +++ b/tools/merge_out.py @@ -5,87 +5,98 @@ from scale_back import ScaleConfig -merge_source_file = 'temp-atlas.json' +out_ids = [] +atlas_ids = {} +authors = [] -with open(merge_source_file, 'r', encoding='UTF-8') as f1: - out_json = json.loads(f1.read()) +with open('../web/all-authors.txt', 'r') as authors_file: + authors = authors_file.read().strip().split() -format_all_entries(out_json) +with open('../web/read_ids.txt', 'r') as ids_file: + out_ids = ids_file.read().strip().split() + +with open('../web/atlas.json', 'r', encoding='utf-8') as atlas_file: + atlas_data = json.loads(atlas_file.read()) + +format_all_entries(atlas_file) base_image_path = os.path.join('..', 'web', '_img', 'canvas', 'place30') ScaleConfig.image1 = os.path.join(base_image_path, '159.png') scale_back.swap_source_dest('164', '165', os.path.join(base_image_path, '163_159.png')) -scale_back.scale_back_entries(out_json) +scale_back.scale_back_entries(atlas_file) scale_back.swap_source_dest('165', '166', os.path.join(base_image_path, '164_159.png')) -scale_back.scale_back_entries(out_json) +scale_back.scale_back_entries(atlas_file) scale_back.swap_source_dest('166', '167', os.path.join(base_image_path, '165_159.png')) -scale_back.scale_back_entries(out_json) +scale_back.scale_back_entries(atlas_file) -out_ids = set() -out_dupe_ids = set() -atlas_ids = {} +last_id = 0 -with open('../web/atlas.json', 'r', encoding='utf-8') as atlas_file: - atlas_json = json.loads(atlas_file.read()) - -for i, entry in enumerate(atlas_json): +for i, entry in enumerate(atlas_data): atlas_ids[entry['id']] = i + id = entry['id'] + if id.isnumeric() and int(id) > last_id and int(id) - last_id < 100: + last_id = int(id) -last_existing_id = list(atlas_json[-1]['id']) +patches_dir = "../data/patches/" +if not os.path.exists(patches_dir): + print("Patches folder not found. Exiting.") + exit() + +for filename in os.listdir(patches_dir): + f = os.path.join(patches_dir, filename) + + print(f"{filename}: Processing...") -for entry in out_json: - if entry['id'] == 0 or entry['id'] == '0': - # "Increment" the last ID to derive a new ID. - current_index = -1 - while current_index > -(len(last_existing_id)): - current_char = last_existing_id[current_index] - - if current_char == 'z': - last_existing_id[current_index] = '0' - current_index -= 1 - else: - if current_char == '9': - current_char = 'a' - else: - current_char = chr(ord(current_char) + 1) - last_existing_id[current_index] = current_char - break - entry['id'] = ''.join(last_existing_id) - -for entry in out_json: - if entry['id'] in out_ids: - print(f"Entry {entry['id']} has duplicates! Please resolve this conflict. This will be excluded from the merge.") - out_dupe_ids.add(entry['id']) - out_ids.add(entry['id']) - -for entry in out_json: - if entry['id'] in out_dupe_ids: + if not os.path.isfile(f) or not f.endswith('json'): continue - if 'edit' in entry and entry['edit']: - assert entry['id'] in atlas_ids, "Edit failed! ID not found on Atlas." 
- index = atlas_ids[entry['id']] + with open(f, 'r', encoding='utf-8') as entry_file: + entry = json.loads(entry_file.read()) - assert index != None, "Edit failed! ID not found on Atlas." + if '_reddit_id' in entry: + reddit_id = entry['_reddit_id'] + if reddit_id in out_ids: + print(f"{filename}: Submission from {entry['id']} has been included! This will be ignored from the merge.") + continue + out_ids.append(reddit_id) + del entry['_reddit_id'] - print(f"Edited {atlas_json[index]['id']} with {entry['edit']}") + if '_author' in entry: + author = entry['_author'] + if author not in authors: + authors.append(author) + del entry['_author'] - del entry['edit'] - atlas_json[index] = entry - elif entry['id'] in atlas_ids: - print(f"Edited {entry['id']} manually.") - atlas_json[atlas_ids[entry['id']]] = entry - else: - print(f"Added {entry['id']}.") - atlas_json.append(entry) + if entry['id'] in out_ids: + print(f"{filename}: Submission from {entry['id']} has been included! This will be ignored from the merge.") + continue + + if entry['id'] < 1: + last_id += 1 + print(f"{filename}: Entry is new, assigned ID {last_id}") + entry['id'] = str(last_id) + else: + out_ids.append(entry['id']) + + + if entry['id'] in atlas_ids: + index = atlas_ids[entry['id']] + print(f"{filename}: Edited {atlas_data[index]['id']}.") + atlas_data[index] = entry + else: + print(f"{filename}: Added {entry['id']}.") + atlas_data.append(entry) + + os.remove(f) print('Writing...') with open('../web/atlas.json', 'w', encoding='utf-8') as atlas_file: - per_line_entries(atlas_json, atlas_file) + per_line_entries(atlas_data, atlas_file) -with open('../data/read-ids.txt', 'a', encoding='utf-8') as read_ids_file: - with open('temp-read-ids.txt', 'r+', encoding='utf-8') as read_ids_temp_file: - read_ids_file.writelines(read_ids_temp_file.readlines()) - read_ids_temp_file.truncate(0) +with open('../data/read-ids.txt', 'w', encoding='utf-8') as ids_file: + ids_file.write("\n".join(out_ids) + "\n") + +with open('../web/all-authors.txt', 'w', encoding='utf-8') as authors_file: + authors_file.write("\n".join(authors) + "\n") print('All done.') \ No newline at end of file diff --git a/tools/redditcrawl.py b/tools/redditcrawl.py index f64f1cb7..e53cf295 100755 --- a/tools/redditcrawl.py +++ b/tools/redditcrawl.py @@ -17,74 +17,90 @@ 1. Run the script 2. Input the next ID to use 3. Manually resolve errors in temp-atlas-manual.json -4 a. Use merge_out.py, or... - b. a. Copy temp-atlas.json entries into web/_js/atlas.js (mind the edits!) - b. Copy temp-read-ids.txt IDs into data/read-ids.txt +4. a. Use merge_out.py, or... + b. a. Copy temp-atlas.json entries into web/_js/atlas.js (mind the edits!) + b. Copy temp-read-ids.txt IDs into data/read-ids.txt 5. 
Create a pull request """ -import praw +from praw import Reddit +from praw.models import Submission import json import time import re import traceback from aformatter import format_all, validate +from pathlib import Path +import humanize +from datetime import datetime +import secrets -with open('temp-atlas.json', 'w', encoding='utf-8') as OUT_FILE, open('temp-read-ids.txt', 'w') as READ_IDS_FILE, open('temp-atlas-manual.txt', 'w', encoding='utf-8') as FAIL_FILE: +patches_dir = "../data/patches/" +Path(patches_dir).mkdir(parents=True, exist_ok=True) - OUT_FILE_LINES = ['[\n', ']\n'] +def set_flair(submission, flair): + if has_write_access and submission.link_flair_text != flair: + flair_choices = submission.flair.choices() + flair = next(x for x in flair_choices if x["flair_text_editable"] and flair == x["flair_text"]) + submission.flair.select(flair["flair_template_id"]) - with open('credentials', 'r') as file: - credentials = file.readlines() - client_id = credentials[0].strip() - client_secret = credentials[1].strip() - username = credentials[2].strip() if len(credentials) > 3 else "" - password = credentials[3].strip() if len(credentials) > 3 else "" - reddit = praw.Reddit( - client_id=client_id, - client_secret=client_secret, - username=username, - password=password, - user_agent='atlas_bot' - ) +with open('credentials', 'r') as file: + credentials = file.readlines() + client_id = credentials[0].strip() + client_secret = credentials[1].strip() + username = credentials[2].strip() if len(credentials) > 3 else "" + password = credentials[3].strip() if len(credentials) > 3 else "" - has_write_access = not reddit.read_only - if not has_write_access: - print("Warning: No write access. Post flairs will not be updated.") - time.sleep(5) +reddit = Reddit( + client_id=client_id, + client_secret=client_secret, + username=username, + password=password, + user_agent='atlas_bot' +) - existing_ids = [] +has_write_access = not reddit.read_only +if not has_write_access: + print("Warning: No write access. Post flairs will not be updated. 
Waiting 5 seconds...")
	time.sleep(5)

print("Running...")

existing_ids = []

with open('../data/read-ids.txt', 'r') as edit_ids_file:
	for id in [x.strip() for x in edit_ids_file.readlines()]:
		existing_ids.append(id)

total_all_flairs = 0
count_dupe = 0
count_fail = 0
count_success = 0
count_total = 0

with open('temp-atlas-manual.txt', 'w', encoding='utf-8') as FAIL_FILE:

	submission: Submission
	for submission in reddit.subreddit('placeAtlas2').new(limit=2000):
		total_all_flairs += 1

		print(f"{submission.id}: Submitted {humanize.naturaltime(datetime.utcnow() - datetime.utcfromtimestamp(submission.created_utc))}.")

-		if (submission.id in existing_ids):
-			set_flair(submission, "Processed Entry")
-			print("Found first duplicate!")
-			duplicate_count += 1
-			if (duplicate_count > 0):
-				break
-			else:
-				continue
		if submission.id in existing_ids or Path(patches_dir + 'reddit-' + submission.id + '.json').is_file():
			set_flair(submission, "Processed Entry")
			print(f"{submission.id}: Submission is a duplicate! Skipped.")
			if (count_dupe == 1):
				print(f"{submission.id}: Second duplicate. Stopped!")
				break
			print(f"{submission.id}: First duplicate. Continue running.")
			count_dupe += 1
			continue

		print(f"{submission.id}: Processing...")

		if submission.link_flair_text == "New Entry" or submission.link_flair_text == "Edit Entry":

			try:

@@ -102,16 +118,11 @@ def set_flair(submission, flair):
			if submission_json:

				if submission.link_flair_text == "Edit Entry":

-					assert submission_json["id"] > 0, "Edit invalid because ID is tampered, it must not be 0 or -1!"
-
-					submission_json_dummy = {"id": submission_json["id"], "edit": submission.id}
-				else:
-					assert submission_json["id"] <= 0, "Addition invalid because ID is tampered, it must be 0 or -1!"
-
-					submission_json_dummy = {"id": submission.id}
+
+					submission_json_dummy = {"id": submission_json["id"], "_reddit_id": submission.id, "_author": submission.author.name}

				for key in submission_json:
					if not key in submission_json_dummy:
@@ -121,13 +132,11 @@ def set_flair(submission, flair):
				assert validation_status < 3, \
					"Submission invalid after validation. This may be caused by not enough points on the path."
+
+				with open(f'{patches_dir}reddit-{submission.id}-{"-".join(submission_json["name"].split()).lower()}.json', 'w', encoding='utf-8') as out_file:
+					out_file.write(json.dumps(submission_json, ensure_ascii=False))

-				add_comma_line = len(OUT_FILE_LINES) - 2
-				if len(OUT_FILE_LINES[add_comma_line]) > 2:
-					OUT_FILE_LINES[add_comma_line] = OUT_FILE_LINES[add_comma_line].replace('\n', ',\n')
-				OUT_FILE_LINES.insert(len(OUT_FILE_LINES) - 1, json.dumps(submission_json, ensure_ascii=False) + '\n')
-				READ_IDS_FILE.write(submission.id + '\n')
-				successcount += 1
+				count_success += 1
 				set_flair(submission, "Processed Entry")

 			except Exception as e:
@@ -140,12 +149,11 @@ def set_flair(submission, flair):
 					"==== CLEAN ====" + "\n\n" +
 					text + "\n\n"
 				)
-				failcount += 1
+				count_fail += 1
 				set_flair(submission, "Rejected Entry")
+				print(f"{submission.id}: Something went wrong! Rejected.")

-			print("Wrote " + submission.id + ", submitted " + str(round(time.time()-submission.created_utc)) + " seconds ago")
-			totalcount += 1
+		count_total += 1
+		print(f"{submission.id}: Processed!")

-	OUT_FILE.writelines(OUT_FILE_LINES)
-
-print(f"\n\nTotal all flairs: {total_all_flairs}\nSuccess: {successcount}/{totalcount}\nFail: {failcount}/{totalcount}\nPlease check temp-atlas-manual.txt for failed entries to manually resolve.")
+print(f"\n\nTotal all flairs: {total_all_flairs}\nSuccess: {count_success}/{count_total}\nFail: {count_fail}/{count_total}\nPlease check temp-atlas-manual.txt for failed entries to manually resolve.")
diff --git a/tools/requirements.txt b/tools/requirements.txt
index a4bb482c..23415418 100644
--- a/tools/requirements.txt
+++ b/tools/requirements.txt
@@ -1,2 +1,3 @@
 praw
-tqdm
\ No newline at end of file
+tqdm
+humanize
\ No newline at end of file
diff --git a/web/all-authors.txt b/web/all-authors.txt
index 17961726..e3273265 100644
--- a/web/all-authors.txt
+++ b/web/all-authors.txt
@@ -5075,7 +5075,6 @@ JohnnyHotshot
-robotic
 olly
 Shadox
-Ericbazinga
 MingCate
 SlipsSC_
 carlyc999
@@ -5128,7 +5127,7 @@ p1terdeN
 IncestSimulator2016
 zephyr12345
 Blizhazard
-Fishes_Glubs & GamerKingFaiz
+GamerKingFaiz
 Wodgam
 TheNomad
 VinsElBins
@@ -5160,7 +5159,6 @@ neurospex
 soopimus_
 SporekidX
 ForsenPlace
-scorpion24100 / ThePizzaMuncher
 Vapku
 BouchonEnPlastique
 SailorElei
@@ -5298,7 +5296,6 @@ HappyMerlin
 YummyGummyDrops
 Forcoy
 RookeMistake
-slanterns
 raudrin
 AriaNoire
 evaroussel
@@ -5407,4 +5404,4 @@ Hellmustang0226
 tiny2ooons
 duroki66
 Aloxite
-Polygonboy0
\ No newline at end of file
+Polygonboy0

From 1c1662a83703812704435e1b6e20ab8c4fe69b92 Mon Sep 17 00:00:00 2001
From: Hans5958
Date: Thu, 15 Jun 2023 21:45:29 +0700
Subject: [PATCH 09/21] Implement per-entry patches 2

---
 tools/merge_out.py | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/tools/merge_out.py b/tools/merge_out.py
index ab14b3e5..a4bd1b36 100644
--- a/tools/merge_out.py
+++ b/tools/merge_out.py
@@ -9,25 +9,25 @@
 atlas_ids = {}
 authors = []
 
-with open('../web/all-authors.txt', 'r') as authors_file:
+with open('../web/all-authors.txt', 'r', encoding='utf-8') as authors_file:
 	authors = authors_file.read().strip().split()
 
-with open('../web/read_ids.txt', 'r') as ids_file:
+with open('../data/read-ids.txt', 'r', encoding='utf-8') as ids_file:
 	out_ids = ids_file.read().strip().split()
 
 with open('../web/atlas.json', 'r', encoding='utf-8') as atlas_file:
 	atlas_data = json.loads(atlas_file.read())
 
-format_all_entries(atlas_file)
+# format_all_entries(atlas_file)
 
-base_image_path = os.path.join('..', 'web', '_img', 'canvas', 'place30')
-ScaleConfig.image1 = os.path.join(base_image_path, '159.png')
-scale_back.swap_source_dest('164', '165', os.path.join(base_image_path, '163_159.png'))
-scale_back.scale_back_entries(atlas_file)
-scale_back.swap_source_dest('165', '166', os.path.join(base_image_path, '164_159.png'))
-scale_back.scale_back_entries(atlas_file)
-scale_back.swap_source_dest('166', '167', os.path.join(base_image_path, '165_159.png'))
-scale_back.scale_back_entries(atlas_file)
+# base_image_path = os.path.join('..', 'web', '_img', 'canvas', 
'place30')
+# ScaleConfig.image1 = os.path.join(base_image_path, '159.png')
+# scale_back.swap_source_dest('164', '165', os.path.join(base_image_path, '163_159.png'))
+# scale_back.scale_back_entries(atlas_file)
+# scale_back.swap_source_dest('165', '166', os.path.join(base_image_path, '164_159.png'))
+# scale_back.scale_back_entries(atlas_file)
+# scale_back.swap_source_dest('166', '167', os.path.join(base_image_path, '165_159.png'))
+# scale_back.scale_back_entries(atlas_file)
 
 last_id = 0
 

From 6ad26a1b270fdfb8dbff0ac503d485bcfc50a30c Mon Sep 17 00:00:00 2001
From: Hans5958
Date: Thu, 15 Jun 2023 22:49:42 +0700
Subject: [PATCH 10/21] Implement per-entry patches 3

---
 tools/create_patch.py | 15 ++++++++-------
 tools/merge_out.py    |  8 ++++----
 2 files changed, 12 insertions(+), 11 deletions(-)

diff --git a/tools/create_patch.py b/tools/create_patch.py
index 8639c076..9e283ef3 100644
--- a/tools/create_patch.py
+++ b/tools/create_patch.py
@@ -9,7 +9,7 @@
 entry = None
 entry_input = ""
 
-print("Write your submission entry here.")
+print("Write/paste your JSON-formatted submission data here.")
 while entry is None:
 
 	entry_input += input("> ")
@@ -18,10 +18,10 @@
 	except:
 		pass
 print()
-print("Entry received!")
+print("Submission is valid!")
 print()
-print("Enter your username as the attribution to be shown on the about page.")
-print("Leave it empty if you don't want to.")
+print("Enter your username as the attribution to be shown on the About page.")
+print("Leave it empty if you don't want to be attributed.")
 print("You can use your Reddit username. Do not include the \"u/\" part.")
 print("You can also use your GitHub username, but add \"gh:\" before your username (e.g. \"gh:octocat\")")
 author = input("Author: ")
 
 if author:
 	entry['_author'] = author
 
+filename = f'gh-{secrets.token_hex(2)}-{"-".join(entry["name"].split()).lower()}.json'
-with open(f'{patches_dir}gh-{secrets.token_hex(2)}-{"-".join(entry["name"].split()).lower()}.json', 'w', encoding='utf-8') as out_file:
+with open(patches_dir + filename, 'w', encoding='utf-8') as out_file:
 	out_file.write(json.dumps(entry, ensure_ascii=False))
 
-print("Patch created!")
-print("You can commit this file directory, after that you can push and create a pull request.")
+print("Patch created as " + filename + "!")
+print("You can commit the created file directly, then push it and create a pull request.")
\ No newline at end of file
diff --git a/tools/merge_out.py b/tools/merge_out.py
index a4bd1b36..bbe8b914 100644
--- a/tools/merge_out.py
+++ b/tools/merge_out.py
@@ -18,16 +18,16 @@
 with open('../web/atlas.json', 'r', encoding='utf-8') as atlas_file:
 	atlas_data = json.loads(atlas_file.read())
 
-# format_all_entries(atlas_file)
+# format_all_entries(atlas_data)
 
 # base_image_path = os.path.join('..', 'web', '_img', 'canvas', 'place30')
 # ScaleConfig.image1 = os.path.join(base_image_path, '159.png')
 # scale_back.swap_source_dest('164', '165', os.path.join(base_image_path, '163_159.png'))
-# scale_back.scale_back_entries(atlas_file)
+# scale_back.scale_back_entries(atlas_data)
 # scale_back.swap_source_dest('165', '166', os.path.join(base_image_path, '164_159.png'))
-# scale_back.scale_back_entries(atlas_file)
+# scale_back.scale_back_entries(atlas_data)
 # scale_back.swap_source_dest('166', '167', os.path.join(base_image_path, '165_159.png'))
-# scale_back.scale_back_entries(atlas_file)
+# scale_back.scale_back_entries(atlas_data)

From a17b9df743ff3da2437a55c9f1fa69203c098293 Mon Sep 17 00:00:00 2001
From: Hans5958
Date: Thu, 15 Jun 2023 22:50:08 +0700
Subject: [PATCH 11/21] Update CONTRIBUTING.md regarding per-entry patches and
 other minor fixes

---
 CONTRIBUTING.md | 43 +++++++++++++++++++++++++++++++++----------
 1 file changed, 33 insertions(+), 10 deletions(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6d41364b..ce433b55 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -6,8 +6,6 @@ You may contribute to the project by submitting a Pull Request on the GitHub rep
 
 ## New Atlas entries
 
-> **Warning**: **WE ONLY ACCEPT NEW ENTRIES ON REDDIT!**
-
 To contribute to the map, we require a certain format for artwork region and labels. This can be generated on [the drawing mode](https://place-atlas.stefanocoding.me?mode=draw) on the website.
 
 To add a new entry, go to [the drawing mode](https://place-atlas.stefanocoding.me?mode=draw) and draw a shape/polygon around the region you'd like to describe. You can use the Undo, Redo, and Reset buttons to help you creating a good polygon. Make sure that the lines you're drawing don't form a [self-intersecting polygon](https://upload.wikimedia.org/wikipedia/commons/thumb/0/0f/Complex_polygon.svg/288px-Complex_polygon.svg.png).
@@ -26,34 +24,59 @@
 
 All fields but the name are optional. For example, a country flag doesn't necessarily need a description.
 
-Once you've entered all the information, you'll be presented with a pop-up window containing some [JSON](https://en.wikipedia.org/wiki/JSON)-formatted data. 
You can press the Post Direct to Reddit button and just press the send button on Reddit, or copy the entire JSON text and [create a new text post on the subreddit](https://www.reddit.com/r/placeAtlas2/submit). You don't need to add any other text; just directly send the data.
+Once you've entered all the information, you'll be presented with a pop-up window containing some [JSON](https://en.wikipedia.org/wiki/JSON)-formatted submission data. There are two preferred methods for submitting it.
+
+### Through Reddit
+
+You can press the Post Direct to Reddit button and just press the send button on Reddit, or copy the entire JSON text and [create a new text post on the subreddit](https://www.reddit.com/r/placeAtlas2/submit). You don't need to add any other text; just directly send the data.
 
 Remember to flair your post with New Entry. On New Reddit, click the Flair button on the bottom part, and select New Entry. On Old Reddit, click the select button on the "choose a flair" section instead.
 
+### Through GitHub
+
+If you know about Git and how to create a pull request on GitHub, you can try creating a patch that will be merged, along with other patches, by one of the members.
+
+You can use the provided `tools/create_patch.py` script. This script helps you create a working patch, along with additional data such as your name for attribution purposes. Simply run the script inside the `tools/` folder and follow the given instructions.
+
+If you want to do this manually (e.g. you don't have Python), you can create a patch by creating a `.json` file inside `data/patches`, containing the JSON-formatted data given earlier. You may add attribution by adding an `_author` key with the value of your Reddit username or your GitHub username plus a `gh:` prefix.
+
+```json5
+{
+    "id": 0,
+    // ...
+    "_author": "Hans5958_",
+    // or...
+    "_author": "gh:Hans5958",
+}
+```
+
+Once you have successfully created the patch, the file can be committed, and a pull request towards the `cleanup` branch can be created. A member will merge the pull request if it is adequate.
+
 ## Edits to Atlas entries
 
 Other than adding new ones, you can edit existing atlas entries.
 
 ### Using the web interface
 
-You can use the website to edit single entries easily. On the website, click Edit on an entry box. Afterwards, you are now on the drawing mode, editing the entry, in which you can follow the same instructions as [when creating a new entry](#new-atlas-entries). Upon submitting, please flair it as Edit Entry instead.
+You can use the website to edit single entries easily. On the website, click Edit on an entry box. Afterwards, you are now on the drawing mode, editing the entry, in which you can follow the same instructions as [when creating a new entry](#new-atlas-entries).
 
-As an alternative, you can also submit an issue on GitHub using [this form](https://github.com/placeAtlas/atlas/issues/new?assignees=&labels=entry+update&template=edit-entry.yml).
+Upon submitting, if you use Reddit, please flair it as Edit Entry instead. The method stays the same if you use GitHub.
+
+As an alternative, you can also submit an issue on GitHub using [this form](https://github.com/placeAtlas/atlas/issues/new?assignees=&labels=entry+update&template=edit-entry.yml) or report it on our Discord server.
 
 ### Manually
 
-Edits are also welcome on this repository through GitHub. You may use GitHub for bulk or large-scale changes, such as removing duplicates.
+Edits are also welcome on this repository using Git through GitHub. 
You may use Git or GitHub for bulk or large-scale changes, such as removing duplicates.
 
-`web/atlas.json` is where the Atlas data is located, in which you can edit on GitHub. Below is an example of an entry. The example has been expanded, but please save it in the way so each line is an entry which is minified.
+`web/atlas.json` is where the Atlas data is located, which you can edit on GitHub. The next section includes an example of an entry.
 
-Upon creating a fork of this repository and pushing the changes, create a Pull Request against the `cleanup` branch. A member will merge the pull request if it is adequate.
+Upon creating a fork of this repository and pushing the changes, create a pull request towards the `cleanup` branch. A member will merge the pull request if it is adequate.
 
 To help find duplicates, [use the Overlap mode](https://place-atlas.stefanocoding.me?mode=overlap).
 
-
 ### Example
 
-Hereforth is an example of the structured data.
+Below is an example of the structured data. The example has been expanded for readability, but please save it so that each entry is minified onto a single line. The `aformatter.py` script can help you with this.
 
 ```json5
 {

From da4efb6c60a06f33fbf9042a47ddc635209e9eb0 Mon Sep 17 00:00:00 2001
From: Hans5958
Date: Thu, 15 Jun 2023 22:57:35 +0700
Subject: [PATCH 12/21] Use index (position on Atlas data) for temporal order

---
 web/_js/main/main.js | 4 +++-
 web/_js/main/view.js | 4 ++--
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/web/_js/main/main.js b/web/_js/main/main.js
index 3174804f..4a60b860 100644
--- a/web/_js/main/main.js
+++ b/web/_js/main/main.js
@@ -500,7 +500,9 @@ async function init() {
 }
 
 function updateAtlasAll(atlas = atlasAll) {
-	for (const entry of atlas) {
+	for (const index in atlas) {
+		const entry = atlas[index]
+		entry._index = index
 		const currentLinks = entry.links
 		entry.links = {
 			website: [],
diff --git a/web/_js/main/view.js b/web/_js/main/view.js
index 9b209a7b..b4f9e373 100644
--- a/web/_js/main/view.js
+++ b/web/_js/main/view.js
@@ -370,10 +370,10 @@ function buildObjectsList(filter, sort = defaultSort) {
 			sortFunction = (a, b) => b.name.toLowerCase().localeCompare(a.name.toLowerCase())
 			break
 		case "newest":
-			sortFunction = (a, b) => b.id.length - a.id.length || b.id.localeCompare(a.id)
+			sortFunction = (a, b) => b._index - a._index
 			break
 		case "oldest":
-			sortFunction = (a, b) => a.id.length - b.id.length || a.id.localeCompare(b.id)
+			sortFunction = (a, b) => a._index - b._index
 			break
 		case "area":
 			sortFunction = (a, b) => calcPolygonArea(b.path) - calcPolygonArea(a.path)

From d3d00b9f0e565980d49c38951027624f4146db62 Mon Sep 17 00:00:00 2001
From: Hans5958
Date: Fri, 16 Jun 2023 00:03:33 +0700
Subject: [PATCH 13/21] Implement per-entry patches 4

Continue when a patch errors instead of ending the whole process
---
 tools/merge_out.py | 67 +++++++++++++++++++++++++---------------------
 1 file changed, 36 insertions(+), 31 deletions(-)

diff --git a/tools/merge_out.py b/tools/merge_out.py
index bbe8b914..ed950695 100644
--- a/tools/merge_out.py
+++ b/tools/merge_out.py
@@ -2,6 +2,7 @@
 import os
 from aformatter import format_all_entries, per_line_entries
 import scale_back
+import traceback
 
 from scale_back import ScaleConfig
 
@@ -50,44 +51,48 @@
 	if not os.path.isfile(f) or not f.endswith('json'):
 		continue
 
-	with open(f, 'r', encoding='utf-8') as entry_file:
-		entry = json.loads(entry_file.read())
+	try:
+		with open(f, 'r', encoding='utf-8') as entry_file:
+			entry = json.loads(entry_file.read())
 
-	if '_reddit_id' in entry:
-		reddit_id = entry['_reddit_id']
-		if reddit_id in out_ids:
-			print(f"{filename}: Submission from {entry['id']} has been included! This will be ignored from the merge.")
-			continue
-		out_ids.append(reddit_id)
-		del entry['_reddit_id']
-
-	if '_author' in entry:
-		author = entry['_author']
-		if author not in authors:
-			authors.append(author)
-		del entry['_author']
-
-	if entry['id'] in out_ids:
-		print(f"{filename}: Submission from {entry['id']} has been included! This will be ignored from the merge.")
-		continue
+		if '_reddit_id' in entry:
+			reddit_id = entry['_reddit_id']
+			if reddit_id in out_ids:
+				print(f"{filename}: Submission from {entry['id']} has already been included! This will be ignored in the merge.")
+				continue
+			out_ids.append(reddit_id)
+			del entry['_reddit_id']
+
+		if '_author' in entry:
+			author = entry['_author']
+			if author not in authors:
+				authors.append(author)
+			del entry['_author']
+
+		if entry['id'] in out_ids:
+			print(f"{filename}: Submission from {entry['id']} has already been included! This will be ignored in the merge.")
+			continue
 
-	if entry['id'] < 1:
-		last_id += 1
-		print(f"{filename}: Entry is new, assigned ID {last_id}")
-		entry['id'] = str(last_id)
-	else:
-		out_ids.append(entry['id'])
+		if isinstance(entry['id'], int) and entry['id'] < 1:
+			last_id += 1
+			print(f"{filename}: Entry is new, assigned ID {last_id}")
+			entry['id'] = str(last_id)
+		else:
+			out_ids.append(entry['id'])
 
-
-	if entry['id'] in atlas_ids:
-		index = atlas_ids[entry['id']]
-		print(f"{filename}: Edited {atlas_data[index]['id']}.")
-		atlas_data[index] = entry
-	else:
-		print(f"{filename}: Added {entry['id']}.")
-		atlas_data.append(entry)
+		if entry['id'] in atlas_ids:
+			index = atlas_ids[entry['id']]
+			print(f"{filename}: Edited {atlas_data[index]['id']}.")
+			atlas_data[index] = entry
+		else:
+			print(f"{filename}: Added {entry['id']}.")
+			atlas_data.append(entry)
 
-	os.remove(f)
+		os.remove(f)
+
+	except:
+		print(f"{filename}: Something went wrong; patch couldn't be implemented. 
Skipping.") + traceback.print_exc() print('Writing...') with open('../web/atlas.json', 'w', encoding='utf-8') as atlas_file: From d3d00b9f0e565980d49c38951027624f4146db62 Mon Sep 17 00:00:00 2001 From: Hans5958 Date: Fri, 16 Jun 2023 17:13:08 +0700 Subject: [PATCH 14/21] Also validate patches on CI --- .github/workflows/validate-json.yml | 3 +- tools/atlas.schema.json | 116 -------------------------- tools/ci/validate_json.py | 31 +++++-- tools/schema/atlas.json | 121 ++++++++++++++++++++++++++++ tools/schema/patch.json | 16 ++++ 5 files changed, 162 insertions(+), 125 deletions(-) delete mode 100644 tools/atlas.schema.json create mode 100644 tools/schema/atlas.json create mode 100644 tools/schema/patch.json diff --git a/.github/workflows/validate-json.yml b/.github/workflows/validate-json.yml index 36674e66..fb903685 100644 --- a/.github/workflows/validate-json.yml +++ b/.github/workflows/validate-json.yml @@ -23,4 +23,5 @@ jobs: - name: Validate run: | pip3 install -r tools/ci/requirements.txt - python3 tools/ci/validate_json.py web/atlas.json tools/atlas.schema.json \ No newline at end of file + python3 tools/ci/validate_json.py web/atlas.json tools/schema/atlas.json + python3 tools/ci/validate_json.py data/patches tools/schema/patch.json \ No newline at end of file diff --git a/tools/atlas.schema.json b/tools/atlas.schema.json deleted file mode 100644 index cf4f764d..00000000 --- a/tools/atlas.schema.json +++ /dev/null @@ -1,116 +0,0 @@ -{ - "$schema": "https://json-schema.org/draft-07/schema", - "type": "array", - "items": { - "type": "object", - "properties": { - "id": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer", - "minimum": 0 - } - ], - "description": "The ID of the entry. Usually this is the post ID of the new entry submission." - }, - "name": { - "type": "string", - "description": "The short, descriptive name of the entry." - }, - "description": { - "type": "string", - "description": "The description of the entry. that will also be understood by somebody not familiar with the topic. Usually, the first sentence on Wikipedia is a good example." - }, - "links": { - "type": "object", - "description": "The links related to the entry.", - "properties": { - "subreddit": { - "type": "array", - "description": "Subreddits that's either most relevant to the topic, or that was responsible for creating the artwork, excluding the r/.", - "items": { - "type": "string", - "description": "A subreddit that's either most relevant to the topic, or that was responsible for creating the artwork.", - "pattern": "^[A-Za-z0-9][A-Za-z0-9_]{1,20}$", - "minItems": 1 - } - }, - "website": { - "type": "array", - "description": "URL to websites related to the entry, including the http/https protocol. 
If you're describing a project, the project's main website would be suitable here.", - "items": { - "type": "string", - "description": "The URL to a website related to the entry.", - "pattern": "^https?://[^\\s/$.?#].[^\\s]*$", - "minItems": 1 - } - }, - "discord": { - "type": "array", - "description": "Invite codes of Discord servers related to the entry (excluding discord.gg/)", - "items": { - "type": "string", - "description": "The invite code of a Discord server related to the entry.", - "minItems": 1 - } - }, - "wiki": { - "type": "array", - "description": "Wiki pages related to the entry.", - "items": { - "type": "string", - "description": "The title of the wiki page related to the entry.", - "minItems": 1 - } - } - }, - "additionalProperties": false - }, - "path": { - "type": "object", - "description": "The path of the entry.", - "patternProperties": { - "^(\\d+(-\\d+)?|\\w+(:\\d+(-\\d+)?)?)(, (\\d+(-\\d+)?|\\w+(:\\d+(-\\d+)?)?))*$": { - "type": "array", - "description": "A period containing the path points.", - "items": { - "type": "array", - "description": "A point.", - "items": { - "type": "number" - }, - "minItems": 2, - "maxItems": 2 - }, - "minItems": 3 - } - }, - "additionalProperties": false, - "minProperties": 1 - }, - "center": { - "type": "object", - "description": "The center of the entry.", - "patternProperties": { - "^(\\d+(-\\d+)?|\\w+(:\\d+(-\\d+)?)?)(, (\\d+(-\\d+)?|\\w+(:\\d+(-\\d+)?)?))*$": { - "type": "array", - "description": "A period containing the center point.", - "items": { - "type": "number", - "description": "A point." - }, - "minItems": 2, - "maxItems": 2 - } - }, - "additionalProperties": false, - "minProperties": 1 - } - }, - "required": ["id", "name", "description", "links", "path", "center"], - "additionalItems": true - } -} \ No newline at end of file diff --git a/tools/ci/validate_json.py b/tools/ci/validate_json.py index a9fc25d5..cf698286 100644 --- a/tools/ci/validate_json.py +++ b/tools/ci/validate_json.py @@ -2,23 +2,38 @@ import sys import json -from jsonschema import validate +from jsonschema import validate, RefResolver +from pathlib import Path, PurePosixPath +import os -atlasPath = "./../../web/atlas.json" +instance_path = "../../web/atlas.json" # path override as 1st param: validate_json.py path_to_file.json if (len(sys.argv) > 1): - atlasPath = sys.argv[1] + instance_path = sys.argv[1] -schemaPath = "./../atlas.schema.json" +schema_path = "../schema/atlas.json" # schema override as 2nd param: validate_json.py [...] 
path_to_schema.json
 if (len(sys.argv) > 2):
-	schemaPath = sys.argv[2]
+	schema_path = sys.argv[2]
 
-atlas = json.load(open(atlasPath, "r", encoding='utf-8'))
-schema = json.load(open(schemaPath, "r", encoding='utf-8'))
+relative_path = "file:" + str(PurePosixPath(Path(os.getcwd(), schema_path)))
 
-validate(atlas, schema)
+schema = json.load(open(schema_path, "r", encoding='utf-8'))
+
+resolver = RefResolver(relative_path, schema)
+if os.path.isdir(instance_path):
+	for filename in os.listdir(instance_path):
+		f = os.path.join(instance_path, filename)
+		print(f)
+
+		instance = json.load(open(f, "r", encoding='utf-8'))
+		validate(instance, schema, resolver=resolver)
+elif os.path.isfile(instance_path):
+	print(instance_path)
+	instance = json.load(open(instance_path, "r", encoding='utf-8'))
+	validate(instance, schema, resolver=resolver)
 
 print("JSON is valid")
\ No newline at end of file
diff --git a/tools/schema/atlas.json b/tools/schema/atlas.json
new file mode 100644
index 00000000..ebbcbb18
--- /dev/null
+++ b/tools/schema/atlas.json
@@ -0,0 +1,121 @@
+{
+	"$schema": "https://json-schema.org/draft-07/schema",
+	"type": "array",
+	"definitions": {
+		"entry": {
+			"type": "object",
+			"properties": {
+				"id": {
+					"oneOf": [
+						{
+							"type": "string"
+						},
+						{
+							"type": "integer",
+							"minimum": 0
+						}
+					],
+					"description": "The ID of the entry. Usually this is the post ID of the new entry submission."
+				},
+				"name": {
+					"type": "string",
+					"description": "The short, descriptive name of the entry."
+				},
+				"description": {
+					"type": "string",
+					"description": "The description of the entry that will also be understood by somebody not familiar with the topic. Usually, the first sentence on Wikipedia is a good example."
+				},
+				"links": {
+					"type": "object",
+					"description": "The links related to the entry.",
+					"properties": {
+						"subreddit": {
+							"type": "array",
+							"description": "Subreddits that are either most relevant to the topic or that were responsible for creating the artwork, excluding the r/ prefix.",
+							"items": {
+								"type": "string",
+								"description": "A subreddit that's either most relevant to the topic, or that was responsible for creating the artwork.",
+								"pattern": "^[A-Za-z0-9][A-Za-z0-9_]{1,20}$",
+								"minItems": 1
+							}
+						},
+						"website": {
+							"type": "array",
+							"description": "URLs to websites related to the entry, including the http/https protocol. 
If you're describing a project, the project's main website would be suitable here.", + "items": { + "type": "string", + "description": "The URL to a website related to the entry.", + "pattern": "^https?://[^\\s/$.?#].[^\\s]*$", + "minItems": 1 + } + }, + "discord": { + "type": "array", + "description": "Invite codes of Discord servers related to the entry (excluding discord.gg/)", + "items": { + "type": "string", + "description": "The invite code of a Discord server related to the entry.", + "minItems": 1 + } + }, + "wiki": { + "type": "array", + "description": "Wiki pages related to the entry.", + "items": { + "type": "string", + "description": "The title of the wiki page related to the entry.", + "minItems": 1 + } + } + }, + "additionalProperties": false + }, + "path": { + "type": "object", + "description": "The path of the entry.", + "patternProperties": { + "^(\\d+(-\\d+)?|\\w+(:\\d+(-\\d+)?)?)(, (\\d+(-\\d+)?|\\w+(:\\d+(-\\d+)?)?))*$": { + "type": "array", + "description": "A period containing the path points.", + "items": { + "type": "array", + "description": "A point.", + "items": { + "type": "number" + }, + "minItems": 2, + "maxItems": 2 + }, + "minItems": 3 + } + }, + "additionalProperties": false, + "minProperties": 1 + }, + "center": { + "type": "object", + "description": "The center of the entry.", + "patternProperties": { + "^(\\d+(-\\d+)?|\\w+(:\\d+(-\\d+)?)?)(, (\\d+(-\\d+)?|\\w+(:\\d+(-\\d+)?)?))*$": { + "type": "array", + "description": "A period containing the center point.", + "items": { + "type": "number", + "description": "A point." + }, + "minItems": 2, + "maxItems": 2 + } + }, + "additionalProperties": false, + "minProperties": 1 + } + }, + "required": ["id", "name", "description", "links", "path", "center"], + "additionalItems": true + } + }, + "items": { + "$ref": "#/definitions/entry" + } +} \ No newline at end of file diff --git a/tools/schema/patch.json b/tools/schema/patch.json new file mode 100644 index 00000000..081aa57a --- /dev/null +++ b/tools/schema/patch.json @@ -0,0 +1,16 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "$ref": "atlas.json#/definitions/entry", + "properties": { + "_author": { + "type": "string", + "description": "Patch only: Author of the entry.", + "minLength": 1 + }, + "_reddit_id": { + "type": "string", + "description": "Patch only: Submission ID, if submitted from Reddit.", + "minLength": 1 + } + } +} \ No newline at end of file From 6eafcefac6b20af0762d458cbf536cb0031030f4 Mon Sep 17 00:00:00 2001 From: Hans5958 Date: Fri, 16 Jun 2023 17:23:35 +0700 Subject: [PATCH 15/21] Add information regarding per-entry patches when submitting --- web/index.html | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/web/index.html b/web/index.html index 82770364..fd2c9c11 100644 --- a/web/index.html +++ b/web/index.html @@ -344,8 +344,10 @@ From 5dca84020e301b957b0f9410419be325e1a3c472 Mon Sep 17 00:00:00 2001 From: Hans5958 Date: Fri, 16 Jun 2023 17:40:56 +0700 Subject: [PATCH 16/21] Adjust contributors to not just from Reddit --- web/_js/about.js | 32 +++++++++++++++++++++++++------- web/about.html | 6 +++--- 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/web/_js/about.js b/web/_js/about.js index 7b444d3a..1dad15c7 100644 --- a/web/_js/about.js +++ b/web/_js/about.js @@ -5,17 +5,35 @@ * Licensed under AGPL-3.0 (https://place-atlas.stefanocoding.me/license.txt) */ -const redditWrapperEl = document.querySelector('#reddit-contributors-wrapper') +const contributorsEl = 
document.querySelector('#contributors-wrapper')
+
+// <i aria-label="GitHub:" class="bi bi-github"></i>
+const gitHubEl = document.createElement("i")
+gitHubEl.ariaLabel = "GitHub:"
+gitHubEl.className = "bi bi-github"
+
 fetch('all-authors.txt')
 	.then(response => response.text())
-	.then(text => text.trim().split('\n').sort())
+	.then(text => text.trim().split('\n').sort((a, b) => {
+		const aSplit = a.split(':')
+		const bSplit = b.split(':')
+		return aSplit[aSplit.length - 1].localeCompare(bSplit[bSplit.length - 1])
+	}))
 	.then(contributors => {
-		document.querySelector('#reddit-contributors-count').textContent = contributors.length
+		document.querySelector('#contributors-count').textContent = contributors.length
 		for (const contributor of contributors) {
 			const userEl = document.createElement('a')
-			userEl.href = 'https://reddit.com/user/' + contributor
-			userEl.textContent = contributor
-			redditWrapperEl.appendChild(userEl)
-			redditWrapperEl.appendChild(document.createTextNode(' '))
+			const contributorSplit = contributor.split(':')
+			if (contributorSplit[0] === "gh") {
+				const contributor1 = contributorSplit[1]
+				userEl.href = 'https://github.com/' + contributor1
+				userEl.appendChild(gitHubEl.cloneNode())
+				userEl.appendChild(document.createTextNode(' ' + contributor1))
+			} else {
+				userEl.href = 'https://reddit.com/user/' + contributor
+				userEl.textContent = contributor
+			}
+			contributorsEl.appendChild(userEl)
+			contributorsEl.appendChild(document.createTextNode(' '))
 		}
 	})
\ No newline at end of file
diff --git a/web/about.html b/web/about.html
index 44ef6319..da1d4821 100644
--- a/web/about.html
+++ b/web/about.html
@@ -180,10 +180,10 @@
 			<h2>Project Contributors</h2>
 
-			<h3>Reddit Contributors (<span id="reddit-contributors-count"></span>)</h3>
-			<p>The 2022 Atlas would not have been possible without the help of our Reddit contributors.</p>
+			<h3>Contributors (<span id="contributors-count"></span>)</h3>
+			<p>The 2022 Atlas would not have been possible without the help of our contributors.</p>
 			<p>Thank you to everyone who submitted new entries, amended existing ones, reported bugs and just supported the project in general.</p>
 
-			<div id="reddit-contributors-wrapper"></div>
+			<div id="contributors-wrapper"></div>
 

From 5ebf030b99ed0b949feefc599ab208276897a23f Mon Sep 17 00:00:00 2001
From: Hans5958
Date: Fri, 16 Jun 2023 17:42:22 +0700
Subject: [PATCH 17/21] Use _ for separator on Python file names

---
 tools/ci/build-prod.sh                          | 2 +-
 tools/ci/{cdn-to-local.py => cdn_to_local.py}   | 0
 tools/oneoff/{all-authors.py => all_authors.py} | 0
 3 files changed, 1 insertion(+), 1 deletion(-)
 rename tools/ci/{cdn-to-local.py => cdn_to_local.py} (100%)
 rename tools/oneoff/{all-authors.py => all_authors.py} (100%)

diff --git a/tools/ci/build-prod.sh b/tools/ci/build-prod.sh
index d73569bc..ebe64fb2 100644
--- a/tools/ci/build-prod.sh
+++ b/tools/ci/build-prod.sh
@@ -8,7 +8,7 @@ rm -rf .parcel-cache
 cp -r web/ dist-temp/
 
 npm i
-python tools/ci/cdn-to-local.py
+python tools/ci/cdn_to_local.py
 
 npx parcel build dist-temp/index.html dist-temp/**.html --dist-dir "dist" --no-source-maps --no-content-hash
 rm -rf dist-temp
diff --git a/tools/ci/cdn-to-local.py b/tools/ci/cdn_to_local.py
similarity index 100%
rename from tools/ci/cdn-to-local.py
rename to tools/ci/cdn_to_local.py
diff --git a/tools/oneoff/all-authors.py b/tools/oneoff/all_authors.py
similarity index 100%
rename from tools/oneoff/all-authors.py
rename to tools/oneoff/all_authors.py

From d626eca5f4585ac47239f5a883962737f295efeb Mon Sep 17 00:00:00 2001
From: Hans5958
Date: Fri, 16 Jun 2023 17:54:44 +0700
Subject: [PATCH 18/21] Adjust JSON schema

---
 tools/schema/atlas.json | 19 ++++++++++++++-----
 1 file changed, 14 insertions(+), 5 deletions(-)

diff --git a/tools/schema/atlas.json b/tools/schema/atlas.json
index ebbcbb18..4559efb8 100644
--- a/tools/schema/atlas.json
+++ b/tools/schema/atlas.json
@@ -12,14 +12,21 @@
 						},
 						{
 							"type": "integer",
-							"minimum": 0
+							"minimum": 1
+						},
+						{
+							"type": "integer",
+							"minimum": -1,
+							"maximum": 0,
+							"description": "The ID of the entry. The value is a placeholder for new entries."
 						}
 					],
-					"description": "The ID of the entry. Usually this is the post ID of the new entry submission."
+					"description": "The ID of the entry. Usually, this is the post ID of the new entry submission, as either a string or a number."
 				},
 				"name": {
 					"type": "string",
-					"description": "The short, descriptive name of the entry."
+ "description": "The short, descriptive name of the entry.", + "minLength": 1 }, "description": { "type": "string", @@ -55,7 +62,8 @@ "items": { "type": "string", "description": "The invite code of a Discord server related to the entry.", - "minItems": 1 + "minItems": 1, + "minLength": 1 } }, "wiki": { @@ -64,7 +72,8 @@ "items": { "type": "string", "description": "The title of the wiki page related to the entry.", - "minItems": 1 + "minItems": 1, + "minLength": 1 } } }, From a599b09929bd0095a00b5e2f865487f6db8fab2b Mon Sep 17 00:00:00 2001 From: Hans5958 Date: Fri, 16 Jun 2023 18:01:00 +0700 Subject: [PATCH 19/21] Revert debugging code --- tools/redditcrawl.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/redditcrawl.py b/tools/redditcrawl.py index e53cf295..9ad68aad 100755 --- a/tools/redditcrawl.py +++ b/tools/redditcrawl.py @@ -82,7 +82,7 @@ def set_flair(submission, flair): with open('temp-atlas-manual.txt', 'w', encoding='utf-8') as FAIL_FILE: submission: Submission - for submission in reddit.subreddit('u_Hans5958_').new(limit=5): + for submission in reddit.subreddit('placeAtlas2').new(limit=1000): total_all_flairs += 1 print(f"{submission.id}: Submitted {humanize.naturaltime(datetime.utcnow() - datetime.utcfromtimestamp(submission.created_utc))}.") From d47ff9d7047e1b22cfcb9819367f2b09ab58e6d0 Mon Sep 17 00:00:00 2001 From: Hans5958 Date: Sun, 18 Jun 2023 17:02:59 +0700 Subject: [PATCH 20/21] Add contributors from Reddit (July 2022-July 2023) Usually I would use Pushshift, but we don't have it anymore. This is done by seeking the subreddit's timeline and using the search feature. Hopefully this covers all of it. 153 (+ me) new contributors, thank you! --- web/all-authors.txt | 154 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 154 insertions(+) diff --git a/web/all-authors.txt b/web/all-authors.txt index e3273265..3186ddb8 100644 --- a/web/all-authors.txt +++ b/web/all-authors.txt @@ -5405,3 +5405,157 @@ tiny2ooons duroki66 Aloxite Polygonboy0 +Loic78570 +TheSuperR5 +Obi7199 +Sollydeu +NoNeedleworker531 +SadPinguu +NoelleTGS +christmasmanexists +TheDrew-23 +BurakOdm +Nobodytheinvisible +poundmycake +PewdiepieFanBoi69420 +yaseensherif_ +KingKurto_ +Xtheman1674 +Frolainheu +Grahnolaxwastaken +GwendolynGravers +Mundane_Board_3277 +include_username_h +LuffytheRocky +profemain +Budgerigar17 +guyguy46383758 +Kapt0 +SkayaTheKarp +The-Drumstick-Empire +Downtown-Stand1109 +No_Ad3819 +Hans5958_ +TheKingOfKings75 +Randomcoolvids_YT +DOMOHEAD +Macaroni_TheSecond +TED2622 +Typical_Attention105 +Afraid_Success_4836 +Choice_Ad_9562 +NjordLum +MarvelsSpooderMan +Gagas33 +Mistery_0 +Nacil_54 +moreorlesser +TheRedstoneRazor +Intelligent_Image975 +ThatYugoGuy +m654zy +imskyyc +Eaglewolf13 +Spaceman333_exe +FishingGuppy +cyingbabyy +ESoreos +Veroune_ +Senior_Broccoli5485 +MisssSheep +Licensed2Chill +THE_BATTEUR +Furry_Memelord +usernameista +ventureDIIIIED +beepumbra +bubuarana +Last-Adhesiveness-84 +srurskem +itsnotgood1337 +PrteaRhea +Linkinito +surelychoo +Dizzy-Office-307 +theswannwholaughs +Leotzuafk +DavidCZ200510 +Mathlenormand +hydre39 +Hajimes_acid +FNAFB_true_fan +Canada_LaVearn +Break_Emotional +LukenTosa +Rydoggo5392 +Lait--Fraise +Fishcracks13 +ilyessboui +Ronkad +OrmanRedwood +jamontamo +Pikafreak108 +Pugo0 +Suspicious_Price2037 +Mystichunterz +recitedStrawfox +lsoroc +Lioli_ +Key-Control8107 +How-did-we-get-here3 +r0xANDt0l +sqrtney +mr_terms_and_perms +Greyflex +Chandler8105 +Raider440 +zonkerberg +Strayox +Fincunder +Rexzilarate-2 +kuane2000 
+f0rmidablez
+PhireKappa
+SolkaP7
+Left-Ambition-5127
+Nihekan368
+parkas_
+hydrielax
+Sfa11305
+Yeet_Away_The_Pain
+Inevitable_Sail_826
+WtvrBro
+Evaberer
+SunnyM0on
+Teblefer
+nuwenlee
+heevanington
+OJack18
+TheRealDunko
+Podongos
+Muff3ntop
+Spyne34
+Enyrox
+SkalxV
+Consistent_Squirrel
+Living_Psychology108
+TapleStape
+Eldipypapuh
+TrollusRT
+skitou
+KingSammelot
+Adventurous-Rock5765
+AldoSpacewool
+tipoima
+TempleTerry
+IntelligentHat2308
+Hatsuku39
+johnthesoap
+ktwombley
+SomeFrenchFurry
+elijahthetrashman
+GamesTheOracle
+waddleguin
+GDJosef
+eri531
+-Yox-

From 414ee149ef58161e694e7dcfaf85c2af89edadcc Mon Sep 17 00:00:00 2001
From: Hans5958
Date: Tue, 20 Jun 2023 22:59:29 +0700
Subject: [PATCH 21/21] Fix behavior when merging edit patches

---
 tools/merge_out.py | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/tools/merge_out.py b/tools/merge_out.py
index ed950695..2371f10a 100644
--- a/tools/merge_out.py
+++ b/tools/merge_out.py
@@ -63,21 +63,23 @@
 			out_ids.append(reddit_id)
 			del entry['_reddit_id']
 
+		# This wouldn't work if it is an edit.
+		# If needed, we can add a type to the patch to be more foolproof.
+		# if entry['id'] in out_ids:
+		# 	print(f"{filename}: Submission from {entry['id']} has been included! This will be ignored from the merge.")
+		# 	continue
+
 		if '_author' in entry:
 			author = entry['_author']
 			if author not in authors:
 				authors.append(author)
 			del entry['_author']
 
-		if entry['id'] in out_ids:
-			print(f"{filename}: Submission from {entry['id']} has been included! This will be ignored from the merge.")
-			continue
-
-		if entry['id'] < 1:
+		if isinstance(entry['id'], int) and entry['id'] < 1:
 			last_id += 1
 			print(f"{filename}: Entry is new, assigned ID {last_id}")
 			entry['id'] = str(last_id)
-		else:
+		elif entry['id'] not in out_ids:
 			out_ids.append(entry['id'])
 
 		if entry['id'] in atlas_ids:
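For reference, a per-entry patch file of the kind `merge_out.py` consumes might look like the sketch below. This is an illustration, not a file from the repository: the name, description, coordinates, `gh:` author handle, and Reddit submission ID are all made up, and the `-1` ID uses the placeholder range that the adjusted schema reserves for new entries.

```json5
{
	// Placeholder ID: merge_out.py assigns the next real ID during the merge.
	"id": -1,
	"name": "Example artwork", // hypothetical
	"description": "A hypothetical entry, used only to illustrate the patch format.",
	"links": {
		"subreddit": ["placeAtlas2"],
		"website": ["https://example.com"]
	},
	// One period key ("start-end"), at least 3 path points, and a 2-number center.
	"path": { "1-166": [[0, 0], [0, 10], [10, 10]] },
	"center": { "1-166": [3, 7] },
	// Patch-only metadata (see tools/schema/patch.json), removed during the merge.
	"_author": "gh:ExampleUser",
	"_reddit_id": "abc123"
}
```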
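A patch like the one above can also be checked locally with the same validator the CI runs; passing a single file works because `validate_json.py` branches on `os.path.isfile`, and the `$ref` to `atlas.json` resolves as long as the schema path is given relative to the repository root. The patch file name here is hypothetical:

```sh
pip3 install -r tools/ci/requirements.txt
# Validate one patch file against the patch schema (run from the repository root)
python3 tools/ci/validate_json.py data/patches/example-entry.json tools/schema/patch.json
```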