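# Merge patch files from data/patches/ into web/atlas.json, assigning IDs to
# new entries and recording processed Reddit submission IDs and author names.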

import json
import os
import traceback

from aformatter import format_all_entries, per_line_entries

import scale_back
from scale_back import ScaleConfig

IS_DEPLOY_PREVIEW = False

if os.getenv('NETLIFY') == 'true' and os.getenv('CONTEXT') == 'deploy-preview':
    IS_DEPLOY_PREVIEW = True

out_ids = []
atlas_ids = {}
authors = []

# Walk up until we reach the repository root, marked by README.md.
while not os.path.exists('README.md'):
    os.chdir('..')

with open('web/all-authors.txt', 'r', encoding='utf-8') as authors_file:
    authors = authors_file.read().strip().split()

with open('data/read-ids.txt', 'r', encoding='utf-8') as ids_file:
    out_ids = ids_file.read().strip().split()

with open('web/atlas.json', 'r', encoding='utf-8') as atlas_file:
    atlas_data = json.loads(atlas_file.read())

last_id = 0

if not IS_DEPLOY_PREVIEW:
    # Track the highest numeric ID seen so far, so new entries can continue
    # from it. Jumps of 100 or more over the running maximum are ignored as
    # outliers.
    for i, entry in enumerate(atlas_data):
        atlas_ids[entry['id']] = i
        entry_id = entry['id']
        if isinstance(entry_id, str) and entry_id.isnumeric():
            entry_id = int(entry_id)
        if isinstance(entry_id, int) and entry_id > last_id and entry_id - last_id < 100:
            last_id = entry_id
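
# Patches are one-off JSON files dropped into data/patches/; the permanent
# patch file is re-applied on every run and is never deleted.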

patches_dir = "data/patches/"
permanent_patch_file = "tools/temp-atlas.json"

if not os.path.exists(patches_dir):
    print("Patches folder not found. Exiting.")
    exit()

base_image_path = os.path.join('web', '_img', 'canvas', 'place30')
ScaleConfig.image1 = os.path.join(base_image_path, '159.png')

filenames = os.listdir(patches_dir)
filenames.append(permanent_patch_file)
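
# Process each patch: parse it, normalise its entries, merge them into
# atlas_data, then delete the patch file (except the permanent one).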

for filename in filenames:
    is_permanent_file = filename == permanent_patch_file
    if is_permanent_file:
        f = filename
    else:
        f = os.path.join(patches_dir, filename)

    print(f"{filename}: Processing...")

    if not os.path.isfile(f) or not f.endswith('json'):
        continue

    try:
        with open(f, 'r', encoding='utf-8') as entry_file:
            entries = json.loads(entry_file.read())
            if not isinstance(entries, list):
                entries = [entries]
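
        # Note: the scale_back calls below are assumed, from their names and
        # arguments, to remap entry coordinates between canvas expansion
        # stages (periods 164 through 167) using the reference images.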

        format_all_entries(entries)

        scale_back.swap_source_dest('164', '165', os.path.join(base_image_path, '163_159.png'))
        scale_back.scale_back_entries(entries)
        scale_back.swap_source_dest('165', '166', os.path.join(base_image_path, '164_159.png'))
        scale_back.scale_back_entries(entries)
        scale_back.swap_source_dest('166', '167', os.path.join(base_image_path, '165_159.png'))
        scale_back.scale_back_entries(entries)

        # Add TFC if the entry is in the final canvas.
        for entry in entries:
            for key in ['path', 'center']:
                for period, value in entry[key].items():
                    if ('164' in period or '165' in period or '166' in period) and 'T' not in period:
                        entry[key][period + ', T:0-1'] = value
                        del entry[key][period]
                        # Stop iterating: the dict was just mutated.
                        break
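
        # Each patch entry may carry temporary metadata keys (prefixed with
        # an underscore) that are stripped before the entry is merged.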

        for entry in entries:
            if entry is None:
                continue

            if '_reddit_id' in entry:
                reddit_id = entry['_reddit_id']
                if reddit_id in out_ids:
                    print(f"{filename}: Submission from {entry['id']} has already been included! This will be ignored from the merge.")
                    continue
                out_ids.append(reddit_id)
                del entry['_reddit_id']

            # This wouldn't work if it is an edit.
            # If needed, we can add a type to the patch to be more foolproof.
            # if entry['id'] in out_ids:
            #     print(f"{filename}: Submission from {entry['id']} has already been included! This will be ignored from the merge.")
            #     continue

            if '_author' in entry:
                author = entry['_author']
                if author not in authors:
                    authors.append(author)
                del entry['_author']

            if (isinstance(entry['id'], int) and entry['id'] < 1) or entry['id'] == '0':
                # Entry is new: assign the next free ID. Deploy previews count
                # downwards so they never collide with IDs assigned for real.
                if IS_DEPLOY_PREVIEW:
                    last_id -= 1
                else:
                    last_id += 1
                print(f"{filename}: Entry is new, assigned ID {last_id}")
                entry['id'] = last_id
            elif isinstance(entry['id'], str) and entry['id'].isnumeric():
                entry['id'] = int(entry['id'])
            elif not is_permanent_file and isinstance(entry['id'], str) and len(entry['id']) > 5 and entry['id'] not in out_ids:
                out_ids.append(entry['id'])

            if entry['id'] in atlas_ids:
                index = atlas_ids[entry['id']]
                print(f"{filename}: Edited {atlas_data[index]['id']}.")
                atlas_data[index] = entry
            else:
                print(f"{filename}: Added {entry['id']}.")
                atlas_data.append(entry)

        if not is_permanent_file:
            os.remove(f)

    except Exception:
        print(f"{filename}: Something went wrong; patch couldn't be implemented. Skipping.")
        traceback.print_exc()
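
# Write everything back out: the merged atlas, the list of processed Reddit
# submission IDs, and the author list.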

print('Writing...')

with open('web/atlas.json', 'w', encoding='utf-8') as atlas_file:
    per_line_entries(atlas_data, atlas_file)

with open('data/read-ids.txt', 'w', encoding='utf-8') as ids_file:
    ids_file.write("\n".join(out_ids) + "\n")

with open('web/all-authors.txt', 'w', encoding='utf-8') as authors_file:
    authors_file.write("\n".join(authors) + "\n")

print('All done.')