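"""Merge new and edited entries from temp_atlas.json into ../web/atlas.json."""
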
import json
import os
import formatter
import scale_back
from scale_back import ScaleConfig
merge_source_file = 'temp_atlas.json'
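
# Load the batch of entries to merge.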
with open(merge_source_file, 'r', encoding='utf-8') as f1:
    out_json = json.loads(f1.read())
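
# Run the repo's formatter over all incoming entries.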
formatter.format_all_entries(out_json)
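
# Walk the entries back through successive canvas snapshots (164 -> 167),
# scaling coordinates against the 159.png reference image (see the
# scale_back module).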
base_image_path = os.path.join('..', 'web', '_img', 'canvas', 'place30')
ScaleConfig.image1 = os.path.join(base_image_path, '159.png')
scale_back.swap_source_dest('164', '165', os.path.join(base_image_path, '163_159.png'))
scale_back.scale_back_entries(out_json)
scale_back.swap_source_dest('165', '166', os.path.join(base_image_path, '164_159.png'))
scale_back.scale_back_entries(out_json)
scale_back.swap_source_dest('166', '167', os.path.join(base_image_path, '165_159.png'))
scale_back.scale_back_entries(out_json)
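
# Bookkeeping: IDs seen in the batch, duplicate IDs, and an ID -> index map
# for the existing atlas.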
out_ids = set()
out_dupe_ids = set()
atlas_ids = {}
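
# Load the current atlas.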
with open('../web/atlas.json', 'r', encoding='utf-8') as atlas_file:
    atlas_json = json.loads(atlas_file.read())
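
# Index existing atlas entries by ID.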
for i, entry in enumerate(atlas_json):
    atlas_ids[entry['id']] = i
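
# First pass: detect IDs submitted more than once; duplicates are excluded
# from the merge.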
for entry in out_json:
    if entry['id'] in out_ids:
        print(f"Entry {entry['id']} has duplicates! Please resolve this conflict. This will be excluded from the merge.")
        out_dupe_ids.add(entry['id'])
    out_ids.add(entry['id'])
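
# Second pass: apply edits, replace existing entries, or append new ones.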
for entry in out_json:
    if entry['id'] in out_dupe_ids:
        continue

    # Entries flagged with 'edit' update an existing atlas entry in place.
    if 'edit' in entry and entry['edit']:
        assert entry['id'] in atlas_ids, "Edit failed! ID not found on Atlas."
        index = atlas_ids[entry['id']]

        assert index is not None, "Edit failed! ID not found on Atlas."

        print(f"Edited {atlas_json[index]['id']} with {entry['edit']}")

        del entry['edit']  # drop the merge-only flag before storing
        atlas_json[index] = entry
    elif entry['id'] in atlas_ids:
        print(f"Edited {entry['id']} manually.")
        atlas_json[atlas_ids[entry['id']]] = entry
    else:
        print(f"Added {entry['id']}.")
        atlas_json.append(entry)
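
# Write the merged atlas back in the one-entry-per-line format.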
print('Writing...')
with open('../web/atlas.json', 'w', encoding='utf-8') as atlas_file:
    atlas_file.write(formatter.per_line_entries(atlas_json))
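
# Append the IDs processed in this run to the cumulative read-ids list.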
with open('../data/read-ids.txt', 'a', encoding='utf-8') as read_ids_file:
    with open('read-ids-temp.txt', 'r', encoding='utf-8') as read_ids_temp_file:
        read_ids_file.writelines(read_ids_temp_file.readlines())
print('All done.')