# atlas/tools/merge_out.py
import json
import os
from aformatter import format_all_entries, per_line_entries
import scale_back
from scale_back import ScaleConfig
# The staged submissions waiting to be merged into the atlas.
merge_source_file = 'temp-atlas.json'

with open(merge_source_file, 'r', encoding='UTF-8') as merge_file:
    out_json = json.load(merge_file)

# Run every submitted entry through the shared formatter before merging.
format_all_entries(out_json)
base_image_path = os.path.join('..', 'web', '_img', 'canvas', 'place30')
ScaleConfig.image1 = os.path.join(base_image_path, '159.png')

# NOTE(review): each pass appears to remap entries from one canvas period to
# the next using a reference image — confirm against scale_back's docs.
for source_period, dest_period, reference_image in (
    ('164', '165', '163_159.png'),
    ('165', '166', '164_159.png'),
    ('166', '167', '165_159.png'),
):
    scale_back.swap_source_dest(
        source_period, dest_period,
        os.path.join(base_image_path, reference_image))
    scale_back.scale_back_entries(out_json)
out_ids = set()       # every ID seen so far in the merge source
out_dupe_ids = set()  # IDs that occur more than once in the merge source

with open('../web/atlas.json', 'r', encoding='utf-8') as atlas_file:
    atlas_json = json.loads(atlas_file.read())

# Map each existing atlas ID to its list index so edits can land in place.
atlas_ids = {entry['id']: index for index, entry in enumerate(atlas_json)}
def _increment_id(id_chars):
    """Increment a base-36-style ID (list of '0'-'9'/'a'-'z' chars) in place.

    The rightmost character is stepped; 'z' wraps to '0' and carries into
    the character to its left, and '9' steps to 'a'.  If every character
    wraps, a leading '1' is prepended so the ID grows instead of silently
    reusing an earlier value.

    Fixes the original loop, whose `> -(len(...))` bound never reached the
    leftmost character, so e.g. 'z' was never incremented and 'zz' wrapped
    to 'z0' — both collision risks.
    """
    position = len(id_chars) - 1
    while position >= 0:
        char = id_chars[position]
        if char == 'z':
            # Wrap this character and carry into the next position left.
            id_chars[position] = '0'
            position -= 1
        else:
            id_chars[position] = 'a' if char == '9' else chr(ord(char) + 1)
            return
    # Every position wrapped ('zz...z'): grow the ID by one character.
    id_chars.insert(0, '1')

# Brand-new submissions carry an ID of 0/'0'; derive real IDs by counting
# on from the last ID already present in the atlas.
last_existing_id = list(atlas_json[-1]['id'])
for entry in out_json:
    if entry['id'] == 0 or entry['id'] == '0':
        # "Increment" the last ID to derive a new ID.
        _increment_id(last_existing_id)
        entry['id'] = ''.join(last_existing_id)
# Flag every ID that occurs more than once in the merge source; flagged
# entries are skipped by the merge step below.
for entry in out_json:
    entry_id = entry['id']
    if entry_id in out_ids:
        print(f"Entry {entry_id} has duplicates! Please resolve this conflict. This will be excluded from the merge.")
        out_dupe_ids.add(entry_id)
    out_ids.add(entry_id)
# Merge each non-duplicate entry into the atlas: explicit edits replace the
# referenced entry, an already-known ID replaces in place, anything else is
# appended as new.
for entry in out_json:
    if entry['id'] in out_dupe_ids:
        # Skip entries flagged as duplicates above.
        continue
    if 'edit' in entry and entry['edit']:
        # Explicit edit: the submission names the atlas entry it replaces.
        # NOTE: assert vanishes under `python -O`; acceptable for a
        # maintainer-run merge script.
        assert entry['id'] in atlas_ids, "Edit failed! ID not found on Atlas."
        index = atlas_ids[entry['id']]
        # (The original re-checked `index != None` here; the membership
        # assert above already guarantees the lookup succeeds, and indices
        # are ints, so that check was dead code and has been removed.)
        print(f"Edited {atlas_json[index]['id']} with {entry['edit']}")
        del entry['edit']
        atlas_json[index] = entry
    elif entry['id'] in atlas_ids:
        # Same ID already on the atlas: treat as an in-place replacement.
        print(f"Edited {entry['id']} manually.")
        atlas_json[atlas_ids[entry['id']]] = entry
    else:
        print(f"Added {entry['id']}.")
        atlas_json.append(entry)
print('Writing...')

# Write the merged atlas back out; per_line_entries presumably emits one
# entry per line (see aformatter) — keeps diffs reviewable.
with open('../web/atlas.json', 'w', encoding='utf-8') as atlas_out:
    per_line_entries(atlas_json, atlas_out)
# Append the just-processed submission IDs to the permanent record, then
# empty the temp file so the next run starts clean.  truncate(0) is safe
# here even without a seek: the handle is closed immediately afterwards.
with open('../data/read-ids.txt', 'a', encoding='utf-8') as read_ids_file, \
        open('temp-read-ids.txt', 'r+', encoding='utf-8') as temp_ids_file:
    read_ids_file.writelines(temp_ids_file.readlines())
    temp_ids_file.truncate(0)

print('All done.')