import json
import os

from aformatter import format_all_entries, per_line_entries
import scale_back
from scale_back import ScaleConfig

merge_source_file = 'temp-atlas.json'

with open(merge_source_file, 'r', encoding='UTF-8') as f1:
    out_json = json.loads(f1.read())

format_all_entries(out_json)
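
# Run the scale_back helpers over the merged entries, stepping through the
# canvas reference images under web/_img/canvas/place30 (assumed to map each
# expansion stage back to the previous one).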
base_image_path = os.path.join('..', 'web', '_img', 'canvas', 'place30')
ScaleConfig.image1 = os.path.join(base_image_path, '159.png')

scale_back.swap_source_dest('164', '165', os.path.join(base_image_path, '163_159.png'))
scale_back.scale_back_entries(out_json)
scale_back.swap_source_dest('165', '166', os.path.join(base_image_path, '164_159.png'))
scale_back.scale_back_entries(out_json)
scale_back.swap_source_dest('166', '167', os.path.join(base_image_path, '165_159.png'))
scale_back.scale_back_entries(out_json)

out_ids = set()
out_dupe_ids = set()
atlas_ids = {}

with open('../web/atlas.json', 'r', encoding='utf-8') as atlas_file:
    atlas_json = json.loads(atlas_file.read())
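
# Index every existing atlas entry by its ID so edits can address it in place.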
for i, entry in enumerate(atlas_json):
    atlas_ids[entry['id']] = i

last_existing_id = list(atlas_json[-1]['id'])
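
# Entries submitted with a placeholder ID of 0 receive a freshly minted ID,
# derived by incrementing the last existing atlas ID.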
for entry in out_json:
    if entry['id'] == 0 or entry['id'] == '0':
        # "Increment" the last ID to derive a new ID.
        # IDs run through digits 0-9 and then letters a-z; 'z' wraps back to
        # '0' and carries into the character to its left.
        current_index = -1
        while current_index > -(len(last_existing_id)):
            current_char = last_existing_id[current_index]

            if current_char == 'z':
                last_existing_id[current_index] = '0'
                current_index -= 1
            else:
                if current_char == '9':
                    current_char = 'a'
                else:
                    current_char = chr(ord(current_char) + 1)
                last_existing_id[current_index] = current_char
                break

        entry['id'] = ''.join(last_existing_id)
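
# Flag IDs that occur more than once in the merge source; duplicates are
# excluded from the merge below.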
for entry in out_json:
    if entry['id'] in out_ids:
        print(f"Entry {entry['id']} has duplicates! Please resolve this conflict. This will be excluded from the merge.")
        out_dupe_ids.add(entry['id'])
    out_ids.add(entry['id'])
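
# Merge into the atlas: apply explicit edits, overwrite entries whose ID
# already exists, and append everything else as new.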
for entry in out_json:
    if entry['id'] in out_dupe_ids:
        continue

    if 'edit' in entry and entry['edit']:
        assert entry['id'] in atlas_ids, "Edit failed! ID not found on Atlas."
        index = atlas_ids[entry['id']]
        assert index is not None, "Edit failed! ID not found on Atlas."

        print(f"Edited {atlas_json[index]['id']} with {entry['edit']}")

        del entry['edit']
        atlas_json[index] = entry
    elif entry['id'] in atlas_ids:
        print(f"Edited {entry['id']} manually.")
        atlas_json[atlas_ids[entry['id']]] = entry
    else:
        print(f"Added {entry['id']}.")
        atlas_json.append(entry)

print('Writing...')

with open('../web/atlas.json', 'w', encoding='utf-8') as atlas_file:
    per_line_entries(atlas_json, atlas_file)
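
# Append the processed submission IDs to the permanent read-ids list, then
# empty the temp file.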
with open('../data/read-ids.txt', 'a', encoding='utf-8') as read_ids_file:
    with open('temp-read-ids.txt', 'r+', encoding='utf-8') as read_ids_temp_file:
        read_ids_file.writelines(read_ids_temp_file.readlines())
        read_ids_temp_file.truncate(0)

print('All done.')