Better exception handling on deserialization / data loading

This commit is contained in:
CreepyCre 2021-10-19 15:12:29 +02:00
parent 7dbc3c715e
commit e0474d85c0
4 changed files with 81 additions and 8 deletions

View file

@ -0,0 +1,51 @@
package org.dimdev.dimdoors.api.util;
import org.apache.logging.log4j.util.TriConsumer;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collector;
/**
 * A {@link Collector} decorator that delegates every stage to a wrapped
 * collector but routes exceptions thrown during the accumulation step to a
 * caller-supplied handler instead of letting them abort the stream pipeline.
 *
 * <p>Only accumulation is guarded; exceptions thrown by the wrapped
 * collector's supplier, combiner or finisher propagate unchanged.
 *
 * @param <T> type of input elements
 * @param <A> mutable accumulation (container) type
 * @param <R> final result type
 */
public class ExceptionHandlingCollector<T, A, R> implements Collector<T, A, R> {
	// Collector that all stages are delegated to.
	private final Collector<T, A, R> delegate;
	// Invoked with (container, element, exception) whenever accumulation throws.
	private final TriConsumer<A, T, Exception> onAccumulationError;

	/**
	 * @param collector              the collector to wrap
	 * @param exceptionalAccumulator callback receiving the accumulation
	 *                               container, the offending element and the
	 *                               exception that was thrown
	 */
	public ExceptionHandlingCollector(Collector<T, A, R> collector, TriConsumer<A, T, Exception> exceptionalAccumulator) {
		this.delegate = collector;
		this.onAccumulationError = exceptionalAccumulator;
	}

	@Override
	public Supplier<A> supplier() {
		return delegate.supplier();
	}

	@Override
	public BiConsumer<A, T> accumulator() {
		return (container, element) -> {
			try {
				delegate.accumulator().accept(container, element);
			} catch (Exception e) {
				// Hand the failure to the caller instead of aborting the stream.
				onAccumulationError.accept(container, element, e);
			}
		};
	}

	@Override
	public BinaryOperator<A> combiner() {
		return delegate.combiner();
	}

	@Override
	public Function<A, R> finisher() {
		return delegate.finisher();
	}

	@Override
	public Set<Characteristics> characteristics() {
		return delegate.characteristics();
	}
}

View file

@ -154,8 +154,13 @@ public class DoorDataReader {
return;
}
JsonObject json = GSON.fromJson(jsonStr, JsonObject.class);
try (DoorData ignored = DoorData.fromJson(json)) {
LOGGER.info("Loaded door json from {} with id {}", p.toAbsolutePath().toString(), ignored.getId());
// TODO(review): verify that catching all exceptions here (rather than letting door-loading failures propagate) is the intended behavior.
try {
try (DoorData ignored = DoorData.fromJson(json)) {
LOGGER.info("Loaded door json from {} with id {}", p.toAbsolutePath().toString(), ignored.getId());
}
} catch (Exception e) {
LOGGER.error("Error trying to load door json from path " + p.toAbsolutePath().toString(), e);
}
}
}

View file

@ -8,6 +8,8 @@ import net.minecraft.util.math.BlockBox;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraft.world.chunk.Chunk;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dimdev.dimdoors.DimensionalDoorsInitializer;
import org.dimdev.dimdoors.api.util.BlockBoxUtil;
import org.dimdev.dimdoors.block.entity.RiftBlockEntity;
@ -19,6 +21,7 @@ import java.util.Map;
import java.util.stream.Collectors;
public class AbsoluteRiftBlockEntityModifier implements LazyModifier {
private static final Logger LOGGER = LogManager.getLogger();
public static final String KEY = "block_entity";
private Map<BlockPos, RiftBlockEntity> rifts;
@ -35,6 +38,13 @@ public class AbsoluteRiftBlockEntityModifier implements LazyModifier {
@Override
public Modifier fromNbt(NbtCompound nbt) {
serializedRifts = nbt.getList("rifts", NbtType.COMPOUND).parallelStream().unordered().map(NbtCompound.class::cast)
.filter(compound -> {
if (compound.contains("Pos")) {
return true;
}
LOGGER.error("Discarding rift on deserialization since \"Pos\" tag was not set.");
return false;
})
.collect(Collectors.toConcurrentMap(compound -> {
int[] ints = compound.getIntArray("Pos");
return new BlockPos(ints[0], ints[1], ints[2]);

View file

@ -10,6 +10,9 @@ import net.minecraft.nbt.NbtIo;
import net.minecraft.nbt.NbtOps;
import net.minecraft.resource.ResourceManager;
import net.minecraft.util.Identifier;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dimdev.dimdoors.api.util.ExceptionHandlingCollector;
import org.dimdev.dimdoors.api.util.Path;
import java.io.IOException;
@ -17,12 +20,14 @@ import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
public class ResourceUtil {
private static final Logger LOGGER = LogManager.getLogger();
private static final Gson GSON = new GsonBuilder().setLenient().setPrettyPrinting().create();
public static final BiFunction<String, Identifier, Path<String>> PATH_KEY_PROVIDER = (startingPath, id) -> Path.stringPath(id.getNamespace() + ":" + id.getPath().substring(0, id.getPath().lastIndexOf(".")).substring(startingPath.length() + (startingPath.endsWith("/") ? 0 : 1)));
@ -44,15 +49,16 @@ public class ResourceUtil {
public static <K, T, M extends Map<K, T>> CompletableFuture<M> loadResourcePathToMap(ResourceManager manager, String startingPath, String extension, M map, BiFunction<InputStream, K, T> reader, BiFunction<String, Identifier, K> keyProvider) {
Collection<Identifier> ids = manager.findResources(startingPath, str -> str.endsWith(extension));
return CompletableFuture.supplyAsync(() -> {
map.putAll(ids.parallelStream().unordered().collect(Collectors.toConcurrentMap(
map.putAll(ids.parallelStream().unordered().collect(new ExceptionHandlingCollector<>(Collectors.toConcurrentMap(
id -> keyProvider.apply(startingPath, id),
id -> {
try {
return reader.apply(manager.getResource(id).getInputStream(), keyProvider.apply(startingPath, id));
} catch (IOException | RuntimeException e) {
throw new RuntimeException("Error loading resource: " + id);
throw new RuntimeException(e);
}
})));
}),
(a, id, exception) -> LOGGER.error("Error loading resource: " + id, exception))));
return map;
});
}
@ -63,10 +69,11 @@ public class ResourceUtil {
collection.addAll(ids.parallelStream().unordered().map(id -> {
try {
return reader.apply(manager.getResource(id).getInputStream(), id);
} catch (IOException e) {
throw new RuntimeException("Error loading resource: " + id);
} catch (Exception e) {
LOGGER.error("Error loading resource: " + id, e);
return null;
}
}).collect(Collectors.toList())); // TODO: change this to smthn concurrent
}).collect(Collectors.filtering(Objects::nonNull, Collectors.toList()))); // TODO: change this to smthn concurrent
return collection;
});
}