Mirror of https://github.com/VolmitSoftware/Iris.git

Performance Improvements
Mantle.java

@@ -21,6 +21,8 @@ package com.volmit.iris.util.mantle;
import com.google.common.collect.ImmutableList;
import com.volmit.iris.Iris;
import com.volmit.iris.engine.data.cache.Cache;
+import com.volmit.iris.engine.mantle.EngineMantle;
+import com.volmit.iris.engine.mantle.MantleWriter;
import com.volmit.iris.engine.object.basic.IrisPosition;
import com.volmit.iris.engine.object.feature.IrisFeaturePositional;
import com.volmit.iris.util.collection.KMap;
@@ -101,6 +103,20 @@ public class Mantle {
        }
    }

+    /**
+     * Obtain a cached writer which only contains cached chunks.
+     * This avoids locking on regions when writing to lots of chunks
+     * @param x the x chunk
+     * @param z the z chunk
+     * @param radius the radius chunks
+     * @return the writer
+     */
+    @ChunkCoordinates
+    public MantleWriter write(EngineMantle engineMantle, int x, int z, int radius)
+    {
+        return new MantleWriter(engineMantle, this, x, z, radius);
+    }

    /**
     * Lower a flag if it is raised. If the flag was lowered (meaning it was previously raised), execute the runnable
     * @param x the chunk x

@@ -911,4 +927,8 @@ public class Mantle {
    private static double lengthSq(double x, double z) {
        return (x * x) + (z * z);
    }
+
+    public int getWorldHeight() {
+        return worldHeight;
+    }
}

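The javadoc above explains the intent: the writer returned here is scoped to a radius of cached chunks so that bulk edits do not take a region lock on every block write. The diff only shows the MantleWriter constructor call, so the sketch below is not Iris's implementation; it merely illustrates the underlying pattern, with CachedRadiusWriter, ChunkSource, and acquireChunk being hypothetical names: resolve every chunk in the radius once while locking is still acceptable, then serve all subsequent writes from that local cache.

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch only (not Iris's MantleWriter): resolve every chunk buffer in a
// radius up front so the block writes that follow never touch a region-level lock.
final class CachedRadiusWriter {
    // One hypothetical flat buffer per chunk (16 x 16 x 256 entries), keyed by packed chunk coords.
    private final Map<Long, int[]> chunkBuffers = new HashMap<>();

    interface ChunkSource {
        // Assumed to do whatever locking is needed, once, while the cache is being built.
        int[] acquireChunk(int cx, int cz);
    }

    CachedRadiusWriter(ChunkSource source, int centerX, int centerZ, int radius) {
        for (int cx = centerX - radius; cx <= centerX + radius; cx++) {
            for (int cz = centerZ - radius; cz <= centerZ + radius; cz++) {
                chunkBuffers.put(key(cx, cz), source.acquireChunk(cx, cz));
            }
        }
    }

    // Writes only consult the pre-resolved cache; blocks outside the radius are dropped here.
    void set(int blockX, int blockY, int blockZ, int value) {
        int[] buffer = chunkBuffers.get(key(blockX >> 4, blockZ >> 4));
        if (buffer != null) {
            buffer[((blockY & 255) << 8) | ((blockZ & 15) << 4) | (blockX & 15)] = value;
        }
    }

    private static long key(int cx, int cz) {
        return (((long) cx) << 32) ^ (cz & 0xFFFFFFFFL);
    }
}
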
Matter.java

@@ -27,6 +27,7 @@ import com.volmit.iris.util.hunk.Hunk;
import com.volmit.iris.util.math.BlockPosition;
import org.bukkit.World;
import org.bukkit.block.data.BlockData;
import org.bukkit.craftbukkit.v1_17_R1.block.data.type.CraftLeaves;
import org.bukkit.entity.Entity;

import java.io.*;

@@ -189,7 +190,16 @@ public interface Matter {
        slice = (MatterSlice<T>) createSlice(c, this);

        if (slice == null) {
            Iris.error("Unable to find a slice for class " + C.DARK_RED + c.getCanonicalName());
            try
            {
                throw new RuntimeException("Bad slice " + c.getCanonicalName());
            }

            catch(Throwable e)
            {
                e.printStackTrace();
            }

            return null;
        }

Chunk.java

@@ -225,7 +225,7 @@ public class Chunk {
     * @param blockZ The z-coordinate of the block.
     * @return The biome id or -1 if the biomes are not correctly initialized.
     */
-   public int getBiomeAt(int blockX, int blockY, int blockZ) {
+   public synchronized int getBiomeAt(int blockX, int blockY, int blockZ) {
        if (dataVersion < 2202) {
            if (biomes == null || biomes.length != 256) {
                return -1;

@@ -244,7 +244,7 @@ public class Chunk {
    }

    @Deprecated
-   public void setBiomeAt(int blockX, int blockZ, int biomeID) {
+   public synchronized void setBiomeAt(int blockX, int blockZ, int biomeID) {
        if (dataVersion < 2202) {
            if (biomes == null || biomes.length != 256) {
                biomes = new int[256];

@@ -275,7 +275,7 @@ public class Chunk {
     * @param biomeID The biome id to be set.
     * When set to a negative number, Minecraft will replace it with the block column's default biome.
     */
-   public void setBiomeAt(int blockX, int blockY, int blockZ, int biomeID) {
+   public synchronized void setBiomeAt(int blockX, int blockY, int blockZ, int biomeID) {
        if (dataVersion < 2202) {
            if (biomes == null || biomes.length != 256) {
                biomes = new int[256];

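All three biome accessors above gain synchronized. The dataVersion < 2202 branch guards the legacy 2D biome layout, where a chunk stores one biome id per block column in a flat 256-entry array; the lazy biomes = new int[256] initialization is exactly the kind of check-then-act that needs the added synchronization once chunks are touched from several generator threads. For reference, the conventional 2D index math looks roughly like the sketch below; the z-major ordering is an assumption about the Anvil layout, not something shown in this diff.

// Rough sketch of the legacy (dataVersion < 2202) biome lookup; ordering assumed.
final class LegacyBiomes {
    // One biome id per block column, 16 x 16 = 256 entries.
    static int index(int blockX, int blockZ) {
        return ((blockZ & 15) << 4) | (blockX & 15);
    }

    static int biomeAt(int[] biomes, int blockX, int blockZ) {
        if (biomes == null || biomes.length != 256) {
            return -1; // mirrors the "not correctly initialized" case in getBiomeAt
        }
        return biomes[index(blockX, blockZ)];
    }
}
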
NBTWorld.java

@@ -27,6 +27,7 @@ import com.volmit.iris.util.format.C;
import com.volmit.iris.util.math.M;
import com.volmit.iris.util.nbt.tag.CompoundTag;
import com.volmit.iris.util.nbt.tag.StringTag;
+import com.volmit.iris.util.parallel.HyperLock;
import com.volmit.iris.util.scheduling.IrisLock;
import org.bukkit.NamespacedKey;
import org.bukkit.block.Biome;

@@ -43,8 +44,8 @@ public class NBTWorld {
    private static final BlockData AIR = B.get("AIR");
    private static final Map<String, CompoundTag> blockDataCache = new KMap<>();
    private static final Map<Biome, Integer> biomeIds = computeBiomeIDs();
-   private final IrisLock regionLock = new IrisLock("Region");
    private final KMap<Long, MCAFile> loadedRegions;
+   private final HyperLock hyperLock = new HyperLock();
    private final KMap<Long, Long> lastUse;
    private final File worldFolder;
    private final ExecutorService saveQueue;
@@ -62,13 +63,11 @@ public class NBTWorld {
    }

    public void close() {
-       regionLock.lock();

        for (Long i : loadedRegions.k()) {
            queueSaveUnload(Cache.keyX(i), Cache.keyZ(i));
        }

-       regionLock.unlock();
        saveQueue.shutdown();
        try {
            while (!saveQueue.awaitTermination(3, TimeUnit.SECONDS)) {

@@ -80,13 +79,9 @@ public class NBTWorld {
    }

    public void flushNow() {
-       regionLock.lock();
-
        for (Long i : loadedRegions.k()) {
            doSaveUnload(Cache.keyX(i), Cache.keyZ(i));
        }
-
-       regionLock.unlock();
    }

    public void queueSaveUnload(int x, int z) {

@@ -103,8 +98,6 @@ public class NBTWorld {
    }

    public void save() {
-       regionLock.lock();
-
        boolean saving = true;

        for (Long i : loadedRegions.k()) {

@@ -121,8 +114,6 @@ public class NBTWorld {
        }

        Iris.debug("Regions: " + C.GOLD + loadedRegions.size() + C.LIGHT_PURPLE);
-
-       regionLock.unlock();
    }

    public void queueSave() {

@@ -131,10 +122,8 @@ public class NBTWorld {

    public synchronized void unloadRegion(int x, int z) {
        long key = Cache.key(x, z);
-       regionLock.lock();
        loadedRegions.remove(key);
        lastUse.remove(key);
-       regionLock.unlock();
        Iris.debug("Unloaded Region " + C.GOLD + x + " " + z);
    }

@@ -249,6 +238,11 @@ public class NBTWorld {
        getChunkSection(x >> 4, y >> 4, z >> 4).setBlockStateAt(x & 15, y & 15, z & 15, getCompound(data), false);
    }

+   public int getBiomeId(Biome b)
+   {
+       return biomeIds.get(b);
+   }
+
    public void setBiome(int x, int y, int z, Biome biome) {
        getChunk(x >> 4, z >> 4).setBiomeAt(x & 15, y, z & 15, biomeIds.get(biome));
    }

@@ -265,8 +259,12 @@ public class NBTWorld {
        return s;
    }

-   public synchronized Chunk getChunk(int x, int z) {
-       MCAFile mca = getMCA(x >> 5, z >> 5);
+   public Chunk getChunk(int x, int z)
+   {
+       return getChunk(getMCA(x >> 5, z >> 5), x, z);
+   }
+
+   public Chunk getChunk(MCAFile mca, int x, int z) {
        Chunk c = mca.getChunk(x & 31, z & 31);

        if (c == null) {
@@ -278,41 +276,40 @@ public class NBTWorld {
    }

    public long getIdleDuration(int x, int z) {
-       Long l = lastUse.get(Cache.key(x, z));
-
-       return l == null ? 0 : (M.ms() - l);
+       return hyperLock.withResult(x, z, () -> {
+           Long l = lastUse.get(Cache.key(x, z));
+           return l == null ? 0 : (M.ms() - l);
+       });
    }

    public MCAFile getMCA(int x, int z) {
        long key = Cache.key(x, z);

-       regionLock.lock();
-       lastUse.put(key, M.ms());
-       MCAFile mcaf = loadedRegions.get(key);
-       regionLock.unlock();
+       return hyperLock.withResult(x, z, () -> {
+           lastUse.put(key, M.ms());

-       if (mcaf == null) {
-           mcaf = new MCAFile(x, z);
-           regionLock.lock();
-           loadedRegions.put(key, mcaf);
-           regionLock.unlock();
-       }
+           MCAFile mcaf = loadedRegions.get(key);

-       return mcaf;
+           if (mcaf == null) {
+               mcaf = new MCAFile(x, z);
+               loadedRegions.put(key, mcaf);
+           }
+
+           return mcaf;
+       });
    }

    public MCAFile getMCAOrNull(int x, int z) {
        long key = Cache.key(x, z);
-       MCAFile ff = null;
-       regionLock.lock();
-
-       if (loadedRegions.containsKey(key)) {
-           lastUse.put(key, M.ms());
-           ff = loadedRegions.get(key);
-       }
+       return hyperLock.withResult(x, z, () -> {
+           if (loadedRegions.containsKey(key)) {
+               lastUse.put(key, M.ms());
+               return loadedRegions.get(key);
+           }

-       regionLock.unlock();
-       return ff;
+           return null;
+       });
    }

    public int size() {

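The getIdleDuration, getMCA and getMCAOrNull rewrites are the core of the NBTWorld change: instead of funnelling every caller through the single regionLock, each region coordinate is guarded independently via hyperLock.withResult(x, z, ...), so work on two different regions no longer serializes behind one lock. Only withResult is visible in this diff; a minimal per-coordinate lock with that shape could look like the sketch below. CoordinateLock and its unbounded lock map are assumptions for illustration, not Iris's HyperLock.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;

// Minimal sketch of a per-coordinate lock: callers working on different (x, z)
// keys proceed in parallel, callers on the same key serialize.
final class CoordinateLock {
    private final ConcurrentHashMap<Long, ReentrantLock> locks = new ConcurrentHashMap<>();

    <T> T withResult(int x, int z, Supplier<T> action) {
        ReentrantLock lock = locks.computeIfAbsent(key(x, z), k -> new ReentrantLock());
        lock.lock();
        try {
            return action.get();
        } finally {
            lock.unlock();
        }
    }

    private static long key(int x, int z) {
        return (((long) x) << 32) ^ (z & 0xFFFFFFFFL);
    }
}

A real implementation would likely also prune or reuse lock entries; the point is only that contention becomes per-key instead of global.
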
Section.java

@@ -24,6 +24,7 @@ import com.volmit.iris.util.nbt.tag.ByteArrayTag;
import com.volmit.iris.util.nbt.tag.CompoundTag;
import com.volmit.iris.util.nbt.tag.ListTag;
import com.volmit.iris.util.nbt.tag.LongArrayTag;
+import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;

import java.util.ArrayList;
import java.util.HashMap;

@@ -170,7 +171,7 @@ public class Section {
     * This option should only be used moderately to avoid unnecessary recalculation of the palette indices.
     * Recalculating the Palette should only be executed once right before saving the Section to file.
     */
-   public void setBlockStateAt(int blockX, int blockY, int blockZ, CompoundTag state, boolean cleanup) {
+   public synchronized void setBlockStateAt(int blockX, int blockY, int blockZ, CompoundTag state, boolean cleanup) {
        int paletteSizeBefore = palette.size();
        int paletteIndex = addToPalette(state);
        //power of 2 --> bits must increase, but only if the palette size changed
@@ -223,7 +224,7 @@ public class Section {
     * @param paletteIndex The block state to be set (index of block data in the palette).
     * @param blockStates The block states to be updated.
     */
-   public void setPaletteIndex(int blockIndex, int paletteIndex, AtomicLongArray blockStates) {
+   public synchronized void setPaletteIndex(int blockIndex, int paletteIndex, AtomicLongArray blockStates) {
        int bits = blockStates.length() >> 6;

        if (dataVersion < 2527) {

@@ -253,7 +254,7 @@ public class Section {
        return palette;
    }

-   int addToPalette(CompoundTag data) {
+   synchronized int addToPalette(CompoundTag data) {
        PaletteIndex index;
        if ((index = getValueIndexedPalette(data)) != null) {
            return index.index;
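addToPalette (now synchronized) is a deduplicating append: if an equal block-state tag is already in the palette, its existing index is returned via getValueIndexedPalette; otherwise the state is appended and gets a new index. Making the method synchronized keeps the lookup and the append atomic with respect to each other. Stripped of the NBT specifics, the pattern is roughly the sketch below; DedupPalette is a hypothetical name, and it leans on equals/hashCode the way the value-indexed palette leans on tag equality.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch of palette deduplication: equal values share one palette slot.
final class DedupPalette<T> {
    private final List<T> palette = new ArrayList<>();
    private final Map<T, Integer> valueToIndex = new HashMap<>();

    synchronized int add(T value) {
        Integer existing = valueToIndex.get(value);
        if (existing != null) {
            return existing;          // already in the palette, reuse its index
        }
        palette.add(value);
        int index = palette.size() - 1;
        valueToIndex.put(value, index);
        return index;
    }
}
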
@@ -283,14 +284,14 @@ public class Section {
     * This should only be used moderately to avoid unnecessary recalculation of the palette indices.
     * Recalculating the Palette should only be executed once right before saving the Section to file.
     */
-   public void cleanupPaletteAndBlockStates() {
+   public synchronized void cleanupPaletteAndBlockStates() {
        Map<Integer, Integer> oldToNewMapping = cleanupPalette();
        adjustBlockStateBits(oldToNewMapping, blockStates);
    }

-   private Map<Integer, Integer> cleanupPalette() {
+   private synchronized Map<Integer, Integer> cleanupPalette() {
        //create index - palette mapping
-       Map<Integer, Integer> allIndices = new HashMap<>();
+       Map<Integer, Integer> allIndices = new Int2IntOpenHashMap();
        for (int i = 0; i < 4096; i++) {
            int paletteIndex = getPaletteIndex(i);
            allIndices.put(paletteIndex, paletteIndex);

@@ -314,7 +315,7 @@ public class Section {
        return allIndices;
    }

-   void adjustBlockStateBits(Map<Integer, Integer> oldToNewMapping, AtomicLongArray blockStates) {
+   synchronized void adjustBlockStateBits(Map<Integer, Integer> oldToNewMapping, AtomicLongArray blockStates) {
        //increases or decreases the amount of bits used per BlockState
        //based on the size of the palette. oldToNewMapping can be used to update indices
        //if the palette had been cleaned up before using MCAFile#cleanupPalette().

@@ -376,7 +377,7 @@ public class Section {
     * @throws NullPointerException If <code>blockStates</code> is <code>null</code>
     * @throws IllegalArgumentException When <code>blockStates</code>' length is < 256 or > 4096 and is not a multiple of 64
     */
-   public void setBlockStates(AtomicLongArray blockStates) {
+   public synchronized void setBlockStates(AtomicLongArray blockStates) {
        if (blockStates == null) {
            throw new NullPointerException("BlockStates cannot be null");
        } else if (blockStates.length() % 64 != 0 || blockStates.length() < 256 || blockStates.length() > 4096) {

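Several of the methods made synchronized above operate on the bit-packed blockStates array: 4096 block positions per section share blockStates.length() longs, which is why the code recovers the bits per entry as blockStates.length() >> 6. Even though the storage is an AtomicLongArray, updating one palette index is a read-modify-write on a shared long, so two unsynchronized writers can silently drop each other's bits; that is what the added synchronized protects. The sketch below shows the packing arithmetic for the padded layout (dataVersion >= 2527), where an index never spans two longs; it is illustrative only, not the code from Section.

import java.util.concurrent.atomic.AtomicLongArray;

// Illustrative packing math for the padded layout (indices never straddle two longs).
final class PackedIndices {
    // 4096 entries spread over blockStates.length() longs of 64 bits each.
    static int bitsPerEntry(AtomicLongArray blockStates) {
        return blockStates.length() >> 6;
    }

    static int get(AtomicLongArray blockStates, int blockIndex, int bits) {
        int perLong = 64 / bits;                              // whole indices per long
        long word = blockStates.get(blockIndex / perLong);
        int shift = (blockIndex % perLong) * bits;
        return (int) ((word >>> shift) & ((1L << bits) - 1));
    }

    // Not atomic by itself: the load-modify-store below is why callers need external
    // synchronization, mirroring the synchronized added to setPaletteIndex.
    static void set(AtomicLongArray blockStates, int blockIndex, int paletteIndex, int bits) {
        int perLong = 64 / bits;
        int wordIndex = blockIndex / perLong;
        int shift = (blockIndex % perLong) * bits;
        long mask = ((1L << bits) - 1) << shift;
        long word = blockStates.get(wordIndex);
        blockStates.set(wordIndex, (word & ~mask) | (((long) paletteIndex << shift) & mask));
    }
}
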
MultiBurst.java

@@ -19,6 +19,7 @@
package com.volmit.iris.util.parallel;

import com.volmit.iris.Iris;
+import com.volmit.iris.core.IrisSettings;
import com.volmit.iris.core.service.PreservationSVC;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.math.M;

@@ -50,7 +51,7 @@ public class MultiBurst {
    private synchronized ExecutorService getService() {
        last.set(M.ms());
        if (service == null || service.isShutdown()) {
-           service = new ForkJoinPool(Runtime.getRuntime().availableProcessors(),
+           service = new ForkJoinPool(IrisSettings.getThreadCount(IrisSettings.get().getConcurrency().getParallelism()),
                new ForkJoinPool.ForkJoinWorkerThreadFactory() {
                    int m = 0;

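The MultiBurst pool previously sized itself to every available core; it now asks IrisSettings for a thread count derived from the configured concurrency parallelism. The exact semantics of getThreadCount are not shown in this diff; a common convention for such settings, assumed here purely for illustration, is that a positive value is used as-is while zero or negative values scale with the machine's core count:

// Assumed semantics only - not IrisSettings.getThreadCount itself.
final class ThreadCounts {
    static int resolve(int configured) {
        int cores = Runtime.getRuntime().availableProcessors();
        if (configured > 0) {
            return configured;                    // explicit thread count
        }
        if (configured == 0) {
            return cores;                         // "auto": one worker per core
        }
        return Math.max(1, cores / -configured);  // e.g. -2 => half the cores
    }
}

Either way, the effect of the change is that generation parallelism becomes configurable instead of always saturating the host.
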