mirror of https://github.com/Winds-Studio/Leaf.git synced 2025-12-26 18:39:23 +00:00

Couple fixes and improvements (#287)

* a lot of cleanup and new chunk changes

* perf: Head Node Hit Optimization

* part 1: reworked-reworked ChunkHolderManager

* part 2: speeeeeeeeeeeeeeeeeeeeeeeeeeed

* Optimise MobEffectUtil#getDigSpeedAmplification

* optimize chunk unloads and cleanup a bit

* fix 🐝

* rewritten async target finding

* extend the custom map usage

---------

Co-authored-by: Dreeam <61569423+Dreeam-qwq@users.noreply.github.com>
Taiyou
2025-04-21 22:49:33 +02:00
committed by GitHub
parent e82999dd18
commit 83e9043a45
37 changed files with 2884 additions and 310 deletions


@@ -1,78 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Sun, 16 Feb 2025 19:03:23 +0100
Subject: [PATCH] Optimize AABB
Pretty minor stuff, but it improves AABB.intersect by around 5%
diff --git a/net/minecraft/world/phys/AABB.java b/net/minecraft/world/phys/AABB.java
index f64c04b32dd2d0fe143fc8bf9f498e52beb66a58..00daaff66bd26e9ca15a7eb4052ff38f9e662f7b 100644
--- a/net/minecraft/world/phys/AABB.java
+++ b/net/minecraft/world/phys/AABB.java
@@ -220,13 +220,16 @@ public class AABB {
}
public AABB intersect(AABB other) {
- double max = Math.max(this.minX, other.minX);
- double max1 = Math.max(this.minY, other.minY);
- double max2 = Math.max(this.minZ, other.minZ);
- double min = Math.min(this.maxX, other.maxX);
- double min1 = Math.min(this.maxY, other.maxY);
- double min2 = Math.min(this.maxZ, other.maxZ);
- return new AABB(max, max1, max2, min, min1, min2);
+ // Leaf start - Optimize AABB
+ return new AABB(
+ this.minX > other.minX ? this.minX : other.minX,
+ this.minY > other.minY ? this.minY : other.minY,
+ this.minZ > other.minZ ? this.minZ : other.minZ,
+ this.maxX < other.maxX ? this.maxX : other.maxX,
+ this.maxY < other.maxY ? this.maxY : other.maxY,
+ this.maxZ < other.maxZ ? this.maxZ : other.maxZ
+ );
+ // Leaf end - Optimize AABB
}
public AABB minmax(AABB other) {
@@ -258,16 +261,39 @@ public class AABB {
}
public boolean intersects(AABB other) {
- return this.intersects(other.minX, other.minY, other.minZ, other.maxX, other.maxY, other.maxZ);
+ // Leaf start - Optimize AABB
+ // Removed redundant method call overhead
+ return this.minX < other.maxX &&
+ this.maxX > other.minX &&
+ this.minY < other.maxY &&
+ this.maxY > other.minY &&
+ this.minZ < other.maxZ &&
+ this.maxZ > other.minZ;
+ // Leaf end - Optimize AABB
}
public boolean intersects(double x1, double y1, double z1, double x2, double y2, double z2) {
- return this.minX < x2 && this.maxX > x1 && this.minY < y2 && this.maxY > y1 && this.minZ < z2 && this.maxZ > z1;
+ // Leaf start - Optimize AABB
+ // No temporary variables needed, direct comparison
+ return this.minX < x2 &&
+ this.maxX > x1 &&
+ this.minY < y2 &&
+ this.maxY > y1 &&
+ this.minZ < z2 &&
+ this.maxZ > z1;
+ // Leaf end - Optimize AABB
}
public boolean intersects(Vec3 min, Vec3 max) {
return this.intersects(
- Math.min(min.x, max.x), Math.min(min.y, max.y), Math.min(min.z, max.z), Math.max(min.x, max.x), Math.max(min.y, max.y), Math.max(min.z, max.z)
+ // Leaf start - Optimize AABB
+ min.x < max.x ? min.x : max.x,
+ min.y < max.y ? min.y : max.y,
+ min.z < max.z ? min.z : max.z,
+ min.x > max.x ? min.x : max.x,
+ min.y > max.y ? min.y : max.y,
+ min.z > max.z ? min.z : max.z
+ // Leaf end - Optimize AABB
);
}
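
As context for the hunks above: the ternaries perform the same per-axis min/max selection but skip the NaN and -0.0/+0.0 handling that Math.min/Math.max do, which is only acceptable because box coordinates are expected to be finite. A minimal standalone sketch of the pattern, using a hypothetical SimpleBox rather than the Minecraft AABB class:

final class SimpleBox {
    final double minX, minY, minZ, maxX, maxY, maxZ;

    SimpleBox(double minX, double minY, double minZ, double maxX, double maxY, double maxZ) {
        this.minX = minX; this.minY = minY; this.minZ = minZ;
        this.maxX = maxX; this.maxY = maxY; this.maxZ = maxZ;
    }

    // Intersection box: per-axis max of the mins and min of the maxes, written as ternaries.
    SimpleBox intersect(SimpleBox other) {
        return new SimpleBox(
            this.minX > other.minX ? this.minX : other.minX,
            this.minY > other.minY ? this.minY : other.minY,
            this.minZ > other.minZ ? this.minZ : other.minZ,
            this.maxX < other.maxX ? this.maxX : other.maxX,
            this.maxY < other.maxY ? this.maxY : other.maxY,
            this.maxZ < other.maxZ ? this.maxZ : other.maxZ
        );
    }

    // Overlap test written directly with strict inequalities instead of delegating
    // to another overload, so there is no extra call for the JIT to inline.
    boolean intersects(SimpleBox other) {
        return this.minX < other.maxX && this.maxX > other.minX
            && this.minY < other.maxY && this.maxY > other.minY
            && this.minZ < other.maxZ && this.maxZ > other.minZ;
    }
}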


@@ -1,84 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Tue, 25 Feb 2025 21:13:54 +0100
Subject: [PATCH] Some Optimizations on SerializableChunkData
diff --git a/net/minecraft/world/level/chunk/storage/SerializableChunkData.java b/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
index 6b6aaeca14178b5b709e20ae13552d42217f15c0..c0939c311c554a4660b80725294663bab7915733 100644
--- a/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
+++ b/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
@@ -502,14 +502,16 @@ public record SerializableChunkData(
throw new IllegalArgumentException("Chunk can't be serialized: " + chunk);
} else {
ChunkPos pos = chunk.getPos();
- List<SerializableChunkData.SectionData> list = new ArrayList<>(); final List<SerializableChunkData.SectionData> sectionsList = list; // Paper - starlight - OBFHELPER
- LevelChunkSection[] sections = chunk.getSections();
- LevelLightEngine lightEngine = level.getChunkSource().getLightEngine();
// Paper start - starlight
final int minLightSection = ca.spottedleaf.moonrise.common.util.WorldUtil.getMinLightSection(level);
final int maxLightSection = ca.spottedleaf.moonrise.common.util.WorldUtil.getMaxLightSection(level);
final int minBlockSection = ca.spottedleaf.moonrise.common.util.WorldUtil.getMinSection(level);
+ // Leaf start - Some Optimizations on SerializableChunkData
+ // Pre-allocate with correct capacity to avoid resizing
+ final int expectedSectionCount = maxLightSection - minLightSection + 1;
+ List<SerializableChunkData.SectionData> list = new ArrayList<>(expectedSectionCount);
+ // Leaf end - Some Optimizations on SerializableChunkData
final LevelChunkSection[] chunkSections = chunk.getSections();
final ca.spottedleaf.moonrise.patches.starlight.light.SWMRNibbleArray[] blockNibbles = ((ca.spottedleaf.moonrise.patches.starlight.chunk.StarlightChunk)chunk).starlight$getBlockNibbles();
@@ -541,10 +543,11 @@ public record SerializableChunkData(
((ca.spottedleaf.moonrise.patches.starlight.storage.StarlightSectionData)(Object)sectionData).starlight$setSkyLightState(skyNibble.state);
}
- sectionsList.add(sectionData);
+ list.add(sectionData); // Leaf - Some Optimizations on SerializableChunkData
}
// Paper end - starlight
+ // Pre-allocate block entities list with exact size needed
List<CompoundTag> list1 = new ArrayList<>(chunk.getBlockEntitiesPos().size());
for (BlockPos blockPos : chunk.getBlockEntitiesPos()) {
@@ -554,7 +557,16 @@ public record SerializableChunkData(
}
}
- List<CompoundTag> list2 = new ArrayList<>();
+ // Leaf start - Some Optimizations on SerializableChunkData
+ // For entities, use an initial estimated capacity if it's a ProtoChunk
+ int entityEstimate = 64; // Reasonable default size
+ if (chunk.getPersistedStatus().getChunkType() == ChunkType.PROTOCHUNK) {
+ ProtoChunk protoChunk = (ProtoChunk)chunk;
+ entityEstimate = Math.max(16, protoChunk.getEntities().size());
+ }
+ List<CompoundTag> list2 = new ArrayList<>(entityEstimate);
+ // Leaf end - Some Optimizations on SerializableChunkData
+
long[] longs = null;
if (chunk.getPersistedStatus().getChunkType() == ChunkType.PROTOCHUNK) {
ProtoChunk protoChunk = (ProtoChunk)chunk;
@@ -570,14 +582,18 @@ public record SerializableChunkData(
for (Entry<Heightmap.Types, Heightmap> entry : chunk.getHeightmaps()) {
if (chunk.getPersistedStatus().heightmapsAfter().contains(entry.getKey())) {
long[] rawData = entry.getValue().getRawData();
- map.put(entry.getKey(), (long[])rawData.clone());
+ map.put(entry.getKey(), Arrays.copyOf(rawData, rawData.length)); // Leaf - Some Optimizations on SerializableChunkData
}
}
ChunkAccess.PackedTicks ticksForSerialization = chunk.getTicksForSerialization(level.getGameTime());
- ShortList[] lists = Arrays.stream(chunk.getPostProcessing())
- .map(list3 -> list3 != null ? new ShortArrayList(list3) : null)
- .toArray(ShortList[]::new);
+ // Leaf start - Some Optimizations on SerializableChunkData
+ ShortList[] postProcessing = chunk.getPostProcessing();
+ ShortList[] lists = new ShortList[postProcessing.length];
+ for (int i = 0; i < postProcessing.length; i++) {
+ lists[i] = postProcessing[i] != null ? new ShortArrayList(postProcessing[i]) : null;
+ }
+ // Leaf end - Some Optimizations on SerializableChunkData
CompoundTag compoundTag = packStructureData(
StructurePieceSerializationContext.fromLevel(level), pos, chunk.getAllStarts(), chunk.getAllReferences()
);
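
The hunks above mostly pre-size lists to known counts and replace a stream pipeline with a plain loop. A small standalone sketch of both ideas with a hypothetical copyHeightmaps helper (not part of SerializableChunkData); note that Arrays.copyOf on a long[] yields the same copy as clone():

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

final class PreSizeDemo {
    // Pre-sizing to the known element count avoids the intermediate array growths
    // an ArrayList performs when filled from its default capacity.
    static List<long[]> copyHeightmaps(long[][] raw) {
        List<long[]> out = new ArrayList<>(raw.length);   // exact capacity up front
        for (long[] data : raw) {
            out.add(Arrays.copyOf(data, data.length));    // equivalent to data.clone()
        }
        return out;
    }

    public static void main(String[] args) {
        long[][] raw = { {1L, 2L, 3L}, {4L, 5L} };
        System.out.println(copyHeightmaps(raw).size());   // prints 2
    }
}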


@@ -1,134 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Thu, 27 Feb 2025 23:39:32 +0100
Subject: [PATCH] Rework ChunkHolderManager
diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
index be820c6093dd2ae7642b9bee11edf65e3a8d7242..d6a30d6735d24f24a8108b6a5d15725587bb662a 100644
--- a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
+++ b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
@@ -736,24 +736,20 @@ public final class ChunkHolderManager {
final int sectionShift = ((ChunkSystemServerLevel)this.world).moonrise$getRegionChunkShift();
- final Predicate<Ticket<?>> expireNow = (final Ticket<?> ticket) -> {
- long removeDelay = ((ChunkSystemTicket<?>)(Object)ticket).moonrise$getRemoveDelay();
- if (removeDelay == NO_TIMEOUT_MARKER) {
- return false;
- }
- --removeDelay;
- ((ChunkSystemTicket<?>)(Object)ticket).moonrise$setRemoveDelay(removeDelay);
- return removeDelay <= 0L;
- };
-
+ // Leaf start - Rework ChunkHolderManager
+ // Collect sections to process first to avoid concurrent modification issues
+ List<Long> sectionKeys = new ArrayList<>();
for (final PrimitiveIterator.OfLong iterator = this.sectionToChunkToExpireCount.keyIterator(); iterator.hasNext();) {
- final long sectionKey = iterator.nextLong();
+ sectionKeys.add(iterator.nextLong());
+ }
+ for (final Long sectionKey : sectionKeys) {
+ // Skip if section was removed concurrently
if (!this.sectionToChunkToExpireCount.containsKey(sectionKey)) {
- // removed concurrently
continue;
}
+ // Acquire lock for this section only
final ReentrantAreaLock.Node ticketLock = this.ticketLockArea.lock(
CoordinateUtils.getChunkX(sectionKey) << sectionShift,
CoordinateUtils.getChunkZ(sectionKey) << sectionShift
@@ -761,11 +757,15 @@ public final class ChunkHolderManager {
try {
final Long2IntOpenHashMap chunkToExpireCount = this.sectionToChunkToExpireCount.get(sectionKey);
- if (chunkToExpireCount == null) {
- // lost to some race
+ if (chunkToExpireCount == null || chunkToExpireCount.isEmpty()) {
+ // Section was removed or is empty, clean up
+ if (chunkToExpireCount != null && chunkToExpireCount.isEmpty()) {
+ this.sectionToChunkToExpireCount.remove(sectionKey);
+ }
continue;
}
+ // Process each chunk in this section
for (final Iterator<Long2IntMap.Entry> iterator1 = chunkToExpireCount.long2IntEntrySet().fastIterator(); iterator1.hasNext();) {
final Long2IntMap.Entry entry = iterator1.next();
@@ -773,33 +773,51 @@ public final class ChunkHolderManager {
final int expireCount = entry.getIntValue();
final SortedArraySet<Ticket<?>> tickets = this.tickets.get(chunkKey);
+ if (tickets == null || tickets.isEmpty()) {
+ iterator1.remove();
+ continue;
+ }
+
final int levelBefore = getTicketLevelAt(tickets);
+ int expiredCount = 0;
- final int sizeBefore = tickets.size();
- tickets.removeIf(expireNow);
- final int sizeAfter = tickets.size();
- final int levelAfter = getTicketLevelAt(tickets);
+ // More efficient ticket processing - avoids creating a new predicate each time
+ for (Iterator<Ticket<?>> ticketIterator = tickets.iterator(); ticketIterator.hasNext();) {
+ Ticket<?> ticket = ticketIterator.next();
+ long removeDelay = ((ChunkSystemTicket<?>)(Object)ticket).moonrise$getRemoveDelay();
+
+ if (removeDelay == NO_TIMEOUT_MARKER) {
+ continue;
+ }
+
+ --removeDelay;
+ if (removeDelay <= 0) {
+ ticketIterator.remove();
+ expiredCount++;
+ } else {
+ ((ChunkSystemTicket<?>)(Object)ticket).moonrise$setRemoveDelay(removeDelay);
+ }
+ }
if (tickets.isEmpty()) {
this.tickets.remove(chunkKey);
}
+
+ final int levelAfter = getTicketLevelAt(tickets);
if (levelBefore != levelAfter) {
this.updateTicketLevel(chunkKey, levelAfter);
}
- final int newExpireCount = expireCount - (sizeBefore - sizeAfter);
-
- if (newExpireCount == expireCount) {
- continue;
- }
-
- if (newExpireCount != 0) {
- entry.setValue(newExpireCount);
- } else {
+ // Update expire count
+ final int newExpireCount = expireCount - expiredCount;
+ if (newExpireCount <= 0) {
iterator1.remove();
+ } else if (newExpireCount != expireCount) {
+ entry.setValue(newExpireCount);
}
}
+ // Remove empty sections
if (chunkToExpireCount.isEmpty()) {
this.sectionToChunkToExpireCount.remove(sectionKey);
}
@@ -807,6 +825,7 @@ public final class ChunkHolderManager {
this.ticketLockArea.unlock(ticketLock);
}
}
+ // Leaf end - Rework ChunkHolderManager
this.processTicketUpdates();
}
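
The rework above first snapshots the section keys and then re-checks each key before use, so entries removed by other threads are simply skipped; the real code additionally holds a per-section area lock around each update, as the hunk shows. A minimal sketch of the snapshot-then-recheck pattern, using java.util.concurrent.ConcurrentHashMap as a stand-in for the Moonrise long-keyed table:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

final class SnapshotIterationDemo {
    // Decrement every counter once, tolerating concurrent removals: iterate a key
    // snapshot rather than the live key set, and re-check each key before touching it.
    static void expireAll(ConcurrentHashMap<Long, Integer> expireCounts) {
        List<Long> keys = new ArrayList<>(expireCounts.keySet()); // snapshot
        for (Long key : keys) {
            Integer count = expireCounts.get(key);
            if (count == null) {
                continue; // removed concurrently between the snapshot and now
            }
            int next = count - 1;
            if (next <= 0) {
                expireCounts.remove(key);
            } else {
                expireCounts.put(key, next);
            }
        }
    }
}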


@@ -6,10 +6,10 @@ Subject: [PATCH] SparklyPaper: Parallel world ticking
Original project: https://github.com/SparklyPower/SparklyPaper
diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
index d6a30d6735d24f24a8108b6a5d15725587bb662a..39517966935265bc4533d4ce414d2df72df5a614 100644
index be820c6093dd2ae7642b9bee11edf65e3a8d7242..06ac3537f5655d048d770bb004243f207fad9faa 100644
--- a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
+++ b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
@@ -1050,7 +1050,7 @@ public final class ChunkHolderManager {
@@ -1031,7 +1031,7 @@ public final class ChunkHolderManager {
if (changedFullStatus.isEmpty()) {
return;
}
@@ -18,7 +18,7 @@ index d6a30d6735d24f24a8108b6a5d15725587bb662a..39517966935265bc4533d4ce414d2df7
this.taskScheduler.scheduleChunkTask(() -> {
final ArrayDeque<NewChunkHolder> pendingFullLoadUpdate = ChunkHolderManager.this.pendingFullLoadUpdate;
for (int i = 0, len = changedFullStatus.size(); i < len; ++i) {
@@ -1076,7 +1076,12 @@ public final class ChunkHolderManager {
@@ -1057,7 +1057,12 @@ public final class ChunkHolderManager {
// note: never call while inside the chunk system, this will absolutely break everything
public void processUnloads() {
@@ -32,7 +32,7 @@ index d6a30d6735d24f24a8108b6a5d15725587bb662a..39517966935265bc4533d4ce414d2df7
if (BLOCK_TICKET_UPDATES.get() == Boolean.TRUE) {
throw new IllegalStateException("Cannot unload chunks recursively");
@@ -1358,7 +1363,7 @@ public final class ChunkHolderManager {
@@ -1339,7 +1344,7 @@ public final class ChunkHolderManager {
List<NewChunkHolder> changedFullStatus = null;


@@ -174,7 +174,7 @@ index 3a6db5bc0c8be7d68e15317a621c1965fdc3a9bd..50a9903367f49ece2a267d10944b1515
// Paper start - rewrite chunk system
private volatile ca.spottedleaf.moonrise.patches.starlight.light.SWMRNibbleArray[] blockNibbles;
diff --git a/net/minecraft/world/level/chunk/storage/SerializableChunkData.java b/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
index c0939c311c554a4660b80725294663bab7915733..e2df93b2500a74c4cecac1515f3991967a07a052 100644
index 6b6aaeca14178b5b709e20ae13552d42217f15c0..e9ece9b618b0a9eb82b9f07a09ee6cb60cf7ec16 100644
--- a/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
+++ b/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
@@ -92,6 +92,7 @@ public record SerializableChunkData(
@@ -229,7 +229,7 @@ index c0939c311c554a4660b80725294663bab7915733..e2df93b2500a74c4cecac1515f399196
if (chunkType == ChunkType.LEVELCHUNK) {
return this.loadStarlightLightData(level, new ImposterProtoChunk((LevelChunk)chunkAccess, false)); // Paper - starlight
} else {
@@ -603,6 +627,7 @@ public record SerializableChunkData(
@@ -587,6 +611,7 @@ public record SerializableChunkData(
persistentDataContainer = chunk.persistentDataContainer.toTagCompound();
}
// CraftBukkit end
@@ -237,7 +237,7 @@ index c0939c311c554a4660b80725294663bab7915733..e2df93b2500a74c4cecac1515f399196
return new SerializableChunkData(
level.registryAccess().lookupOrThrow(Registries.BIOME),
pos,
@@ -623,6 +648,7 @@ public record SerializableChunkData(
@@ -607,6 +632,7 @@ public record SerializableChunkData(
list1,
compoundTag
, persistentDataContainer // CraftBukkit - persistentDataContainer
@@ -245,7 +245,7 @@ index c0939c311c554a4660b80725294663bab7915733..e2df93b2500a74c4cecac1515f399196
);
}
}
@@ -719,6 +745,21 @@ public record SerializableChunkData(
@@ -703,6 +729,21 @@ public record SerializableChunkData(
compoundTag.put("ChunkBukkitValues", this.persistentDataContainer);
}
// CraftBukkit end
@@ -267,7 +267,7 @@ index c0939c311c554a4660b80725294663bab7915733..e2df93b2500a74c4cecac1515f399196
// Paper start - starlight
if (this.lightCorrect && !this.chunkStatus.isBefore(net.minecraft.world.level.chunk.status.ChunkStatus.LIGHT)) {
// clobber vanilla value to force vanilla to relight
@@ -947,4 +988,50 @@ public record SerializableChunkData(
@@ -931,4 +972,50 @@ public record SerializableChunkData(
}
// Paper end - starlight - convert from record
}


@@ -5,7 +5,7 @@ Subject: [PATCH] Async Block Finding
diff --git a/net/minecraft/world/entity/ai/goal/MoveToBlockGoal.java b/net/minecraft/world/entity/ai/goal/MoveToBlockGoal.java
index 3f080b15543bf8c5fa0774b62d7f12e13b82511a..007da9cb39ff76285c52ce0abdff60997acdff0f 100644
index 3f080b15543bf8c5fa0774b62d7f12e13b82511a..d70ed3ace6fa8f97bcc0d493842f44f43072a610 100644
--- a/net/minecraft/world/entity/ai/goal/MoveToBlockGoal.java
+++ b/net/minecraft/world/entity/ai/goal/MoveToBlockGoal.java
@@ -20,6 +20,18 @@ public abstract class MoveToBlockGoal extends Goal {
@@ -102,7 +102,7 @@ index 3f080b15543bf8c5fa0774b62d7f12e13b82511a..007da9cb39ff76285c52ce0abdff6099
+ this.isValidTarget(this.mob.level(), pos)) {
+
+ this.blockPos = pos;
+ this.mob.movingTarget = pos == BlockPos.ZERO ? null : pos;
+ this.mob.movingTarget = this.blockPos == BlockPos.ZERO ? null : this.blockPos; // Use the assigned blockPos
+ return true;
+ }
+ }
@@ -177,7 +177,7 @@ index 3f080b15543bf8c5fa0774b62d7f12e13b82511a..007da9cb39ff76285c52ce0abdff6099
- this.blockPos = mutableBlockPos;
- this.mob.movingTarget = mutableBlockPos == BlockPos.ZERO ? null : mutableBlockPos.immutable(); // Paper
+ this.blockPos = mutableBlockPos.immutable(); // Leaf - Async Block Finding
+ this.mob.movingTarget = this.blockPos == BlockPos.ZERO ? null : this.blockPos; // Paper // Leaf - Async Block Finding
+ this.mob.movingTarget = this.blockPos == BlockPos.ZERO ? null : this.blockPos; // Paper // Leaf - Async Block Finding - Use the assigned blockPos
return true;
}
}
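
The "Use the assigned blockPos" comments above are about reusing the immutable copy that was just stored in blockPos instead of referring back to the mutable cursor. A minimal sketch of why that matters, with hypothetical MutablePoint/Point types rather than Minecraft's BlockPos:

final class TargetSelectDemo {
    record Point(int x, int y, int z) {}

    static final class MutablePoint {
        int x, y, z;
        MutablePoint set(int x, int y, int z) { this.x = x; this.y = y; this.z = z; return this; }
        Point immutable() { return new Point(x, y, z); }
    }

    Point blockPos;
    Point movingTarget;

    void select(MutablePoint cursor) {
        this.blockPos = cursor.immutable();  // freeze the cursor's current position once
        this.movingTarget = this.blockPos;   // reuse the frozen copy; storing the cursor itself
                                             // would let later cursor moves change the target
    }
}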


@@ -0,0 +1,148 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Sun, 13 Apr 2025 16:15:17 +0200
Subject: [PATCH] Replace ConcurrentLong2ReferenceChainedHashTable with custom
map
diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/queue/ChunkUnloadQueue.java b/ca/spottedleaf/moonrise/patches/chunk_system/queue/ChunkUnloadQueue.java
index 7eafc5b7cba23d8dec92ecc1050afe3fd8c9e309..c2d5e83f0bdf98d3c07d6da2bba3b1ebaf7307d5 100644
--- a/ca/spottedleaf/moonrise/patches/chunk_system/queue/ChunkUnloadQueue.java
+++ b/ca/spottedleaf/moonrise/patches/chunk_system/queue/ChunkUnloadQueue.java
@@ -7,6 +7,8 @@ import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import it.unimi.dsi.fastutil.longs.LongIterator;
import it.unimi.dsi.fastutil.longs.LongLinkedOpenHashSet;
+import org.dreeam.leaf.util.map.spottedleaf.LeafConcurrentLong2ReferenceChainedHashTable;
+
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@@ -16,7 +18,7 @@ public final class ChunkUnloadQueue {
public final int coordinateShift;
private final AtomicLong orderGenerator = new AtomicLong();
- private final ConcurrentLong2ReferenceChainedHashTable<UnloadSection> unloadSections = new ConcurrentLong2ReferenceChainedHashTable<>();
+ private final LeafConcurrentLong2ReferenceChainedHashTable<UnloadSection> unloadSections = new LeafConcurrentLong2ReferenceChainedHashTable<>();
/*
* Note: write operations do not occur in parallel for any given section.
@@ -32,8 +34,8 @@ public final class ChunkUnloadQueue {
public List<SectionToUnload> retrieveForAllRegions() {
final List<SectionToUnload> ret = new ArrayList<>();
- for (final Iterator<ConcurrentLong2ReferenceChainedHashTable.TableEntry<UnloadSection>> iterator = this.unloadSections.entryIterator(); iterator.hasNext();) {
- final ConcurrentLong2ReferenceChainedHashTable.TableEntry<UnloadSection> entry = iterator.next();
+ for (final Iterator<LeafConcurrentLong2ReferenceChainedHashTable.TableEntry<UnloadSection>> iterator = this.unloadSections.entryIterator(); iterator.hasNext();) {
+ final LeafConcurrentLong2ReferenceChainedHashTable.TableEntry<UnloadSection> entry = iterator.next();
final long key = entry.getKey();
final UnloadSection section = entry.getValue();
final int sectionX = CoordinateUtils.getChunkX(key);
@@ -141,4 +143,4 @@ public final class ChunkUnloadQueue {
this.order = order;
}
}
-}
\ No newline at end of file
+}
diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
index 06ac3537f5655d048d770bb004243f207fad9faa..a1f328a5c4ccc030c99762a68008ab1ecebdc06e 100644
--- a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
+++ b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
@@ -40,6 +40,7 @@ import net.minecraft.util.SortedArraySet;
import net.minecraft.util.Unit;
import net.minecraft.world.level.ChunkPos;
import net.minecraft.world.level.chunk.LevelChunk;
+import org.dreeam.leaf.util.map.spottedleaf.LeafConcurrentLong2ReferenceChainedHashTable;
import org.slf4j.Logger;
import java.io.IOException;
import java.text.DecimalFormat;
@@ -71,11 +72,11 @@ public final class ChunkHolderManager {
private static final long PROBE_MARKER = Long.MIN_VALUE + 1;
public final ReentrantAreaLock ticketLockArea;
- private final ConcurrentLong2ReferenceChainedHashTable<SortedArraySet<Ticket<?>>> tickets = new ConcurrentLong2ReferenceChainedHashTable<>();
- private final ConcurrentLong2ReferenceChainedHashTable<Long2IntOpenHashMap> sectionToChunkToExpireCount = new ConcurrentLong2ReferenceChainedHashTable<>();
+ private final LeafConcurrentLong2ReferenceChainedHashTable<SortedArraySet<Ticket<?>>> tickets = new LeafConcurrentLong2ReferenceChainedHashTable<>();
+ private final LeafConcurrentLong2ReferenceChainedHashTable<Long2IntOpenHashMap> sectionToChunkToExpireCount = new LeafConcurrentLong2ReferenceChainedHashTable<>();
final ChunkUnloadQueue unloadQueue;
- private final ConcurrentLong2ReferenceChainedHashTable<NewChunkHolder> chunkHolders = ConcurrentLong2ReferenceChainedHashTable.createWithCapacity(16384, 0.25f);
+ private final LeafConcurrentLong2ReferenceChainedHashTable<NewChunkHolder> chunkHolders = LeafConcurrentLong2ReferenceChainedHashTable.createWithCapacity(16384, 0.25f);
private final ServerLevel world;
private final ChunkTaskScheduler taskScheduler;
private long currentTick;
@@ -1422,9 +1423,9 @@ public final class ChunkHolderManager {
final JsonArray allTicketsJson = new JsonArray();
ret.add("tickets", allTicketsJson);
- for (final Iterator<ConcurrentLong2ReferenceChainedHashTable.TableEntry<SortedArraySet<Ticket<?>>>> iterator = this.tickets.entryIterator();
+ for (final Iterator<LeafConcurrentLong2ReferenceChainedHashTable.TableEntry<SortedArraySet<Ticket<?>>>> iterator = this.tickets.entryIterator();
iterator.hasNext();) {
- final ConcurrentLong2ReferenceChainedHashTable.TableEntry<SortedArraySet<Ticket<?>>> coordinateTickets = iterator.next();
+ final LeafConcurrentLong2ReferenceChainedHashTable.TableEntry<SortedArraySet<Ticket<?>>> coordinateTickets = iterator.next();
final long coordinate = coordinateTickets.getKey();
final SortedArraySet<Ticket<?>> tickets = coordinateTickets.getValue();
diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ThreadedTicketLevelPropagator.java b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ThreadedTicketLevelPropagator.java
index 310a8f80debadd64c2d962ebf83b7d0505ce6e42..b69d256e2f6bab3c1b90c5f8c42caa3d80cd67a4 100644
--- a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ThreadedTicketLevelPropagator.java
+++ b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ThreadedTicketLevelPropagator.java
@@ -10,6 +10,8 @@ import it.unimi.dsi.fastutil.longs.Long2ByteLinkedOpenHashMap;
import it.unimi.dsi.fastutil.shorts.Short2ByteLinkedOpenHashMap;
import it.unimi.dsi.fastutil.shorts.Short2ByteMap;
import it.unimi.dsi.fastutil.shorts.ShortOpenHashSet;
+import org.dreeam.leaf.util.map.spottedleaf.LeafConcurrentLong2ReferenceChainedHashTable;
+
import java.lang.invoke.VarHandle;
import java.util.ArrayDeque;
import java.util.ArrayList;
@@ -35,11 +37,11 @@ public abstract class ThreadedTicketLevelPropagator {
}
private final UpdateQueue updateQueue;
- private final ConcurrentLong2ReferenceChainedHashTable<Section> sections;
+ private final LeafConcurrentLong2ReferenceChainedHashTable<Section> sections;
public ThreadedTicketLevelPropagator() {
this.updateQueue = new UpdateQueue();
- this.sections = new ConcurrentLong2ReferenceChainedHashTable<>();
+ this.sections = new LeafConcurrentLong2ReferenceChainedHashTable<>();
}
// must hold ticket lock for:
diff --git a/ca/spottedleaf/moonrise/patches/starlight/light/StarLightInterface.java b/ca/spottedleaf/moonrise/patches/starlight/light/StarLightInterface.java
index 1487b7d8be435b3fbad2aabd05796965b4775a87..54c425ba84c7c70becdfbde08812afdde777f5a8 100644
--- a/ca/spottedleaf/moonrise/patches/starlight/light/StarLightInterface.java
+++ b/ca/spottedleaf/moonrise/patches/starlight/light/StarLightInterface.java
@@ -27,6 +27,8 @@ import net.minecraft.world.level.chunk.LightChunkGetter;
import net.minecraft.world.level.chunk.status.ChunkStatus;
import net.minecraft.world.level.lighting.LayerLightEventListener;
import net.minecraft.world.level.lighting.LevelLightEngine;
+import org.dreeam.leaf.util.map.spottedleaf.LeafConcurrentLong2ReferenceChainedHashTable;
+
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashSet;
@@ -740,7 +742,7 @@ public final class StarLightInterface {
public static final class ServerLightQueue extends LightQueue {
- private final ConcurrentLong2ReferenceChainedHashTable<ServerChunkTasks> chunkTasks = new ConcurrentLong2ReferenceChainedHashTable<>();
+ private final LeafConcurrentLong2ReferenceChainedHashTable<ServerChunkTasks> chunkTasks = new LeafConcurrentLong2ReferenceChainedHashTable<>();
public ServerLightQueue(final StarLightInterface lightInterface) {
super(lightInterface);
diff --git a/net/minecraft/server/level/ServerChunkCache.java b/net/minecraft/server/level/ServerChunkCache.java
index b1f1b596a597d559aa672a3cb46a03917ad746af..d75f85208da0c7424fc95ae0d8ebb0a725dda0a7 100644
--- a/net/minecraft/server/level/ServerChunkCache.java
+++ b/net/minecraft/server/level/ServerChunkCache.java
@@ -72,7 +72,7 @@ public class ServerChunkCache extends ChunkSource implements ca.spottedleaf.moon
@VisibleForDebug
private NaturalSpawner.SpawnState lastSpawnState;
// Paper start
- private final ca.spottedleaf.concurrentutil.map.ConcurrentLong2ReferenceChainedHashTable<net.minecraft.world.level.chunk.LevelChunk> fullChunks = new ca.spottedleaf.concurrentutil.map.ConcurrentLong2ReferenceChainedHashTable<>();
+ private final org.dreeam.leaf.util.map.spottedleaf.LeafConcurrentLong2ReferenceChainedHashTable<LevelChunk> fullChunks = new org.dreeam.leaf.util.map.spottedleaf.LeafConcurrentLong2ReferenceChainedHashTable<>();
public int getFullChunksCount() {
return this.fullChunks.size();
}
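
The swap above only changes the type name at each call site, which relies on the replacement exposing the same operations as ConcurrentLong2ReferenceChainedHashTable. A hypothetical outline of that surface area as exercised in this commit (the real LeafConcurrentLong2ReferenceChainedHashTable is not shown here; it also provides a static createWithCapacity(int, float) factory, as the ChunkHolderManager hunk uses):

import java.util.Iterator;
import java.util.PrimitiveIterator;

// Hypothetical sketch of the long-keyed concurrent map surface the call sites above depend on.
interface LongKeyedConcurrentMap<V> {
    V get(long key);
    V put(long key, V value);
    V remove(long key);
    boolean containsKey(long key);
    int size();
    PrimitiveIterator.OfLong keyIterator();      // used with nextLong() in ChunkHolderManager
    Iterator<TableEntry<V>> entryIterator();     // used in ChunkUnloadQueue and the debug dump

    interface TableEntry<V> {
        long getKey();
        V getValue();
    }
}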


@@ -0,0 +1,151 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Mon, 14 Apr 2025 03:02:42 +0200
Subject: [PATCH] Reworked ChunkHolderManager
diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
index a1f328a5c4ccc030c99762a68008ab1ecebdc06e..3de8d0fb485e55f3fc38a65c251f109335595468 100644
--- a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
+++ b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
@@ -349,12 +349,13 @@ public final class ChunkHolderManager {
@Override
protected void processLevelUpdates(final Long2ByteLinkedOpenHashMap updates) {
// first the necessary chunkholders must be created, so just update the ticket levels
+ final LeafConcurrentLong2ReferenceChainedHashTable<NewChunkHolder> holderMap = ChunkHolderManager.this.chunkHolders;
for (final Iterator<Long2ByteMap.Entry> iterator = updates.long2ByteEntrySet().fastIterator(); iterator.hasNext();) {
final Long2ByteMap.Entry entry = iterator.next();
final long key = entry.getLongKey();
final int newLevel = convertBetweenTicketLevels((int)entry.getByteValue());
- NewChunkHolder current = ChunkHolderManager.this.chunkHolders.get(key);
+ NewChunkHolder current = holderMap.get(key);
if (current == null && newLevel > MAX_TICKET_LEVEL) {
// not loaded and it shouldn't be loaded!
iterator.remove();
@@ -371,7 +372,7 @@ public final class ChunkHolderManager {
if (current == null) {
// must create
current = ChunkHolderManager.this.createChunkHolder(key);
- ChunkHolderManager.this.chunkHolders.put(key, current);
+ holderMap.put(key, current);
current.updateTicketLevel(newLevel);
} else {
current.updateTicketLevel(newLevel);
@@ -737,20 +738,23 @@ public final class ChunkHolderManager {
final int sectionShift = ((ChunkSystemServerLevel)this.world).moonrise$getRegionChunkShift();
+
final Predicate<Ticket<?>> expireNow = (final Ticket<?> ticket) -> {
long removeDelay = ((ChunkSystemTicket<?>)(Object)ticket).moonrise$getRemoveDelay();
if (removeDelay == NO_TIMEOUT_MARKER) {
return false;
}
--removeDelay;
- ((ChunkSystemTicket<?>)(Object)ticket).moonrise$setRemoveDelay(removeDelay);
- return removeDelay <= 0L;
+ final long nextDelay = removeDelay - 1;
+ ((ChunkSystemTicket<?>)(Object)ticket).moonrise$setRemoveDelay(nextDelay);
+ return nextDelay <= 0L;
};
for (final PrimitiveIterator.OfLong iterator = this.sectionToChunkToExpireCount.keyIterator(); iterator.hasNext();) {
final long sectionKey = iterator.nextLong();
if (!this.sectionToChunkToExpireCount.containsKey(sectionKey)) {
+
// removed concurrently
continue;
}
@@ -773,37 +777,62 @@ public final class ChunkHolderManager {
final long chunkKey = entry.getLongKey();
final int expireCount = entry.getIntValue();
+
final SortedArraySet<Ticket<?>> tickets = this.tickets.get(chunkKey);
- final int levelBefore = getTicketLevelAt(tickets);
+ if (tickets == null) {
+ iterator1.remove();
+ continue;
+ }
+ final int levelBefore;
+ final Ticket<?> firstBefore;
final int sizeBefore = tickets.size();
- tickets.removeIf(expireNow);
- final int sizeAfter = tickets.size();
- final int levelAfter = getTicketLevelAt(tickets);
- if (tickets.isEmpty()) {
- this.tickets.remove(chunkKey);
- }
- if (levelBefore != levelAfter) {
- this.updateTicketLevel(chunkKey, levelAfter);
+ if (!tickets.isEmpty()) {
+ firstBefore = tickets.first();
+ levelBefore = firstBefore.getTicketLevel();
+ } else {
+ firstBefore = null;
+ levelBefore = MAX_TICKET_LEVEL + 1;
}
- final int newExpireCount = expireCount - (sizeBefore - sizeAfter);
+ final boolean changed = tickets.removeIf(expireNow);
- if (newExpireCount == expireCount) {
- continue;
- }
+ if (changed) {
+ final int sizeAfter = tickets.size();
+ final int levelAfter;
+ boolean levelMightHaveChanged = true;
- if (newExpireCount != 0) {
- entry.setValue(newExpireCount);
- } else {
- iterator1.remove();
+ if (tickets.isEmpty()) {
+ levelAfter = MAX_TICKET_LEVEL + 1;
+ this.tickets.remove(chunkKey);
+ } else {
+ final Ticket<?> firstAfter = tickets.first();
+ if (firstBefore == firstAfter) {
+ levelMightHaveChanged = false;
+ levelAfter = levelBefore;
+ } else {
+ levelAfter = firstAfter.getTicketLevel();
+ }
+ }
+
+ if (levelMightHaveChanged && levelBefore != levelAfter) {
+ this.updateTicketLevel(chunkKey, levelAfter);
+ }
+
+ final int removedCount = sizeBefore - sizeAfter;
+ if (removedCount > 0) {
+ final int newExpireCount = expireCount - removedCount;
+ if (newExpireCount > 0) {
+ entry.setValue(newExpireCount);
+ } else {
+ iterator1.remove();
+ }
+ }
}
- }
- if (chunkToExpireCount.isEmpty()) {
- this.sectionToChunkToExpireCount.remove(sectionKey);
}
+ if (chunkToExpireCount.isEmpty()) { this.sectionToChunkToExpireCount.remove(sectionKey); }
} finally {
this.ticketLockArea.unlock(ticketLock);
}
@@ -812,6 +841,7 @@ public final class ChunkHolderManager {
this.processTicketUpdates();
}
+
public NewChunkHolder getChunkHolder(final int chunkX, final int chunkZ) {
return this.chunkHolders.get(CoordinateUtils.getChunkKey(chunkX, chunkZ));
}
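
Compared to the earlier rework, the loop above keeps the removeIf predicate but avoids recomputing the ticket level when nothing changed: the level comes from the first element of the sorted ticket set (or MAX_TICKET_LEVEL + 1 when empty), so if the first ticket is the same object after removal, the level cannot have moved. A minimal sketch of that shortcut, with a plain TreeSet<Integer> standing in for SortedArraySet<Ticket<?>> and a hypothetical expiry rule:

import java.util.TreeSet;

final class FirstElementShortcutDemo {
    // Returns true only when the removals could have changed the derived "level",
    // i.e. when something was removed and the first (smallest) element differs afterwards.
    static boolean levelMayHaveChanged(TreeSet<Integer> ticketLevels) {
        Integer firstBefore = ticketLevels.isEmpty() ? null : ticketLevels.first();

        boolean removedAny = ticketLevels.removeIf(level -> level > 40); // hypothetical expiry rule
        if (!removedAny) {
            return false; // nothing removed, level definitely unchanged
        }

        Integer firstAfter = ticketLevels.isEmpty() ? null : ticketLevels.first();
        return firstBefore == null ? firstAfter != null : !firstBefore.equals(firstAfter);
    }
}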


@@ -0,0 +1,277 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Mon, 14 Apr 2025 14:36:57 +0200
Subject: [PATCH] Optimize ThreadedTicketLevelPropagator
diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ThreadedTicketLevelPropagator.java b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ThreadedTicketLevelPropagator.java
index b69d256e2f6bab3c1b90c5f8c42caa3d80cd67a4..e8dddc7fca4b0383844be5337a87c4bc1de204b7 100644
--- a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ThreadedTicketLevelPropagator.java
+++ b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ThreadedTicketLevelPropagator.java
@@ -780,11 +780,13 @@ public abstract class ThreadedTicketLevelPropagator {
// minimum number of bits to represent [0, SECTION_SIZE * SECTION_CACHE_WIDTH)
private static final int COORDINATE_BITS = 9;
private static final int COORDINATE_SIZE = 1 << COORDINATE_BITS;
+
static {
if ((SECTION_SIZE * SECTION_CACHE_WIDTH) > (1 << COORDINATE_BITS)) {
throw new IllegalStateException("Adjust COORDINATE_BITS");
}
}
+
// index = x + (z * SECTION_CACHE_WIDTH)
// (this requires x >= 0 and z >= 0)
private final Section[] sections = new Section[SECTION_CACHE_WIDTH * SECTION_CACHE_WIDTH];
@@ -828,8 +830,8 @@ public abstract class ThreadedTicketLevelPropagator {
// must hold ticket lock for (centerSectionX,centerSectionZ) in radius rad
// must call setupEncodeOffset
private final void setupCaches(final ThreadedTicketLevelPropagator propagator,
- final int centerSectionX, final int centerSectionZ,
- final int rad) {
+ final int centerSectionX, final int centerSectionZ,
+ final int rad) {
for (int dz = -rad; dz <= rad; ++dz) {
for (int dx = -rad; dx <= rad; ++dx) {
final int sectionX = centerSectionX + dx;
@@ -847,29 +849,29 @@ public abstract class ThreadedTicketLevelPropagator {
}
private final void setSectionInCache(final int sectionX, final int sectionZ, final Section section) {
- this.sections[sectionX + SECTION_CACHE_WIDTH*sectionZ + this.sectionIndexOffset] = section;
+ this.sections[sectionX + SECTION_CACHE_WIDTH * sectionZ + this.sectionIndexOffset] = section;
}
private final Section getSection(final int sectionX, final int sectionZ) {
- return this.sections[sectionX + SECTION_CACHE_WIDTH*sectionZ + this.sectionIndexOffset];
+ return this.sections[sectionX + SECTION_CACHE_WIDTH * sectionZ + this.sectionIndexOffset];
}
private final int getLevel(final int posX, final int posZ) {
- final Section section = this.sections[(posX >> SECTION_SHIFT) + SECTION_CACHE_WIDTH*(posZ >> SECTION_SHIFT) + this.sectionIndexOffset];
+ final Section section = this.sections[(posX >> SECTION_SHIFT) + SECTION_CACHE_WIDTH * (posZ >> SECTION_SHIFT) + this.sectionIndexOffset];
if (section != null) {
- return (int)section.levels[(posX & (SECTION_SIZE - 1)) | ((posZ & (SECTION_SIZE - 1)) << SECTION_SHIFT)] & 0xFF;
+ return (int) section.levels[(posX & (SECTION_SIZE - 1)) | ((posZ & (SECTION_SIZE - 1)) << SECTION_SHIFT)] & 0xFF;
}
return 0;
}
private final void setLevel(final int posX, final int posZ, final int to) {
- final Section section = this.sections[(posX >> SECTION_SHIFT) + SECTION_CACHE_WIDTH*(posZ >> SECTION_SHIFT) + this.sectionIndexOffset];
+ final Section section = this.sections[(posX >> SECTION_SHIFT) + SECTION_CACHE_WIDTH * (posZ >> SECTION_SHIFT) + this.sectionIndexOffset];
if (section != null) {
final int index = (posX & (SECTION_SIZE - 1)) | ((posZ & (SECTION_SIZE - 1)) << SECTION_SHIFT);
final short level = section.levels[index];
- section.levels[index] = (short)((level & ~0xFF) | (to & 0xFF));
- this.updatedPositions.put(CoordinateUtils.getChunkKey(posX, posZ), (byte)to);
+ section.levels[index] = (short) ((level & ~0xFF) | (to & 0xFF));
+ this.updatedPositions.put(CoordinateUtils.getChunkKey(posX, posZ), (byte) to);
}
}
@@ -882,8 +884,8 @@ public abstract class ThreadedTicketLevelPropagator {
// next LEVEL_BITS (6) bits: propagated level [0, 63]
// propagation directions bitset (16 bits):
private static final long ALL_DIRECTIONS_BITSET = (
- // z = -1
- (1L << ((1 - 1) | ((1 - 1) << 2))) |
+ // z = -1
+ (1L << ((1 - 1) | ((1 - 1) << 2))) |
(1L << ((1 + 0) | ((1 - 1) << 2))) |
(1L << ((1 + 1) | ((1 - 1) << 2))) |
@@ -920,7 +922,7 @@ public abstract class ThreadedTicketLevelPropagator {
}
private void ch(long bs, int shift) {
- int bitset = (int)(bs >>> shift);
+ int bitset = (int) (bs >>> shift);
for (int i = 0, len = Integer.bitCount(bitset); i < len; ++i) {
final int set = Integer.numberOfTrailingZeros(bitset);
final int tailingBit = (-bitset) & bitset;
@@ -1000,27 +1002,38 @@ public abstract class ThreadedTicketLevelPropagator {
final int decodeOffsetZ = -this.encodeOffsetZ;
final int encodeOffset = this.coordinateOffset;
final int sectionOffset = this.sectionIndexOffset;
+ final Section[] sectionsArray = this.sections;
final Long2ByteLinkedOpenHashMap updatedPositions = this.updatedPositions;
while (queueReadIndex < queueLength) {
final long queueValue = queue[queueReadIndex++];
- final int posX = ((int)queueValue & (COORDINATE_SIZE - 1)) + decodeOffsetX;
- final int posZ = (((int)queueValue >>> COORDINATE_BITS) & (COORDINATE_SIZE - 1)) + decodeOffsetZ;
- final int propagatedLevel = ((int)queueValue >>> (COORDINATE_BITS + COORDINATE_BITS)) & (LEVEL_COUNT - 1);
+ final int posX = ((int) queueValue & (COORDINATE_SIZE - 1)) + decodeOffsetX;
+ final int posZ = (((int) queueValue >>> COORDINATE_BITS) & (COORDINATE_SIZE - 1)) + decodeOffsetZ;
+ final int propagatedLevel = ((int) queueValue >>> (COORDINATE_BITS + COORDINATE_BITS)) & (LEVEL_COUNT - 1);
// note: the above code requires coordinate bits * 2 < 32
// bitset is 16 bits
- int propagateDirectionBitset = (int)(queueValue >>> (COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)) & ((1 << 16) - 1);
+ int propagateDirectionBitset = (int) (queueValue >>> (COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)) & ((1 << 16) - 1);
if ((queueValue & FLAG_RECHECK_LEVEL) != 0L) {
- if (this.getLevel(posX, posZ) != propagatedLevel) {
+ final int sectionX = posX >> SECTION_SHIFT;
+ final int sectionZ = posZ >> SECTION_SHIFT;
+ final Section section = sectionsArray[sectionX + (sectionZ * SECTION_CACHE_WIDTH) + sectionOffset];
+ final int localIdx = (posX & (SECTION_SIZE - 1)) | ((posZ & (SECTION_SIZE - 1)) << SECTION_SHIFT);
+ if ((section.levels[localIdx] & 0xFF) != propagatedLevel) {
// not at the level we expect, so something changed.
continue;
}
} else if ((queueValue & FLAG_WRITE_LEVEL) != 0L) {
// these are used to restore sources after a propagation decrease
- this.setLevel(posX, posZ, propagatedLevel);
+ final int sectionX = posX >> SECTION_SHIFT;
+ final int sectionZ = posZ >> SECTION_SHIFT;
+ final Section section = sectionsArray[sectionX + (sectionZ * SECTION_CACHE_WIDTH) + sectionOffset];
+ final int localIdx = (posX & (SECTION_SIZE - 1)) | ((posZ & (SECTION_SIZE - 1)) << SECTION_SHIFT);
+ final short currentLevel = section.levels[localIdx];
+ section.levels[localIdx] = (short) ((currentLevel & ~0xFF) | (propagatedLevel & 0xFF));
+ updatedPositions.put(CoordinateUtils.getChunkKey(posX, posZ), (byte) propagatedLevel);
}
// this bitset represents the values that we have not propagated to
@@ -1036,8 +1049,8 @@ public abstract class ThreadedTicketLevelPropagator {
// must guarantee that either we propagate everything in 1 radius or we partially propagate for 1 radius
// but the rest not propagated are already handled
long currentPropagation = ~(
- // z = -1
- (1L << ((2 - 1) | ((2 - 1) << 3))) |
+ // z = -1
+ (1L << ((2 - 1) | ((2 - 1) << 3))) |
(1L << ((2 + 0) | ((2 - 1) << 3))) |
(1L << ((2 + 1) | ((2 - 1) << 3))) |
@@ -1095,7 +1108,7 @@ public abstract class ThreadedTicketLevelPropagator {
currentPropagation ^= (bitsetLine1 | bitsetLine2 | bitsetLine3);
// now try to propagate
- final Section section = this.sections[sectionIndex];
+ final Section section = sectionsArray[sectionIndex];
// lower 8 bits are current level, next upper 7 bits are source level, next 1 bit is updated source flag
final short currentStoredLevel = section.levels[localIndex];
@@ -1106,8 +1119,8 @@ public abstract class ThreadedTicketLevelPropagator {
}
// update level
- section.levels[localIndex] = (short)((currentStoredLevel & ~0xFF) | (toPropagate & 0xFF));
- updatedPositions.putAndMoveToLast(CoordinateUtils.getChunkKey(offX, offZ), (byte)toPropagate);
+ section.levels[localIndex] = (short) ((currentStoredLevel & ~0xFF) | (toPropagate & 0xFF));
+ updatedPositions.putAndMoveToLast(CoordinateUtils.getChunkKey(offX, offZ), (byte) toPropagate);
// queue next
if (toPropagate > 1) {
@@ -1115,7 +1128,7 @@ public abstract class ThreadedTicketLevelPropagator {
// the child bitset is 4x4, so we just shift each line by 4
// add the propagation bitset offset to each line to make it easy to OR it into the propagation queue value
final long childPropagation =
- ((bitsetLine1 >>> (start)) << (COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)) | // z = -1
+ ((bitsetLine1 >>> (start)) << (COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)) | // z = -1
((bitsetLine2 >>> (start + 8)) << (4 + COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)) | // z = 0
((bitsetLine3 >>> (start + (8 + 8))) << (4 + 4 + COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)); // z = 1
@@ -1125,7 +1138,7 @@ public abstract class ThreadedTicketLevelPropagator {
queue = this.resizeIncreaseQueue();
}
queue[queueLength++] =
- ((long)(offX + (offZ << COORDINATE_BITS) + encodeOffset) & ((1L << (COORDINATE_BITS + COORDINATE_BITS)) - 1)) |
+ ((long) (offX + (offZ << COORDINATE_BITS) + encodeOffset) & ((1L << (COORDINATE_BITS + COORDINATE_BITS)) - 1)) |
((toPropagate & (LEVEL_COUNT - 1L)) << (COORDINATE_BITS + COORDINATE_BITS)) |
childPropagation; //(ALL_DIRECTIONS_BITSET << (COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS));
continue;
@@ -1146,18 +1159,19 @@ public abstract class ThreadedTicketLevelPropagator {
final int decodeOffsetZ = -this.encodeOffsetZ;
final int encodeOffset = this.coordinateOffset;
final int sectionOffset = this.sectionIndexOffset;
+ final Section[] sectionsArray = this.sections;
final Long2ByteLinkedOpenHashMap updatedPositions = this.updatedPositions;
while (queueReadIndex < queueLength) {
final long queueValue = queue[queueReadIndex++];
- final int posX = ((int)queueValue & (COORDINATE_SIZE - 1)) + decodeOffsetX;
- final int posZ = (((int)queueValue >>> COORDINATE_BITS) & (COORDINATE_SIZE - 1)) + decodeOffsetZ;
- final int propagatedLevel = ((int)queueValue >>> (COORDINATE_BITS + COORDINATE_BITS)) & (LEVEL_COUNT - 1);
+ final int posX = ((int) queueValue & (COORDINATE_SIZE - 1)) + decodeOffsetX;
+ final int posZ = (((int) queueValue >>> COORDINATE_BITS) & (COORDINATE_SIZE - 1)) + decodeOffsetZ;
+ final int propagatedLevel = ((int) queueValue >>> (COORDINATE_BITS + COORDINATE_BITS)) & (LEVEL_COUNT - 1);
// note: the above code requires coordinate bits * 2 < 32
// bitset is 16 bits
- int propagateDirectionBitset = (int)(queueValue >>> (COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)) & ((1 << 16) - 1);
+ int propagateDirectionBitset = (int) (queueValue >>> (COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)) & ((1 << 16) - 1);
// this bitset represents the values that we have not propagated to
// this bitset lets us determine what directions the neighbours we set should propagate to, in most cases
@@ -1172,8 +1186,8 @@ public abstract class ThreadedTicketLevelPropagator {
// must guarantee that either we propagate everything in 1 radius or we partially propagate for 1 radius
// but the rest not propagated are already handled
long currentPropagation = ~(
- // z = -1
- (1L << ((2 - 1) | ((2 - 1) << 3))) |
+ // z = -1
+ (1L << ((2 - 1) | ((2 - 1) << 3))) |
(1L << ((2 + 0) | ((2 - 1) << 3))) |
(1L << ((2 + 1) | ((2 - 1) << 3))) |
@@ -1229,7 +1243,7 @@ public abstract class ThreadedTicketLevelPropagator {
final long bitsetLine3 = currentPropagation & (7L << (start + (8 + 8)));
// now try to propagate
- final Section section = this.sections[sectionIndex];
+ final Section section = sectionsArray[sectionIndex];
// lower 8 bits are current level, next upper 7 bits are source level, next 1 bit is updated source flag
final short currentStoredLevel = section.levels[localIndex];
@@ -1246,7 +1260,7 @@ public abstract class ThreadedTicketLevelPropagator {
increaseQueue = this.resizeIncreaseQueue();
}
increaseQueue[increaseQueueLength++] =
- ((long)(offX + (offZ << COORDINATE_BITS) + encodeOffset) & ((1L << (COORDINATE_BITS + COORDINATE_BITS)) - 1)) |
+ ((long) (offX + (offZ << COORDINATE_BITS) + encodeOffset) & ((1L << (COORDINATE_BITS + COORDINATE_BITS)) - 1)) |
((currentLevel & (LEVEL_COUNT - 1L)) << (COORDINATE_BITS + COORDINATE_BITS)) |
(FLAG_RECHECK_LEVEL | (ALL_DIRECTIONS_BITSET << (COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)));
continue;
@@ -1257,8 +1271,8 @@ public abstract class ThreadedTicketLevelPropagator {
//currentPropagation ^= (bitsetLine1 | bitsetLine2 | bitsetLine3);
// update level
- section.levels[localIndex] = (short)((currentStoredLevel & ~0xFF));
- updatedPositions.putAndMoveToLast(CoordinateUtils.getChunkKey(offX, offZ), (byte)0);
+ section.levels[localIndex] = (short) ((currentStoredLevel & ~0xFF));
+ updatedPositions.putAndMoveToLast(CoordinateUtils.getChunkKey(offX, offZ), (byte) 0);
if (sourceLevel != 0) {
// re-propagate source
@@ -1267,7 +1281,7 @@ public abstract class ThreadedTicketLevelPropagator {
increaseQueue = this.resizeIncreaseQueue();
}
increaseQueue[increaseQueueLength++] =
- ((long)(offX + (offZ << COORDINATE_BITS) + encodeOffset) & ((1L << (COORDINATE_BITS + COORDINATE_BITS)) - 1)) |
+ ((long) (offX + (offZ << COORDINATE_BITS) + encodeOffset) & ((1L << (COORDINATE_BITS + COORDINATE_BITS)) - 1)) |
((sourceLevel & (LEVEL_COUNT - 1L)) << (COORDINATE_BITS + COORDINATE_BITS)) |
(FLAG_WRITE_LEVEL | (ALL_DIRECTIONS_BITSET << (COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)));
}
@@ -1278,7 +1292,7 @@ public abstract class ThreadedTicketLevelPropagator {
// the child bitset is 4x4, so we just shift each line by 4
// add the propagation bitset offset to each line to make it easy to OR it into the propagation queue value
final long childPropagation =
- ((bitsetLine1 >>> (start)) << (COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)) | // z = -1
+ ((bitsetLine1 >>> (start)) << (COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)) | // z = -1
((bitsetLine2 >>> (start + 8)) << (4 + COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)) | // z = 0
((bitsetLine3 >>> (start + (8 + 8))) << (4 + 4 + COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)); // z = 1
@@ -1288,7 +1302,7 @@ public abstract class ThreadedTicketLevelPropagator {
queue = this.resizeDecreaseQueue();
}
queue[queueLength++] =
- ((long)(offX + (offZ << COORDINATE_BITS) + encodeOffset) & ((1L << (COORDINATE_BITS + COORDINATE_BITS)) - 1)) |
+ ((long) (offX + (offZ << COORDINATE_BITS) + encodeOffset) & ((1L << (COORDINATE_BITS + COORDINATE_BITS)) - 1)) |
((toPropagate & (LEVEL_COUNT - 1L)) << (COORDINATE_BITS + COORDINATE_BITS)) |
(ALL_DIRECTIONS_BITSET << (COORDINATE_BITS + COORDINATE_BITS + LEVEL_BITS)); //childPropagation;
continue;
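
Most of the hunks above are whitespace normalisation around casts; the substantive part hoists this.sections into a local sectionsArray and inlines the getLevel/setLevel index math inside the hot queue loops. A minimal sketch of that hoisting, with hypothetical field and index layouts rather than the propagator's real encoding:

final class HoistFieldDemo {
    private final short[][] sections = new short[16][256];

    // Reads the field once into a local before the loop instead of dereferencing
    // this.sections on every iteration; the JIT often does this anyway, but the local
    // keeps the inlined index arithmetic short and explicit.
    int sumLevels(long[] queue, int length) {
        final short[][] sectionsArray = this.sections; // hoisted once
        int sum = 0;
        for (int i = 0; i < length; i++) {
            final int sectionIndex = (int) (queue[i] >>> 8) & 15;  // hypothetical packing
            final int localIndex = (int) queue[i] & 255;
            sum += sectionsArray[sectionIndex][localIndex] & 0xFF; // lower 8 bits hold the level
        }
        return sum;
    }
}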


@@ -0,0 +1,29 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Mon, 14 Apr 2025 18:07:21 +0200
Subject: [PATCH] Optimise MobEffectUtil#getDigSpeedAmplification
diff --git a/net/minecraft/world/effect/MobEffectUtil.java b/net/minecraft/world/effect/MobEffectUtil.java
index cbf1b6af928aa439c3264b302e5f1a1ddd4c14f0..c59a503ef8bc2dabcf9f7c85c8d93fb1fcadf71f 100644
--- a/net/minecraft/world/effect/MobEffectUtil.java
+++ b/net/minecraft/world/effect/MobEffectUtil.java
@@ -29,12 +29,14 @@ public final class MobEffectUtil {
public static int getDigSpeedAmplification(LivingEntity entity) {
int i = 0;
int i1 = 0;
- if (entity.hasEffect(MobEffects.DIG_SPEED)) {
- i = entity.getEffect(MobEffects.DIG_SPEED).getAmplifier();
+ MobEffectInstance digEffect = entity.getEffect(MobEffects.DIG_SPEED);
+ if (digEffect != null) {
+ i = digEffect.getAmplifier();
}
- if (entity.hasEffect(MobEffects.CONDUIT_POWER)) {
- i1 = entity.getEffect(MobEffects.CONDUIT_POWER).getAmplifier();
+ MobEffectInstance conduitEffect = entity.getEffect(MobEffects.CONDUIT_POWER);
+ if (conduitEffect != null) {
+ i1 = conduitEffect.getAmplifier();
}
return Math.max(i, i1);
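
The change above replaces each hasEffect/getEffect pair with a single getEffect call and a null check, so the effect lookup runs once per effect instead of twice. The same pattern in generic form, with a plain HashMap standing in for the entity's effect map:

import java.util.HashMap;
import java.util.Map;

final class SingleLookupDemo {
    // One lookup plus a null check instead of containsKey() followed by get(),
    // which would search the map twice for the same key.
    static int amplifierOf(Map<String, Integer> effects, String key) {
        Integer amplifier = effects.get(key); // single lookup
        return amplifier != null ? amplifier : 0;
    }

    public static void main(String[] args) {
        Map<String, Integer> effects = new HashMap<>();
        effects.put("dig_speed", 2);
        System.out.println(Math.max(amplifierOf(effects, "dig_speed"),
                                    amplifierOf(effects, "conduit_power"))); // prints 2
    }
}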


@@ -1,9 +1,47 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Fri, 28 Feb 2025 01:35:49 +0100
Subject: [PATCH] Optimize chunkUnload
Date: Mon, 14 Apr 2025 20:07:52 +0200
Subject: [PATCH] Optimise chunkUnloads
diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/NewChunkHolder.java b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/NewChunkHolder.java
index e4a5fa25ed368fc4662c30934da2963ef446d782..62ad5fe1196cd982b0d48b1e4903d036262ac54b 100644
--- a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/NewChunkHolder.java
+++ b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/NewChunkHolder.java
@@ -1753,23 +1753,20 @@ public final class NewChunkHolder {
chunk.tryMarkSaved();
final CallbackCompletable<CompoundTag> completable = new CallbackCompletable<>();
-
- final Runnable run = () -> {
- final CompoundTag data = chunkData.write();
-
- completable.complete(data);
-
- if (unloading) {
- NewChunkHolder.this.completeAsyncUnloadDataSave(MoonriseRegionFileIO.RegionFileType.CHUNK_DATA, data);
- }
- };
-
final PrioritisedExecutor.PrioritisedTask task;
+
if (unloading) {
- this.chunkDataUnload.toRun().setRunnable(run);
+ this.chunkDataUnload.toRun().setRunnable(() -> {
+ final CompoundTag data = chunkData.write();
+ completable.complete(data);
+ NewChunkHolder.this.completeAsyncUnloadDataSave(MoonriseRegionFileIO.RegionFileType.CHUNK_DATA, data);
+ });
task = this.chunkDataUnload.task();
} else {
- task = this.scheduler.saveExecutor.createTask(run);
+ task = this.scheduler.saveExecutor.createTask(() -> {
+ final CompoundTag data = chunkData.write();
+ completable.complete(data);
+ });
}
task.queue();
diff --git a/ca/spottedleaf/moonrise/patches/starlight/light/SWMRNibbleArray.java b/ca/spottedleaf/moonrise/patches/starlight/light/SWMRNibbleArray.java
index 4ca68a903e67606fc4ef0bfa9862a73797121c8b..bed3a64388bb43e47c2ba4e67f7dde5b990d9578 100644
--- a/ca/spottedleaf/moonrise/patches/starlight/light/SWMRNibbleArray.java
@@ -211,3 +249,128 @@ index b8ac6a9ba7b56ccd034757f7d135d272b8e69e90..dc158e981199b507531af810ff9ced3c
return new LevelChunkSection(this);
}
}
diff --git a/net/minecraft/world/level/chunk/storage/SerializableChunkData.java b/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
index e9ece9b618b0a9eb82b9f07a09ee6cb60cf7ec16..18d2ec110fc6670edb079eccf448389dc365eb88 100644
--- a/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
+++ b/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
@@ -526,14 +526,14 @@ public record SerializableChunkData(
throw new IllegalArgumentException("Chunk can't be serialized: " + chunk);
} else {
ChunkPos pos = chunk.getPos();
- List<SerializableChunkData.SectionData> list = new ArrayList<>(); final List<SerializableChunkData.SectionData> sectionsList = list; // Paper - starlight - OBFHELPER
- LevelChunkSection[] sections = chunk.getSections();
- LevelLightEngine lightEngine = level.getChunkSource().getLightEngine();
// Paper start - starlight
final int minLightSection = ca.spottedleaf.moonrise.common.util.WorldUtil.getMinLightSection(level);
final int maxLightSection = ca.spottedleaf.moonrise.common.util.WorldUtil.getMaxLightSection(level);
final int minBlockSection = ca.spottedleaf.moonrise.common.util.WorldUtil.getMinSection(level);
+ // Pre-allocate with correct capacity to avoid resizing
+ final int expectedSectionCount = maxLightSection - minLightSection + 1;
+ List<SerializableChunkData.SectionData> list = new ArrayList<>(expectedSectionCount);
final LevelChunkSection[] chunkSections = chunk.getSections();
final ca.spottedleaf.moonrise.patches.starlight.light.SWMRNibbleArray[] blockNibbles = ((ca.spottedleaf.moonrise.patches.starlight.chunk.StarlightChunk)chunk).starlight$getBlockNibbles();
@@ -551,10 +551,18 @@ public record SerializableChunkData(
continue;
}
+ DataLayer blockDataLayer = null;
+ if (blockNibble != null && blockNibble.data != null) {
+ blockDataLayer = new DataLayer(blockNibble.data);
+ }
+
+ DataLayer skyDataLayer = null;
+ if (skyNibble != null && skyNibble.data != null) {
+ skyDataLayer = new DataLayer(skyNibble.data);
+ }
+
final SerializableChunkData.SectionData sectionData = new SerializableChunkData.SectionData(
- lightSection, chunkSection,
- blockNibble == null ? null : (blockNibble.data == null ? null : new DataLayer(blockNibble.data)),
- skyNibble == null ? null : (skyNibble.data == null ? null : new DataLayer(skyNibble.data))
+ lightSection, chunkSection, blockDataLayer, skyDataLayer
);
if (blockNibble != null) {
@@ -565,28 +573,42 @@ public record SerializableChunkData(
((ca.spottedleaf.moonrise.patches.starlight.storage.StarlightSectionData)(Object)sectionData).starlight$setSkyLightState(skyNibble.state);
}
- sectionsList.add(sectionData);
+ list.add(sectionData);
}
// Paper end - starlight
- List<CompoundTag> list1 = new ArrayList<>(chunk.getBlockEntitiesPos().size());
+ // Pre-allocate block entities list with exact size needed
+ final int blockEntityCount = chunk.getBlockEntitiesPos().size();
+ List<CompoundTag> list1 = blockEntityCount > 0 ? new ArrayList<>(blockEntityCount) : java.util.Collections.emptyList();
- for (BlockPos blockPos : chunk.getBlockEntitiesPos()) {
- CompoundTag blockEntityNbtForSaving = chunk.getBlockEntityNbtForSaving(blockPos, level.registryAccess());
- if (blockEntityNbtForSaving != null) {
- list1.add(blockEntityNbtForSaving);
+ if (blockEntityCount > 0) {
+ for (BlockPos blockPos : chunk.getBlockEntitiesPos()) {
+ CompoundTag blockEntityNbtForSaving = chunk.getBlockEntityNbtForSaving(blockPos, level.registryAccess());
+ if (blockEntityNbtForSaving != null) {
+ list1.add(blockEntityNbtForSaving);
+ }
}
}
- List<CompoundTag> list2 = new ArrayList<>();
+ // For entities, use an initial estimated capacity if it's a ProtoChunk
+ List<CompoundTag> list2;
long[] longs = null;
+
if (chunk.getPersistedStatus().getChunkType() == ChunkType.PROTOCHUNK) {
ProtoChunk protoChunk = (ProtoChunk)chunk;
- list2.addAll(protoChunk.getEntities());
+ int entitySize = protoChunk.getEntities().size();
+ if (entitySize > 0) {
+ list2 = new ArrayList<>(Math.max(16, entitySize));
+ list2.addAll(protoChunk.getEntities());
+ } else {
+ list2 = java.util.Collections.emptyList();
+ }
CarvingMask carvingMask = protoChunk.getCarvingMask();
if (carvingMask != null) {
longs = carvingMask.toArray();
}
+ } else {
+ list2 = java.util.Collections.emptyList();
}
Map<Heightmap.Types, long[]> map = new EnumMap<>(Heightmap.Types.class);
@@ -594,14 +616,25 @@ public record SerializableChunkData(
for (Entry<Heightmap.Types, Heightmap> entry : chunk.getHeightmaps()) {
if (chunk.getPersistedStatus().heightmapsAfter().contains(entry.getKey())) {
long[] rawData = entry.getValue().getRawData();
- map.put(entry.getKey(), (long[])rawData.clone());
+ map.put(entry.getKey(), Arrays.copyOf(rawData, rawData.length));
}
}
ChunkAccess.PackedTicks ticksForSerialization = chunk.getTicksForSerialization(level.getGameTime());
- ShortList[] lists = Arrays.stream(chunk.getPostProcessing())
- .map(list3 -> list3 != null ? new ShortArrayList(list3) : null)
- .toArray(ShortList[]::new);
+ // Leaf start - Some Optimizations on SerializableChunkData
+ ShortList[] postProcessing = chunk.getPostProcessing();
+ ShortList[] lists = new ShortList[postProcessing.length];
+ for (int i = 0; i < postProcessing.length; i++) {
+ ShortList source = postProcessing[i];
+ // Only create a new list if there's actual data to copy
+ if (source != null) {
+ int size = source.size();
+ if (size > 0) {
+ lists[i] = new ShortArrayList(size);
+ lists[i].addAll(source);
+ }
+ }
+ }
CompoundTag compoundTag = packStructureData(
StructurePieceSerializationContext.fromLevel(level), pos, chunk.getAllStarts(), chunk.getAllReferences()
);
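
The SerializableChunkData hunks above also skip allocating lists that would stay empty by substituting Collections.emptyList(), which is safe only because nothing is added to those lists later in this path. A minimal sketch of the pattern with a hypothetical copyEntities helper:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

final class LazyListDemo {
    // Returns the shared immutable empty list for the common empty case and only
    // allocates (pre-sized) when there are elements to copy; callers must treat
    // the result as read-only.
    static List<String> copyEntities(List<String> source) {
        if (source.isEmpty()) {
            return Collections.emptyList();
        }
        List<String> out = new ArrayList<>(source.size());
        out.addAll(source);
        return out;
    }
}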