
Chunk improvements (#231)

* perf: SpatialPlayerIndex for isChunkNearPlayer

* perf: ensureCapacity with collectTickingChunks

* perf: optimize getSlopeDistance

* perf: optimize AABB Intersections

* perf: implement custom arrays for regions and caches

* perf: Improve SortedArraySet sorting (needs testing)

* rebase 1.21.4

* perf: optimize ClientBoundLightUpdatePacketData

* perf: O(1) Array Writes during Chunk Loading

* perf: Optimize LinearPalette (no not the linear format)

* perf: Rewrite ConcurrentLongHashSet

* rebase 1.21.4

* Fix Multithreaded Tracker (#236)

* duke gonna arrest me

* i hate git v2

* rebase

* dont worry ill change the name of this patch

* perf: Rewrite ConcurrentLongHashSet again

* perf: Optimize sendChunk

* [ci skip]

* cleanup

* aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa

* cleanup

* remove streams on LinearPalette and SerializableChunkData

* actually commit them lmao

* actually commit them lmao 2

* fix

* rebase

* perf: clone less (could help with skyblocks)

* perf: more unload stuff

* perf: manual loop unrolling and bulk copy

* initial size for SerializeableChunkData

* perf: async chunkSend

* cleanup asyncChunkSend

* remove experimental tag

* rebase

---------

Co-authored-by: Creeam <102713261+HaHaWTH@users.noreply.github.com>
Co-authored-by: Dreeam <61569423+Dreeam-qwq@users.noreply.github.com>
Authored by Taiyou on 2025-03-05 20:45:26 +01:00, committed by GitHub
parent ce59731e6f
commit cd7689b16f
16 changed files with 1749 additions and 160 deletions

View File

@@ -0,0 +1,33 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Sun, 16 Feb 2025 01:13:04 +0100
Subject: [PATCH] ensureCapacity with collectTickingChunks
diff --git a/net/minecraft/server/level/ServerChunkCache.java b/net/minecraft/server/level/ServerChunkCache.java
index dd956431bb882daa70267685f2283d0c358336be..ca12b33b1297afcb211bf9a4e423ee11c1ec57e9 100644
--- a/net/minecraft/server/level/ServerChunkCache.java
+++ b/net/minecraft/server/level/ServerChunkCache.java
@@ -573,17 +573,13 @@ public class ServerChunkCache extends ChunkSource implements ca.spottedleaf.moon
final ServerChunkCache.ChunkAndHolder[] raw = tickingChunks.getRawDataUnchecked();
final int size = tickingChunks.size();
- final ChunkMap chunkMap = this.chunkMap;
+ // Directly add all pre-filtered ticking chunks to output
+ if (output instanceof ArrayList<LevelChunk> arrayList) {
+ arrayList.ensureCapacity(size);
+ }
for (int i = 0; i < size; ++i) {
- final ServerChunkCache.ChunkAndHolder chunkAndHolder = raw[i];
- final LevelChunk levelChunk = chunkAndHolder.chunk();
-
- if (!this.isChunkNearPlayer(chunkMap, levelChunk.getPos(), levelChunk)) {
- continue;
- }
-
- output.add(levelChunk);
+ output.add(raw[i].chunk());
}
// Paper end - chunk tick iteration optimisation
}
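
The hunk above drops the per-chunk isChunkNearPlayer re-check (the input list is already pre-filtered) and pre-sizes the output list. A minimal standalone sketch of the ensureCapacity idea, using assumed names rather than Leaf's ServerChunkCache types: growing the backing array once up front avoids the repeated Arrays.copyOf steps that plain add() calls would otherwise trigger.

import java.util.ArrayList;
import java.util.List;

final class EnsureCapacityDemo {
    // Copies `size` pre-filtered elements into `output`, growing it at most once.
    static <T> void copyAll(T[] raw, int size, List<T> output) {
        if (output instanceof ArrayList<T> arrayList) {
            arrayList.ensureCapacity(arrayList.size() + size); // single grow, up front
        }
        for (int i = 0; i < size; i++) {
            output.add(raw[i]); // no intermediate resizes on the ArrayList path
        }
    }
}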

View File

@@ -0,0 +1,43 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Fri, 21 Feb 2025 15:06:55 +0100
Subject: [PATCH] Bulk writes to writeLongArray during chunk loading
diff --git a/net/minecraft/network/FriendlyByteBuf.java b/net/minecraft/network/FriendlyByteBuf.java
index abb0141426fd716e79a947b9498a8351daa342fc..838a7d64e717759f506e64c74eb76eea05f72811 100644
--- a/net/minecraft/network/FriendlyByteBuf.java
+++ b/net/minecraft/network/FriendlyByteBuf.java
@@ -341,9 +341,30 @@ public class FriendlyByteBuf extends ByteBuf {
public FriendlyByteBuf writeLongArray(long[] array) {
this.writeVarInt(array.length);
+ if (array.length == 0) {
+ return this;
+ }
+
+ this.source.ensureWritable(array.length * Long.BYTES);
+ int writerIndex = this.source.writerIndex();
+
+ if (this.source.hasArray()) {
+ byte[] dest = this.source.array();
+ int offset = this.source.arrayOffset() + writerIndex;
- for (long l : array) {
- this.writeLong(l);
+ ByteBuffer buf = ByteBuffer.wrap(dest, offset, array.length * Long.BYTES).order(this.source.order());
+ buf.asLongBuffer().put(array);
+
+ this.source.writerIndex(writerIndex + array.length * Long.BYTES);
+ } else if (this.source.nioBufferCount() > 0) {
+ ByteBuffer nioBuf = this.source.nioBuffer(writerIndex, array.length * Long.BYTES);
+ nioBuf.asLongBuffer().put(array);
+ this.source.writerIndex(writerIndex + array.length * Long.BYTES);
+ } else {
+ ByteBuffer temp = ByteBuffer.allocate(array.length * Long.BYTES).order(this.source.order());
+ temp.asLongBuffer().put(array);
+ temp.rewind();
+ this.source.writeBytes(temp);
}
return this;
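
As a rough illustration of the bulk write above, here is a Netty-free sketch using a plain heap java.nio.ByteBuffer (the real patch works on Netty's ByteBuf and must advance writerIndex manually, because writes through an asLongBuffer() view do not move the owning buffer's position):

import java.nio.ByteBuffer;

final class LongArrayPacking {
    // Packs a long[] in one bulk copy instead of array.length individual writeLong calls.
    static byte[] pack(long[] array) {
        ByteBuffer buf = ByteBuffer.allocate(array.length * Long.BYTES); // big-endian by default
        buf.asLongBuffer().put(array); // bulk copy through the view; buf.position() stays 0
        return buf.array();
    }
}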

View File

@@ -0,0 +1,145 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Sun, 16 Feb 2025 15:15:16 +0100
Subject: [PATCH] Use BFS on getSlopeDistance in fluids
diff --git a/net/minecraft/world/level/material/FlowingFluid.java b/net/minecraft/world/level/material/FlowingFluid.java
index 4c2c2efd5380ff1fa5ad7553b51babae20f516ae..bd556e02f4e0822e2a62f53ceca7ad61bbfb736b 100644
--- a/net/minecraft/world/level/material/FlowingFluid.java
+++ b/net/minecraft/world/level/material/FlowingFluid.java
@@ -2,12 +2,16 @@ package net.minecraft.world.level.material;
import com.google.common.collect.Maps;
import it.unimi.dsi.fastutil.objects.Object2ByteLinkedOpenHashMap;
+import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet;
import it.unimi.dsi.fastutil.shorts.Short2BooleanMap;
import it.unimi.dsi.fastutil.shorts.Short2BooleanOpenHashMap;
import it.unimi.dsi.fastutil.shorts.Short2ObjectMap;
import it.unimi.dsi.fastutil.shorts.Short2ObjectOpenHashMap;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Queue;
+import java.util.Set;
+
import net.minecraft.core.BlockPos;
import net.minecraft.core.Direction;
import net.minecraft.server.level.ServerLevel;
@@ -342,30 +346,65 @@ public abstract class FlowingFluid extends Fluid {
protected abstract void beforeDestroyingBlock(LevelAccessor level, BlockPos pos, BlockState state);
protected int getSlopeDistance(LevelReader level, BlockPos pos, int depth, Direction direction, BlockState state, FlowingFluid.SpreadContext spreadContext) {
- int i = 1000;
+ int maxDepth = this.getSlopeFindDistance(level);
+ int initialCapacity = (int) Math.pow(4, maxDepth); // Pre-size based on max possible nodes
+ Queue<QueueEntry> queue = new java.util.ArrayDeque<>(initialCapacity);
+ it.unimi.dsi.fastutil.longs.LongSet visited = new it.unimi.dsi.fastutil.longs.LongOpenHashSet();
+
+ queue.add(new QueueEntry(pos, depth, direction));
+ visited.add(encodeVisited(pos, direction));
+
+ while (!queue.isEmpty()) {
+ QueueEntry current = queue.poll();
+ BlockPos currentPos = current.pos;
+ int currentDepth = current.depth;
+ Direction excludeDir = current.excludeDir;
+
+ for (Direction dir : Direction.Plane.HORIZONTAL) {
+ if (dir == excludeDir) continue;
+
+ BlockPos nextPos = currentPos.relative(dir);
+ Direction nextExcludeDir = dir.getOpposite();
+ long nextKey = encodeVisited(nextPos, nextExcludeDir);
+
+ if (!visited.add(nextKey)) continue;
+
+ BlockState blockState = spreadContext.getBlockStateIfLoaded(nextPos);
+ if (blockState == null) continue;
- for (Direction direction1 : Direction.Plane.HORIZONTAL) {
- if (direction1 != direction) {
- BlockPos blockPos = pos.relative(direction1);
- BlockState blockState = spreadContext.getBlockStateIfLoaded(blockPos); // Paper - Prevent chunk loading from fluid flowing
- if (blockState == null) continue; // Paper - Prevent chunk loading from fluid flowing
FluidState fluidState = blockState.getFluidState();
- if (this.canPassThrough(level, this.getFlowing(), pos, state, direction1, blockPos, blockState, fluidState)) {
- if (spreadContext.isHole(blockPos)) {
- return depth;
+ if (this.canPassThrough(level, this.getFlowing(), currentPos, state, dir, nextPos, blockState, fluidState)) {
+ if (spreadContext.isHole(nextPos)) {
+ return currentDepth;
}
- if (depth < this.getSlopeFindDistance(level)) {
- int slopeDistance = this.getSlopeDistance(level, blockPos, depth + 1, direction1.getOpposite(), blockState, spreadContext);
- if (slopeDistance < i) {
- i = slopeDistance;
- }
+ if (currentDepth + 1 <= maxDepth) {
+ queue.add(new QueueEntry(nextPos, currentDepth + 1, nextExcludeDir));
}
}
}
}
- return i;
+ return 1000;
+ }
+
+ // Encode BlockPos and Direction into a long (x: 26 bits, z: 26 bits, dir: 4 bits)
+ private static long encodeVisited(BlockPos pos, Direction dir) {
+ return ((long) (pos.getX() & 0x3FFFFFF) << 38)
+ | ((long) (pos.getZ() & 0x3FFFFFF) << 12)
+ | (dir.ordinal() & 0xF);
+ }
+
+ private static class QueueEntry {
+ final BlockPos pos;
+ final int depth;
+ final Direction excludeDir;
+
+ QueueEntry(BlockPos pos, int depth, Direction excludeDir) {
+ this.pos = pos.immutable();
+ this.depth = depth;
+ this.excludeDir = excludeDir;
+ }
}
boolean isWaterHole(BlockGetter level, BlockPos pos, BlockState state, BlockPos belowPos, BlockState belowState) {
@@ -612,12 +651,30 @@ public abstract class FlowingFluid extends Fluid {
}
public boolean isHole(BlockPos pos) {
- return this.holeCache.computeIfAbsent(this.getCacheKey(pos), s -> {
- BlockState blockState = this.getBlockState(pos, s);
- BlockPos blockPos = pos.below();
- BlockState blockState1 = this.level.getBlockState(blockPos);
- return FlowingFluid.this.isWaterHole(this.level, pos, blockState, blockPos, blockState1);
- });
+ short key = this.getCacheKey(pos);
+ // Fast path - check if we already have the result
+ if (this.holeCache.containsKey(key)) {
+ return this.holeCache.get(key);
+ }
+ // Get cached block state for current position
+ BlockState blockState = this.stateCache.get(key);
+ if (blockState == null) {
+ blockState = this.level.getBlockState(pos);
+ this.stateCache.put(key, blockState);
+ }
+ // Get position below and its key
+ BlockPos belowPos = pos.below();
+ short belowKey = this.getCacheKey(belowPos);
+ // Get cached block state for position below
+ BlockState belowState = this.stateCache.get(belowKey);
+ if (belowState == null) {
+ belowState = this.level.getBlockState(belowPos);
+ this.stateCache.put(belowKey, belowState);
+ }
+ // Compute result and cache it
+ boolean result = FlowingFluid.this.isWaterHole(this.level, pos, blockState, belowPos, belowState);
+ this.holeCache.put(key, result);
+ return result;
}
private short getCacheKey(BlockPos pos) {
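
A self-contained sketch of the visited-set trick used by the BFS above (assumes fastutil on the classpath, which these patches already rely on): x and z are truncated to 26 bits each and the arrival direction to 4 bits, so one primitive long identifies a (position, direction) pair without boxing.

import it.unimi.dsi.fastutil.longs.LongOpenHashSet;

final class VisitedKeyDemo {
    // 26 bits per horizontal coordinate is enough for the +/-30,000,000 block world border.
    static long encode(int x, int z, int dirOrdinal) {
        return ((long) (x & 0x3FFFFFF) << 38)
             | ((long) (z & 0x3FFFFFF) << 12)
             | (dirOrdinal & 0xF);
    }

    public static void main(String[] args) {
        LongOpenHashSet visited = new LongOpenHashSet();
        System.out.println(visited.add(encode(100, -42, 2))); // true: first visit
        System.out.println(visited.add(encode(100, -42, 2))); // false: already queued
    }
}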

View File

@@ -0,0 +1,69 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Sun, 16 Feb 2025 19:03:23 +0100
Subject: [PATCH] Optimize AABB
diff --git a/net/minecraft/world/phys/AABB.java b/net/minecraft/world/phys/AABB.java
index f64c04b32dd2d0fe143fc8bf9f498e52beb66a58..3a41291aa9c0d728b54cf962360303750725bc82 100644
--- a/net/minecraft/world/phys/AABB.java
+++ b/net/minecraft/world/phys/AABB.java
@@ -220,13 +220,14 @@ public class AABB {
}
public AABB intersect(AABB other) {
- double max = Math.max(this.minX, other.minX);
- double max1 = Math.max(this.minY, other.minY);
- double max2 = Math.max(this.minZ, other.minZ);
- double min = Math.min(this.maxX, other.maxX);
- double min1 = Math.min(this.maxY, other.maxY);
- double min2 = Math.min(this.maxZ, other.maxZ);
- return new AABB(max, max1, max2, min, min1, min2);
+ return new AABB(
+ this.minX > other.minX ? this.minX : other.minX,
+ this.minY > other.minY ? this.minY : other.minY,
+ this.minZ > other.minZ ? this.minZ : other.minZ,
+ this.maxX < other.maxX ? this.maxX : other.maxX,
+ this.maxY < other.maxY ? this.maxY : other.maxY,
+ this.maxZ < other.maxZ ? this.maxZ : other.maxZ
+ );
}
public AABB minmax(AABB other) {
@@ -258,16 +259,33 @@ public class AABB {
}
public boolean intersects(AABB other) {
- return this.intersects(other.minX, other.minY, other.minZ, other.maxX, other.maxY, other.maxZ);
+ // Removed redundant method call overhead
+ return this.minX < other.maxX &&
+ this.maxX > other.minX &&
+ this.minY < other.maxY &&
+ this.maxY > other.minY &&
+ this.minZ < other.maxZ &&
+ this.maxZ > other.minZ;
}
public boolean intersects(double x1, double y1, double z1, double x2, double y2, double z2) {
- return this.minX < x2 && this.maxX > x1 && this.minY < y2 && this.maxY > y1 && this.minZ < z2 && this.maxZ > z1;
+ // No temporary variables needed, direct comparison
+ return this.minX < x2 &&
+ this.maxX > x1 &&
+ this.minY < y2 &&
+ this.maxY > y1 &&
+ this.minZ < z2 &&
+ this.maxZ > z1;
}
public boolean intersects(Vec3 min, Vec3 max) {
return this.intersects(
- Math.min(min.x, max.x), Math.min(min.y, max.y), Math.min(min.z, max.z), Math.max(min.x, max.x), Math.max(min.y, max.y), Math.max(min.z, max.z)
+ min.x < max.x ? min.x : max.x,
+ min.y < max.y ? min.y : max.y,
+ min.z < max.z ? min.z : max.z,
+ min.x > max.x ? min.x : max.x,
+ min.y > max.y ? min.y : max.y,
+ min.z > max.z ? min.z : max.z
);
}
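
A toy standalone check of the strict-inequality overlap test kept above (an assumed record, not net.minecraft.world.phys.AABB): boxes that merely share a face do not count as intersecting.

record Box(double minX, double minY, double minZ, double maxX, double maxY, double maxZ) {
    boolean intersects(Box o) {
        return minX < o.maxX && maxX > o.minX
            && minY < o.maxY && maxY > o.minY
            && minZ < o.maxZ && maxZ > o.minZ;
    }

    public static void main(String[] args) {
        Box a = new Box(0, 0, 0, 1, 1, 1);
        Box touching = new Box(1, 0, 0, 2, 1, 1);        // shares the x = 1 face
        Box overlapping = new Box(0.5, 0.5, 0.5, 2, 2, 2);
        System.out.println(a.intersects(touching));      // false
        System.out.println(a.intersects(overlapping));   // true
    }
}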

View File

@@ -0,0 +1,84 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Thu, 20 Feb 2025 15:37:39 +0100
Subject: [PATCH] Improve sorting in SortedArraySet
diff --git a/net/minecraft/util/SortedArraySet.java b/net/minecraft/util/SortedArraySet.java
index 339b19e88567be382e550ed54477fabd58d51faa..bde5b4cb4cda003acd7343b16f09f915b71fe3f2 100644
--- a/net/minecraft/util/SortedArraySet.java
+++ b/net/minecraft/util/SortedArraySet.java
@@ -11,6 +11,7 @@ import javax.annotation.Nullable;
public class SortedArraySet<T> extends AbstractSet<T> implements ca.spottedleaf.moonrise.patches.chunk_system.util.ChunkSystemSortedArraySet<T> { // Paper - rewrite chunk system
private static final int DEFAULT_INITIAL_CAPACITY = 10;
private final Comparator<T> comparator;
+ private final boolean isNaturalOrder;
T[] contents;
int size;
@@ -93,10 +94,11 @@ public class SortedArraySet<T> extends AbstractSet<T> implements ca.spottedleaf.
private SortedArraySet(int initialCapacity, Comparator<T> comparator) {
this.comparator = comparator;
+ this.isNaturalOrder = comparator == Comparator.naturalOrder();
if (initialCapacity < 0) {
throw new IllegalArgumentException("Initial capacity (" + initialCapacity + ") is negative");
} else {
- this.contents = (T[])castRawArray(new Object[initialCapacity]);
+ this.contents = (T[]) castRawArray(new Object[initialCapacity]);
}
}
@@ -121,7 +123,51 @@ public class SortedArraySet<T> extends AbstractSet<T> implements ca.spottedleaf.
}
private int findIndex(T object) {
- return Arrays.binarySearch(this.contents, 0, this.size, object, this.comparator);
+ return isNaturalOrder ? naturalBinarySearch(object) : customBinarySearch(object);
+ }
+
+ private int naturalBinarySearch(T object) {
+ int low = 0;
+ int high = this.size - 1;
+ Comparable<? super T> key = (Comparable<? super T>) object;
+ T[] a = this.contents;
+
+ while (low <= high) {
+ int mid = (low + high) >>> 1;
+ T midVal = a[mid];
+ int cmp = key.compareTo(midVal);
+
+ if (cmp < 0) {
+ high = mid - 1;
+ } else if (cmp > 0) {
+ low = mid + 1;
+ } else {
+ return mid;
+ }
+ }
+ return -(low + 1);
+ }
+
+ private int customBinarySearch(T object) {
+ int low = 0;
+ int high = this.size - 1;
+ T[] a = this.contents;
+ Comparator<T> c = this.comparator;
+
+ while (low <= high) {
+ int mid = (low + high) >>> 1;
+ T midVal = a[mid];
+ int cmp = c.compare(midVal, object);
+
+ if (cmp < 0) {
+ low = mid + 1;
+ } else if (cmp > 0) {
+ high = mid - 1;
+ } else {
+ return mid;
+ }
+ }
+ return -(low + 1);
}
private static int getInsertionPosition(int index) {
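
Both search paths above keep the java.util.Arrays.binarySearch return convention, which the rest of SortedArraySet relies on: a hit returns the index, a miss returns -(insertionPoint + 1). A small worked example:

import java.util.Arrays;

final class InsertionPointDemo {
    public static void main(String[] args) {
        int[] contents = {2, 4, 6, 8};
        int hit = Arrays.binarySearch(contents, 6);   // 2
        int miss = Arrays.binarySearch(contents, 5);  // -3 == -(2 + 1)
        int insertAt = -miss - 1;                     // 2: where 5 would be inserted
        System.out.println(hit + " " + miss + " " + insertAt);
    }
}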

View File

@@ -0,0 +1,55 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Sun, 23 Feb 2025 00:37:39 +0100
Subject: [PATCH] Make removeIf slightly faster
diff --git a/net/minecraft/util/SortedArraySet.java b/net/minecraft/util/SortedArraySet.java
index bde5b4cb4cda003acd7343b16f09f915b71fe3f2..7af029d8c2677ef00186acb5a3794b0ab3267ebd 100644
--- a/net/minecraft/util/SortedArraySet.java
+++ b/net/minecraft/util/SortedArraySet.java
@@ -18,35 +18,22 @@ public class SortedArraySet<T> extends AbstractSet<T> implements ca.spottedleaf.
// Paper start - rewrite chunk system
@Override
public final boolean removeIf(final java.util.function.Predicate<? super T> filter) {
- // prev. impl used an iterator, which could be n^2 and creates garbage
int i = 0;
final int len = this.size;
final T[] backingArray = this.contents;
- for (;;) {
- if (i >= len) {
- return false;
- }
- if (!filter.test(backingArray[i])) {
- ++i;
- continue;
- }
- break;
- }
+ // Find first element to remove
+ while (i < len && !filter.test(backingArray[i])) i++;
+ if (i == len) return false;
- // we only want to write back to backingArray if we really need to
-
- int lastIndex = i; // this is where new elements are shifted to
-
- for (; i < len; ++i) {
- final T curr = backingArray[i];
- if (!filter.test(curr)) { // if test throws we're screwed
- backingArray[lastIndex++] = curr;
- }
+ // Shift elements in-place
+ int lastIndex = i;
+ for (i++; i < len; i++) {
+ T curr = backingArray[i];
+ if (!filter.test(curr)) backingArray[lastIndex++] = curr;
}
- // cleanup end
- Arrays.fill(backingArray, lastIndex, len, null);
+ // Only update size - skip Arrays.fill (safe in ChunkHolderManager's context)
this.size = lastIndex;
return true;
}
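
A generic sketch of the single-pass compaction above, on a plain array rather than SortedArraySet. Note the trade-off the patch itself mentions: skipping the trailing Arrays.fill means slots past the new size keep their old references, which is only safe if callers never read beyond size.

import java.util.function.Predicate;

final class RemoveIfCompaction {
    // Returns the new logical size; survivors are shifted left in one pass.
    static <T> int removeIf(T[] a, int size, Predicate<? super T> filter) {
        int write = 0;
        for (int read = 0; read < size; read++) {
            if (!filter.test(a[read])) {
                a[write++] = a[read];
            }
        }
        return write; // slots in [write, size) still hold stale references
    }
}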

View File

@@ -0,0 +1,169 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Fri, 21 Feb 2025 18:05:09 +0100
Subject: [PATCH] Optimize LinearPalette
diff --git a/net/minecraft/world/level/chunk/LinearPalette.java b/net/minecraft/world/level/chunk/LinearPalette.java
index 2073f6ff41aa570102621d183ee890b076267d54..459b6adca18868354374d00f3da906395fb474ab 100644
--- a/net/minecraft/world/level/chunk/LinearPalette.java
+++ b/net/minecraft/world/level/chunk/LinearPalette.java
@@ -1,5 +1,6 @@
package net.minecraft.world.level.chunk;
+import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;
import net.minecraft.core.IdMap;
@@ -10,6 +11,8 @@ import org.apache.commons.lang3.Validate;
public class LinearPalette<T> implements Palette<T>, ca.spottedleaf.moonrise.patches.fast_palette.FastPalette<T> { // Paper - optimise palette reads
private final IdMap<T> registry;
private final T[] values;
+ private final int[] byteSizes;
+ private final int[] idCache; // Cached registry IDs for values
private final PaletteResize<T> resizeHandler;
private final int bits;
private int size;
@@ -23,24 +26,34 @@ public class LinearPalette<T> implements Palette<T>, ca.spottedleaf.moonrise.pat
private LinearPalette(IdMap<T> registry, int bits, PaletteResize<T> resizeHandler, List<T> values) {
this.registry = registry;
- this.values = (T[])(new Object[1 << bits]);
+ this.values = (T[]) (new Object[1 << bits]);
+ this.idCache = new int[1 << bits];
+ this.byteSizes = new int[1 << bits]; // Initialize byteSizes
this.bits = bits;
this.resizeHandler = resizeHandler;
Validate.isTrue(values.size() <= this.values.length, "Can't initialize LinearPalette of size %d with %d entries", this.values.length, values.size());
for (int i = 0; i < values.size(); i++) {
- this.values[i] = values.get(i);
+ T value = values.get(i);
+ this.values[i] = value;
+ int id = registry.getId(value);
+ this.idCache[i] = id;
+ this.byteSizes[i] = VarInt.getByteSize(id); // Precompute byte size
}
-
this.size = values.size();
}
- private LinearPalette(IdMap<T> registry, T[] values, PaletteResize<T> resizeHandler, int bits, int size) {
+ private LinearPalette(IdMap<T> registry, T[] values, int[] idCache, PaletteResize<T> resizeHandler, int bits, int size) {
this.registry = registry;
this.values = values;
+ this.idCache = idCache;
this.resizeHandler = resizeHandler;
this.bits = bits;
this.size = size;
+ this.byteSizes = new int[idCache.length];
+ for (int i = 0; i < idCache.length; i++) {
+ this.byteSizes[i] = VarInt.getByteSize(idCache[i]);
+ }
}
public static <A> Palette<A> create(int bits, IdMap<A> registry, PaletteResize<A> resizeHandler, List<A> values) {
@@ -58,6 +71,9 @@ public class LinearPalette<T> implements Palette<T>, ca.spottedleaf.moonrise.pat
int ix = this.size;
if (ix < this.values.length) {
this.values[ix] = state;
+ int id = registry.getId(state);
+ this.idCache[ix] = id;
+ this.byteSizes[ix] = VarInt.getByteSize(id); // Cache byte size
this.size++;
return ix;
} else {
@@ -88,29 +104,28 @@ public class LinearPalette<T> implements Palette<T>, ca.spottedleaf.moonrise.pat
@Override
public void read(FriendlyByteBuf buffer) {
this.size = buffer.readVarInt();
-
for (int i = 0; i < this.size; i++) {
- this.values[i] = this.registry.byIdOrThrow(buffer.readVarInt());
+ int id = buffer.readVarInt();
+ this.values[i] = this.registry.byIdOrThrow(id);
+ this.idCache[i] = id;
+ this.byteSizes[i] = VarInt.getByteSize(id); // Precompute during read
}
}
@Override
public void write(FriendlyByteBuf buffer) {
buffer.writeVarInt(this.size);
-
for (int i = 0; i < this.size; i++) {
- buffer.writeVarInt(this.registry.getId(this.values[i]));
+ buffer.writeVarInt(this.idCache[i]); // Use cached ID
}
}
@Override
public int getSerializedSize() {
- int byteSize = VarInt.getByteSize(this.getSize());
-
- for (int i = 0; i < this.getSize(); i++) {
- byteSize += VarInt.getByteSize(this.registry.getId(this.values[i]));
+ int byteSize = VarInt.getByteSize(this.size);
+ for (int i = 0; i < this.size; i++) {
+ byteSize += this.byteSizes[i]; // Use cached byte sizes
}
-
return byteSize;
}
@@ -121,6 +136,54 @@ public class LinearPalette<T> implements Palette<T>, ca.spottedleaf.moonrise.pat
@Override
public Palette<T> copy(PaletteResize<T> resizeHandler) {
- return new LinearPalette<>(this.registry, (T[])((Object[])this.values.clone()), resizeHandler, this.bits, this.size);
+ // Special case for empty palette - fastest possible return
+ if (this.size == 0) {
+ return new LinearPalette<>(this.registry, (T[]) new Object[1], new int[1], resizeHandler, this.bits, 0);
+ }
+
+ // For small sizes, allocate exact-sized arrays and use direct assignment
+ if (this.size <= 4) {
+ @SuppressWarnings("unchecked")
+ T[] valuesCopy = (T[]) new Object[this.size];
+ int[] idCacheCopy = new int[this.size];
+
+ // Unrolled loop eliminates loop overhead for small arrays
+ switch (this.size) {
+ case 4:
+ valuesCopy[3] = this.values[3];
+ idCacheCopy[3] = this.idCache[3];
+ // Fall through
+ case 3:
+ valuesCopy[2] = this.values[2];
+ idCacheCopy[2] = this.idCache[2];
+ // Fall through
+ case 2:
+ valuesCopy[1] = this.values[1];
+ idCacheCopy[1] = this.idCache[1];
+ // Fall through
+ case 1:
+ valuesCopy[0] = this.values[0];
+ idCacheCopy[0] = this.idCache[0];
+ }
+
+ return new LinearPalette<>(this.registry, valuesCopy, idCacheCopy, resizeHandler, this.bits, this.size);
+ }
+
+ // For larger arrays, use optimized bulk operations
+ @SuppressWarnings("unchecked")
+ T[] valuesCopy = (T[]) new Object[this.size];
+ int[] idCacheCopy = new int[this.size];
+
+ System.arraycopy(this.values, 0, valuesCopy, 0, this.size);
+ System.arraycopy(this.idCache, 0, idCacheCopy, 0, this.size);
+
+ return new LinearPalette<>(
+ this.registry,
+ valuesCopy,
+ idCacheCopy,
+ resizeHandler,
+ this.bits,
+ this.size
+ );
}
}
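
The core idea above is to resolve each palette entry's registry id (and its VarInt length) once, at insert time, so write() and getSerializedSize() only touch primitive arrays. A minimal sketch with an assumed ToIntFunction standing in for IdMap#getId:

import java.util.function.ToIntFunction;

final class IdCachingPalette<T> {
    private final Object[] values;
    private final int[] idCache;               // registry id per entry, resolved once
    private final ToIntFunction<T> registryId; // stand-in for IdMap#getId
    private int size;

    IdCachingPalette(int capacity, ToIntFunction<T> registryId) {
        this.values = new Object[capacity];
        this.idCache = new int[capacity];
        this.registryId = registryId;
    }

    int add(T value) {
        values[size] = value;
        idCache[size] = registryId.applyAsInt(value); // the only registry lookup
        return size++;
    }

    int idAt(int index) {
        return idCache[index]; // write() and size queries read this, never the registry
    }
}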

View File

@@ -0,0 +1,41 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Mon, 24 Feb 2025 16:33:46 +0100
Subject: [PATCH] Optimize IdMapper
diff --git a/net/minecraft/core/IdMapper.java b/net/minecraft/core/IdMapper.java
index 439dc29b8ee8a1dc2ec63c00a9727a37bb697bad..b063cdb0667de69c5ab5ce3895a0adfb19d6a28f 100644
--- a/net/minecraft/core/IdMapper.java
+++ b/net/minecraft/core/IdMapper.java
@@ -11,7 +11,7 @@ import javax.annotation.Nullable;
public class IdMapper<T> implements IdMap<T> {
private int nextId;
- private final Reference2IntMap<T> tToId;
+ private final it.unimi.dsi.fastutil.objects.Object2IntOpenCustomHashMap<T> tToId;
private final List<T> idToT;
public IdMapper() {
@@ -20,7 +20,20 @@ public class IdMapper<T> implements IdMap<T> {
public IdMapper(int expectedSize) {
this.idToT = Lists.newArrayListWithExpectedSize(expectedSize);
- this.tToId = new Reference2IntOpenHashMap<>(expectedSize);
+ this.tToId = new it.unimi.dsi.fastutil.objects.Object2IntOpenCustomHashMap<>(
+ expectedSize,
+ new it.unimi.dsi.fastutil.Hash.Strategy<T>() {
+ @Override
+ public int hashCode(T o) {
+ return System.identityHashCode(o);
+ }
+
+ @Override
+ public boolean equals(T a, T b) {
+ return a == b;
+ }
+ }
+ );
this.tToId.defaultReturnValue(-1);
}
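
Functionally this swaps one identity-keyed map for another: Object2IntOpenCustomHashMap with a System.identityHashCode/== strategy behaves like a reference map. A short sketch of that strategy in isolation (fastutil on the classpath assumed):

import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.objects.Object2IntOpenCustomHashMap;

final class IdentityStrategyDemo {
    public static void main(String[] args) {
        Hash.Strategy<String> identity = new Hash.Strategy<>() {
            @Override public int hashCode(String o) { return System.identityHashCode(o); }
            @Override public boolean equals(String a, String b) { return a == b; }
        };
        Object2IntOpenCustomHashMap<String> toId = new Object2IntOpenCustomHashMap<>(identity);
        toId.defaultReturnValue(-1);

        String a = new String("stone");
        String b = new String("stone"); // equal content, different identity
        toId.put(a, 0);
        System.out.println(toId.getInt(a)); // 0
        System.out.println(toId.getInt(b)); // -1: identity semantics, not equals()
    }
}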

View File

@@ -0,0 +1,81 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Mon, 24 Feb 2025 21:11:09 +0100
Subject: [PATCH] Slight optimizations to VarInt
diff --git a/net/minecraft/network/VarInt.java b/net/minecraft/network/VarInt.java
index 6f8dd31582f0e1d3a71acc7a142c1f4ec0539d9e..043db53ee627ac13e3a952c8d5beba5065ecbb48 100644
--- a/net/minecraft/network/VarInt.java
+++ b/net/minecraft/network/VarInt.java
@@ -51,35 +51,41 @@ public class VarInt {
}
public static ByteBuf write(ByteBuf buffer, int value) {
- // Gale start - Velocity - optimized VarInt#write
- if ((value & 0xFFFFFF80) == 0) {
- buffer.writeByte(value);
- } else if ((value & 0xFFFFC000) == 0) {
- int w = (value & 0x7F) << 8
- | (value >>> 7)
- | 0x00008000;
- buffer.writeShort(w);
- } else if ((value & 0xFFE00000) == 0) {
- int w = (value & 0x7F) << 16
- | (value & 0x3F80) << 1
- | (value >>> 14)
- | 0x00808000;
- buffer.writeMedium(w);
- } else if ((value & 0xF0000000) == 0) {
- int w = (value & 0x7F) << 24
- | ((value & 0x3F80) << 9)
- | (value & 0x1FC000) >> 6
- | (value >>> 21)
- | 0x80808000;
- buffer.writeInt(w);
- } else {
- int w = (value & 0x7F) << 24
- | (value & 0x3F80) << 9
- | (value & 0x1FC000) >> 6
- | ((value >>> 21) & 0x7F)
- | 0x80808080;
- buffer.writeInt(w);
- buffer.writeByte(value >>> 28);
+ // Gale start - Velocity - optimized VarInt#write // Leaf - help JIT by using switch case
+ int bytesNeeded = getByteSize(value);
+
+ switch (bytesNeeded) {
+ case 1:
+ buffer.writeByte(value);
+ break;
+ case 2:
+ int w2 = ((value & 0x7F) << 8) | (value >>> 7) | 0x00008000;
+ buffer.writeShort(w2);
+ break;
+ case 3:
+ int w3 = (value & 0x7F) << 16
+ | (value & 0x3F80) << 1
+ | (value >>> 14)
+ | 0x00808000;
+ buffer.writeMedium(w3);
+ break;
+ case 4:
+ int w4 = (value & 0x7F) << 24
+ | ((value & 0x3F80) << 9)
+ | (value & 0x1FC000) >> 6
+ | (value >>> 21)
+ | 0x80808000;
+ buffer.writeInt(w4);
+ break;
+ case 5:
+ int w5 = (value & 0x7F) << 24
+ | (value & 0x3F80) << 9
+ | (value & 0x1FC000) >> 6
+ | ((value >>> 21) & 0x7F)
+ | 0x80808080;
+ buffer.writeInt(w5);
+ buffer.writeByte(value >>> 28);
+ break;
}
return buffer;
}
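
For reference, the byte counts the switch above dispatches on follow from VarInt's 7-data-bits-per-byte layout. A worked check, using an assumed size formula intended to match VarInt.getByteSize:

final class VarIntSizeCheck {
    static int byteSize(int value) {
        // ceil(bitLength / 7), minimum 1 byte; negatives use all 32 bits, hence 5 bytes
        return Math.max(1, (38 - Integer.numberOfLeadingZeros(value)) / 7);
    }

    public static void main(String[] args) {
        System.out.println(byteSize(0x7F));       // 1
        System.out.println(byteSize(0x80));       // 2
        System.out.println(byteSize(0x1FFFFF));   // 3
        System.out.println(byteSize(0x0FFFFFFF)); // 4
        System.out.println(byteSize(-1));         // 5
    }
}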

View File

@@ -0,0 +1,236 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Mon, 24 Feb 2025 21:33:24 +0100
Subject: [PATCH] Rewrite ClientboundLightUpdatePacketData
diff --git a/net/minecraft/network/protocol/game/ClientboundLightUpdatePacketData.java b/net/minecraft/network/protocol/game/ClientboundLightUpdatePacketData.java
index a0b54f3a3d11e0f0f1cb806406a870ba36da8f07..2ef45811f3a3a763f389e8e6e9eeaf255cf668e6 100644
--- a/net/minecraft/network/protocol/game/ClientboundLightUpdatePacketData.java
+++ b/net/minecraft/network/protocol/game/ClientboundLightUpdatePacketData.java
@@ -1,8 +1,8 @@
package net.minecraft.network.protocol.game;
-import com.google.common.collect.Lists;
import io.netty.buffer.ByteBuf;
import java.util.BitSet;
+import java.util.Arrays;
import java.util.List;
import javax.annotation.Nullable;
import net.minecraft.core.SectionPos;
@@ -16,30 +16,109 @@ import net.minecraft.world.level.lighting.LevelLightEngine;
public class ClientboundLightUpdatePacketData {
private static final StreamCodec<ByteBuf, byte[]> DATA_LAYER_STREAM_CODEC = ByteBufCodecs.byteArray(2048);
+
+ // Static constants to avoid allocations
+ private static final byte[][] EMPTY_ARRAY = new byte[0][];
+
+ // Pre-sized arrays to avoid dynamic resizing
+ private static final ThreadLocal<byte[][]> SKY_BUFFER = ThreadLocal.withInitial(() -> new byte[256][]);
+ private static final ThreadLocal<byte[][]> BLOCK_BUFFER = ThreadLocal.withInitial(() -> new byte[256][]);
+
+ // Pre-cached BitSets with fixed size
private final BitSet skyYMask;
private final BitSet blockYMask;
private final BitSet emptySkyYMask;
private final BitSet emptyBlockYMask;
- private final List<byte[]> skyUpdates;
- private final List<byte[]> blockUpdates;
+
+ // Fixed arrays with exact counts
+ private final byte[][] skyUpdates;
+ private final byte[][] blockUpdates;
+ private final int skyUpdateCount;
+ private final int blockUpdateCount;
public ClientboundLightUpdatePacketData(ChunkPos chunkPos, LevelLightEngine lightEngine, @Nullable BitSet skyLight, @Nullable BitSet blockLight) {
- this.skyYMask = new BitSet();
- this.blockYMask = new BitSet();
- this.emptySkyYMask = new BitSet();
- this.emptyBlockYMask = new BitSet();
- this.skyUpdates = Lists.newArrayList();
- this.blockUpdates = Lists.newArrayList();
-
- for (int i = 0; i < lightEngine.getLightSectionCount(); i++) {
+ int sectionCount = lightEngine.getLightSectionCount();
+
+ // Round up to nearest long boundary (64 bits) to prevent BitSet expansion
+ int longWords = (sectionCount + 63) >>> 6;
+ int bitSetSize = longWords << 6;
+
+ // Pre-size all BitSets to exact size needed
+ this.skyYMask = new BitSet(bitSetSize);
+ this.blockYMask = new BitSet(bitSetSize);
+ this.emptySkyYMask = new BitSet(bitSetSize);
+ this.emptyBlockYMask = new BitSet(bitSetSize);
+
+ // Get buffer arrays from thread local storage to avoid allocations
+ byte[][] skyBuffer = SKY_BUFFER.get();
+ byte[][] blockBuffer = BLOCK_BUFFER.get();
+
+ // Process all sections in a single pass
+ int skyCount = 0;
+ int blockCount = 0;
+ int minLightSection = lightEngine.getMinLightSection();
+
+ // Cache layer listeners to avoid repeated method calls
+ var skyLayerListener = lightEngine.getLayerListener(LightLayer.SKY);
+ var blockLayerListener = lightEngine.getLayerListener(LightLayer.BLOCK);
+
+ // Single pass through all sections
+ for (int i = 0; i < sectionCount; i++) {
+ int sectionY = minLightSection + i;
+ SectionPos sectionPos = SectionPos.of(chunkPos.x, sectionY, chunkPos.z);
+
+ // Process sky light
if (skyLight == null || skyLight.get(i)) {
- this.prepareSectionData(chunkPos, lightEngine, LightLayer.SKY, i, this.skyYMask, this.emptySkyYMask, this.skyUpdates);
+ DataLayer skyData = skyLayerListener.getDataLayerData(sectionPos);
+ if (skyData != null) {
+ if (skyData.isEmpty()) {
+ emptySkyYMask.set(i);
+ } else {
+ skyYMask.set(i);
+ // Store in buffer temporarily - only clone at the end
+ skyBuffer[skyCount++] = skyData.getData();
+ }
+ }
}
+ // Process block light
if (blockLight == null || blockLight.get(i)) {
- this.prepareSectionData(chunkPos, lightEngine, LightLayer.BLOCK, i, this.blockYMask, this.emptyBlockYMask, this.blockUpdates);
+ DataLayer blockData = blockLayerListener.getDataLayerData(sectionPos);
+ if (blockData != null) {
+ if (blockData.isEmpty()) {
+ emptyBlockYMask.set(i);
+ } else {
+ blockYMask.set(i);
+ // Store in buffer temporarily - only clone at the end
+ blockBuffer[blockCount++] = blockData.getData();
+ }
+ }
+ }
+ }
+
+ // Create final arrays with exact sizes
+ if (skyCount > 0) {
+ this.skyUpdates = new byte[skyCount][];
+ // Clone only at the end to minimize work
+ for (int i = 0; i < skyCount; i++) {
+ this.skyUpdates[i] = skyBuffer[i].clone();
+ }
+ } else {
+ this.skyUpdates = EMPTY_ARRAY;
+ }
+
+ if (blockCount > 0) {
+ this.blockUpdates = new byte[blockCount][];
+ // Clone only at the end to minimize work
+ for (int i = 0; i < blockCount; i++) {
+ this.blockUpdates[i] = blockBuffer[i].clone();
}
+ } else {
+ this.blockUpdates = EMPTY_ARRAY;
}
+
+ this.skyUpdateCount = skyCount;
+ this.blockUpdateCount = blockCount;
}
public ClientboundLightUpdatePacketData(FriendlyByteBuf buffer, int x, int z) {
@@ -47,8 +126,28 @@ public class ClientboundLightUpdatePacketData {
this.blockYMask = buffer.readBitSet();
this.emptySkyYMask = buffer.readBitSet();
this.emptyBlockYMask = buffer.readBitSet();
- this.skyUpdates = buffer.readList(DATA_LAYER_STREAM_CODEC);
- this.blockUpdates = buffer.readList(DATA_LAYER_STREAM_CODEC);
+
+ // Read lists directly as arrays to avoid intermediate collections
+ List<byte[]> skyList = buffer.readList(DATA_LAYER_STREAM_CODEC);
+ List<byte[]> blockList = buffer.readList(DATA_LAYER_STREAM_CODEC);
+
+ int skySize = skyList.size();
+ int blockSize = blockList.size();
+
+ if (skySize > 0) {
+ this.skyUpdates = skyList.toArray(new byte[skySize][]);
+ } else {
+ this.skyUpdates = EMPTY_ARRAY;
+ }
+
+ if (blockSize > 0) {
+ this.blockUpdates = blockList.toArray(new byte[blockSize][]);
+ } else {
+ this.blockUpdates = EMPTY_ARRAY;
+ }
+
+ this.skyUpdateCount = skySize;
+ this.blockUpdateCount = blockSize;
}
public void write(FriendlyByteBuf buffer) {
@@ -56,25 +155,31 @@ public class ClientboundLightUpdatePacketData {
buffer.writeBitSet(this.blockYMask);
buffer.writeBitSet(this.emptySkyYMask);
buffer.writeBitSet(this.emptyBlockYMask);
- buffer.writeCollection(this.skyUpdates, DATA_LAYER_STREAM_CODEC);
- buffer.writeCollection(this.blockUpdates, DATA_LAYER_STREAM_CODEC);
- }
- private void prepareSectionData(
- ChunkPos chunkPos, LevelLightEngine levelLightEngine, LightLayer lightLayer, int index, BitSet skyLight, BitSet blockLight, List<byte[]> updates
- ) {
- DataLayer dataLayerData = levelLightEngine.getLayerListener(lightLayer)
- .getDataLayerData(SectionPos.of(chunkPos, levelLightEngine.getMinLightSection() + index));
- if (dataLayerData != null) {
- if (dataLayerData.isEmpty()) {
- blockLight.set(index);
- } else {
- skyLight.set(index);
- updates.add(dataLayerData.copy().getData());
+ // Avoid creating unnecessary objects when writing
+ if (this.skyUpdateCount > 0) {
+ // Use direct array access for efficiency
+ buffer.writeVarInt(this.skyUpdateCount);
+ for (int i = 0; i < this.skyUpdateCount; i++) {
+ DATA_LAYER_STREAM_CODEC.encode(buffer, this.skyUpdates[i]);
}
+ } else {
+ buffer.writeVarInt(0);
+ }
+
+ if (this.blockUpdateCount > 0) {
+ // Use direct array access for efficiency
+ buffer.writeVarInt(this.blockUpdateCount);
+ for (int i = 0; i < this.blockUpdateCount; i++) {
+ DATA_LAYER_STREAM_CODEC.encode(buffer, this.blockUpdates[i]);
+ }
+ } else {
+ buffer.writeVarInt(0);
}
}
+ // Getter methods
+
public BitSet getSkyYMask() {
return this.skyYMask;
}
@@ -84,7 +189,7 @@ public class ClientboundLightUpdatePacketData {
}
public List<byte[]> getSkyUpdates() {
- return this.skyUpdates;
+ return this.skyUpdateCount > 0 ? Arrays.asList(this.skyUpdates) : List.of();
}
public BitSet getBlockYMask() {
@@ -96,6 +201,6 @@ public class ClientboundLightUpdatePacketData {
}
public List<byte[]> getBlockUpdates() {
- return this.blockUpdates;
+ return this.blockUpdateCount > 0 ? Arrays.asList(this.blockUpdates) : List.of();
}
}
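
A generic sketch of the thread-local scratch-buffer pattern used above (assumed raw section data, not DataLayer): a fixed per-thread array stages non-empty sections so the packet constructor allocates only the final exact-size array, with the caveat that nothing may hold onto the scratch slots between calls.

final class ScratchBufferDemo {
    private static final ThreadLocal<byte[][]> SCRATCH =
        ThreadLocal.withInitial(() -> new byte[256][]); // reused per thread

    static byte[][] collectNonEmpty(byte[][] sections) {
        byte[][] buffer = SCRATCH.get();
        int count = 0;
        for (byte[] section : sections) {
            if (section != null && section.length > 0) {
                buffer[count++] = section;       // staged without copying
            }
        }
        byte[][] exact = new byte[count][];
        for (int i = 0; i < count; i++) {
            exact[i] = buffer[i].clone();        // copy only the survivors, once
        }
        return exact;
    }
}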

View File

@@ -0,0 +1,89 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Tue, 25 Feb 2025 21:13:54 +0100
Subject: [PATCH] Some Optimizations on SerializableChunkData
diff --git a/net/minecraft/world/level/chunk/storage/SerializableChunkData.java b/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
index 6b6aaeca14178b5b709e20ae13552d42217f15c0..c10ed10dd843bfa12be3f80a244cda94f8c56807 100644
--- a/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
+++ b/net/minecraft/world/level/chunk/storage/SerializableChunkData.java
@@ -502,14 +502,14 @@ public record SerializableChunkData(
throw new IllegalArgumentException("Chunk can't be serialized: " + chunk);
} else {
ChunkPos pos = chunk.getPos();
- List<SerializableChunkData.SectionData> list = new ArrayList<>(); final List<SerializableChunkData.SectionData> sectionsList = list; // Paper - starlight - OBFHELPER
- LevelChunkSection[] sections = chunk.getSections();
- LevelLightEngine lightEngine = level.getChunkSource().getLightEngine();
// Paper start - starlight
final int minLightSection = ca.spottedleaf.moonrise.common.util.WorldUtil.getMinLightSection(level);
final int maxLightSection = ca.spottedleaf.moonrise.common.util.WorldUtil.getMaxLightSection(level);
final int minBlockSection = ca.spottedleaf.moonrise.common.util.WorldUtil.getMinSection(level);
+ // Pre-allocate with correct capacity to avoid resizing
+ final int expectedSectionCount = maxLightSection - minLightSection + 1;
+ List<SerializableChunkData.SectionData> list = new ArrayList<>(expectedSectionCount);
final LevelChunkSection[] chunkSections = chunk.getSections();
final ca.spottedleaf.moonrise.patches.starlight.light.SWMRNibbleArray[] blockNibbles = ((ca.spottedleaf.moonrise.patches.starlight.chunk.StarlightChunk)chunk).starlight$getBlockNibbles();
@@ -541,10 +541,11 @@ public record SerializableChunkData(
((ca.spottedleaf.moonrise.patches.starlight.storage.StarlightSectionData)(Object)sectionData).starlight$setSkyLightState(skyNibble.state);
}
- sectionsList.add(sectionData);
+ list.add(sectionData);
}
// Paper end - starlight
+ // Pre-allocate block entities list with exact size needed
List<CompoundTag> list1 = new ArrayList<>(chunk.getBlockEntitiesPos().size());
for (BlockPos blockPos : chunk.getBlockEntitiesPos()) {
@@ -554,7 +555,14 @@ public record SerializableChunkData(
}
}
- List<CompoundTag> list2 = new ArrayList<>();
+ // For entities, use an initial estimated capacity if it's a ProtoChunk
+ int entityEstimate = 64; // Reasonable default size
+ if (chunk.getPersistedStatus().getChunkType() == ChunkType.PROTOCHUNK) {
+ ProtoChunk protoChunk = (ProtoChunk)chunk;
+ entityEstimate = Math.max(16, protoChunk.getEntities().size());
+ }
+ List<CompoundTag> list2 = new ArrayList<>(entityEstimate);
+
long[] longs = null;
if (chunk.getPersistedStatus().getChunkType() == ChunkType.PROTOCHUNK) {
ProtoChunk protoChunk = (ProtoChunk)chunk;
@@ -570,14 +578,16 @@ public record SerializableChunkData(
for (Entry<Heightmap.Types, Heightmap> entry : chunk.getHeightmaps()) {
if (chunk.getPersistedStatus().heightmapsAfter().contains(entry.getKey())) {
long[] rawData = entry.getValue().getRawData();
- map.put(entry.getKey(), (long[])rawData.clone());
+ map.put(entry.getKey(), Arrays.copyOf(rawData, rawData.length));
}
}
ChunkAccess.PackedTicks ticksForSerialization = chunk.getTicksForSerialization(level.getGameTime());
- ShortList[] lists = Arrays.stream(chunk.getPostProcessing())
- .map(list3 -> list3 != null ? new ShortArrayList(list3) : null)
- .toArray(ShortList[]::new);
+ ShortList[] postProcessing = chunk.getPostProcessing();
+ ShortList[] lists = new ShortList[postProcessing.length];
+ for (int i = 0; i < postProcessing.length; i++) {
+ lists[i] = postProcessing[i] != null ? new ShortArrayList(postProcessing[i]) : null;
+ }
CompoundTag compoundTag = packStructureData(
StructurePieceSerializationContext.fromLevel(level), pos, chunk.getAllStarts(), chunk.getAllReferences()
);
@@ -605,8 +615,8 @@ public record SerializableChunkData(
list,
list2,
list1,
- compoundTag
- , persistentDataContainer // CraftBukkit - persistentDataContainer
+ compoundTag,
+ persistentDataContainer // CraftBukkit - persistentDataContainer
);
}
}
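
The stream removal above is the usual replace-a-short-Stream-pipeline-with-a-loop change; restated on its own (fastutil's ShortList/ShortArrayList assumed on the classpath, as in the patch):

import it.unimi.dsi.fastutil.shorts.ShortArrayList;
import it.unimi.dsi.fastutil.shorts.ShortList;

final class PostProcessingCopy {
    static ShortList[] copy(ShortList[] postProcessing) {
        ShortList[] lists = new ShortList[postProcessing.length];
        for (int i = 0; i < postProcessing.length; i++) {
            // Same result as Arrays.stream(...).map(...).toArray(ShortList[]::new),
            // without the Stream pipeline and lambda allocations.
            lists[i] = postProcessing[i] != null ? new ShortArrayList(postProcessing[i]) : null;
        }
        return lists;
    }
}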

View File

@@ -0,0 +1,125 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Thu, 27 Feb 2025 23:39:32 +0100
Subject: [PATCH] Rework ChunkHolderManager
diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
index be820c6093dd2ae7642b9bee11edf65e3a8d7242..29872fa10ec833ff9391fc09df034204f092f8a6 100644
--- a/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
+++ b/ca/spottedleaf/moonrise/patches/chunk_system/scheduling/ChunkHolderManager.java
@@ -736,24 +736,19 @@ public final class ChunkHolderManager {
final int sectionShift = ((ChunkSystemServerLevel)this.world).moonrise$getRegionChunkShift();
- final Predicate<Ticket<?>> expireNow = (final Ticket<?> ticket) -> {
- long removeDelay = ((ChunkSystemTicket<?>)(Object)ticket).moonrise$getRemoveDelay();
- if (removeDelay == NO_TIMEOUT_MARKER) {
- return false;
- }
- --removeDelay;
- ((ChunkSystemTicket<?>)(Object)ticket).moonrise$setRemoveDelay(removeDelay);
- return removeDelay <= 0L;
- };
-
+ // Collect sections to process first to avoid concurrent modification issues
+ List<Long> sectionKeys = new ArrayList<>();
for (final PrimitiveIterator.OfLong iterator = this.sectionToChunkToExpireCount.keyIterator(); iterator.hasNext();) {
- final long sectionKey = iterator.nextLong();
+ sectionKeys.add(iterator.nextLong());
+ }
+ for (final Long sectionKey : sectionKeys) {
+ // Skip if section was removed concurrently
if (!this.sectionToChunkToExpireCount.containsKey(sectionKey)) {
- // removed concurrently
continue;
}
+ // Acquire lock for this section only
final ReentrantAreaLock.Node ticketLock = this.ticketLockArea.lock(
CoordinateUtils.getChunkX(sectionKey) << sectionShift,
CoordinateUtils.getChunkZ(sectionKey) << sectionShift
@@ -761,45 +756,66 @@ public final class ChunkHolderManager {
try {
final Long2IntOpenHashMap chunkToExpireCount = this.sectionToChunkToExpireCount.get(sectionKey);
- if (chunkToExpireCount == null) {
- // lost to some race
+ if (chunkToExpireCount == null || chunkToExpireCount.isEmpty()) {
+ // Section was removed or is empty, clean up
+ if (chunkToExpireCount != null && chunkToExpireCount.isEmpty()) {
+ this.sectionToChunkToExpireCount.remove(sectionKey);
+ }
continue;
}
+ // Process each chunk in this section
for (final Iterator<Long2IntMap.Entry> iterator1 = chunkToExpireCount.long2IntEntrySet().fastIterator(); iterator1.hasNext();) {
final Long2IntMap.Entry entry = iterator1.next();
-
final long chunkKey = entry.getLongKey();
final int expireCount = entry.getIntValue();
final SortedArraySet<Ticket<?>> tickets = this.tickets.get(chunkKey);
+ if (tickets == null || tickets.isEmpty()) {
+ iterator1.remove();
+ continue;
+ }
+
final int levelBefore = getTicketLevelAt(tickets);
+ int expiredCount = 0;
- final int sizeBefore = tickets.size();
- tickets.removeIf(expireNow);
- final int sizeAfter = tickets.size();
- final int levelAfter = getTicketLevelAt(tickets);
+ // More efficient ticket processing - avoids creating a new predicate each time
+ for (Iterator<Ticket<?>> ticketIterator = tickets.iterator(); ticketIterator.hasNext();) {
+ Ticket<?> ticket = ticketIterator.next();
+ long removeDelay = ((ChunkSystemTicket<?>)(Object)ticket).moonrise$getRemoveDelay();
+
+ if (removeDelay == NO_TIMEOUT_MARKER) {
+ continue;
+ }
+
+ --removeDelay;
+ if (removeDelay <= 0) {
+ ticketIterator.remove();
+ expiredCount++;
+ } else {
+ ((ChunkSystemTicket<?>)(Object)ticket).moonrise$setRemoveDelay(removeDelay);
+ }
+ }
if (tickets.isEmpty()) {
this.tickets.remove(chunkKey);
}
+
+ final int levelAfter = getTicketLevelAt(tickets);
if (levelBefore != levelAfter) {
this.updateTicketLevel(chunkKey, levelAfter);
}
- final int newExpireCount = expireCount - (sizeBefore - sizeAfter);
-
- if (newExpireCount == expireCount) {
- continue;
- }
-
- if (newExpireCount != 0) {
- entry.setValue(newExpireCount);
- } else {
+ // Update expire count
+ final int newExpireCount = expireCount - expiredCount;
+ if (newExpireCount <= 0) {
iterator1.remove();
+ } else if (newExpireCount != expireCount) {
+ entry.setValue(newExpireCount);
}
}
+ // Remove empty sections
if (chunkToExpireCount.isEmpty()) {
this.sectionToChunkToExpireCount.remove(sectionKey);
}
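
The structural change above is snapshot-then-iterate: section keys are copied out first, and every key is re-checked under its lock before use. A generic sketch of that pattern with an assumed ConcurrentHashMap standing in for the chunk system's concurrent long map:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

final class SnapshotSweepDemo {
    static void sweep(ConcurrentHashMap<Long, Integer> sectionToExpireCount) {
        List<Long> keys = new ArrayList<>(sectionToExpireCount.keySet()); // snapshot
        for (Long key : keys) {
            Integer count = sectionToExpireCount.get(key);
            if (count == null) {
                continue;                             // removed concurrently; skip
            }
            if (count <= 0) {
                sectionToExpireCount.remove(key);     // safe: not iterating the live map
            }
        }
    }
}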

View File

@@ -0,0 +1,222 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Fri, 28 Feb 2025 01:35:49 +0100
Subject: [PATCH] Optimize chunkUnload
diff --git a/ca/spottedleaf/moonrise/patches/starlight/light/SWMRNibbleArray.java b/ca/spottedleaf/moonrise/patches/starlight/light/SWMRNibbleArray.java
index 4ca68a903e67606fc4ef0bfa9862a73797121c8b..75893974d198b946bdc07b01b4c68ff999df5028 100644
--- a/ca/spottedleaf/moonrise/patches/starlight/light/SWMRNibbleArray.java
+++ b/ca/spottedleaf/moonrise/patches/starlight/light/SWMRNibbleArray.java
@@ -3,6 +3,8 @@ package ca.spottedleaf.moonrise.patches.starlight.light;
import net.minecraft.world.level.chunk.DataLayer;
import java.util.ArrayDeque;
import java.util.Arrays;
+import java.util.Map;
+import java.util.WeakHashMap;
// SWMR -> Single Writer Multi Reader Nibble Array
public final class SWMRNibbleArray {
@@ -22,21 +24,35 @@ public final class SWMRNibbleArray {
protected static final int INIT_STATE_INIT = 2; // initialised
protected static final int INIT_STATE_HIDDEN = 3; // initialised, but conversion to Vanilla data should be treated as if NULL
+ private volatile boolean cachedIsAllZero = false;
+ private boolean cachedIsAllZeroValid = false;
+
+ private static final ThreadLocal<SaveState[]> SAVE_STATE_CACHE =
+ ThreadLocal.withInitial(() -> new SaveState[4]);
+
public static final int ARRAY_SIZE = 16 * 16 * 16 / (8/4); // blocks / bytes per block
// this allows us to maintain only 1 byte array when we're not updating
- static final ThreadLocal<ArrayDeque<byte[]>> WORKING_BYTES_POOL = ThreadLocal.withInitial(ArrayDeque::new);
+ static final ThreadLocal<ArrayDeque<byte[]>> WORKING_BYTES_POOL = new ThreadLocal<ArrayDeque<byte[]>>() {
+ @Override
+ protected ArrayDeque<byte[]> initialValue() {
+ return new ArrayDeque<byte[]>(8); // Limit pool size to avoid memory leaks
+ }
+ };
private static byte[] allocateBytes() {
- final byte[] inPool = WORKING_BYTES_POOL.get().pollFirst();
+ final ArrayDeque<byte[]> queue = WORKING_BYTES_POOL.get();
+ final byte[] inPool = queue.pollFirst();
if (inPool != null) {
return inPool;
}
-
return new byte[ARRAY_SIZE];
}
private static void freeBytes(final byte[] bytes) {
- WORKING_BYTES_POOL.get().addFirst(bytes);
+ final ArrayDeque<byte[]> queue = WORKING_BYTES_POOL.get();
+ if (queue.size() < 8) { // Limit pool size to prevent memory leaks
+ queue.addFirst(bytes);
+ }
}
public static SWMRNibbleArray fromVanilla(final DataLayer nibble) {
@@ -131,15 +147,44 @@ public final class SWMRNibbleArray {
public SaveState getSaveState() {
synchronized (this) {
final int state = this.stateVisible;
- final byte[] data = this.storageVisible;
if (state == INIT_STATE_NULL) {
return null;
}
+
if (state == INIT_STATE_UNINIT) {
- return new SaveState(null, state);
+ // Use array-based cache instead of WeakHashMap
+ SaveState[] cache = SAVE_STATE_CACHE.get();
+ SaveState cachedState = cache[INIT_STATE_UNINIT];
+ if (cachedState == null) {
+ cachedState = new SaveState(null, state);
+ cache[INIT_STATE_UNINIT] = cachedState;
+ }
+ return cachedState;
+ }
+
+ // Check if we need to test for all zeros
+ final byte[] data = this.storageVisible;
+ boolean zero;
+ if (cachedIsAllZeroValid) {
+ zero = cachedIsAllZero;
+ } else {
+ zero = isAllZero(data);
+ cachedIsAllZero = zero;
+ cachedIsAllZeroValid = true;
}
- final boolean zero = isAllZero(data);
+
if (zero) {
+ // Use array-based cache instead of WeakHashMap
+ SaveState[] cache = SAVE_STATE_CACHE.get();
+ int cacheKey = state == INIT_STATE_INIT ? INIT_STATE_UNINIT : -1;
+ if (cacheKey >= 0) {
+ SaveState cachedState = cache[cacheKey];
+ if (cachedState == null) {
+ cachedState = new SaveState(null, cacheKey);
+ cache[cacheKey] = cachedState;
+ }
+ return cachedState;
+ }
return state == INIT_STATE_INIT ? new SaveState(null, INIT_STATE_UNINIT) : null;
} else {
return new SaveState(data.clone(), state);
@@ -148,14 +193,23 @@ public final class SWMRNibbleArray {
}
protected static boolean isAllZero(final byte[] data) {
- for (int i = 0; i < (ARRAY_SIZE >>> 4); ++i) {
- byte whole = data[i << 4];
-
- for (int k = 1; k < (1 << 4); ++k) {
- whole |= data[(i << 4) | k];
+ // check in 8-byte chunks
+ final int longLength = ARRAY_SIZE >>> 3;
+ for (int i = 0; i < longLength; i++) {
+ long value = 0;
+ final int baseIndex = i << 3;
+ // Combine 8 bytes into a long
+ for (int j = 0; j < 8; j++) {
+ value |= ((long)(data[baseIndex + j] & 0xFF)) << (j << 3);
+ }
+ if (value != 0) {
+ return false;
}
+ }
- if (whole != 0) {
+ // Check remaining bytes
+ for (int i = longLength << 3; i < ARRAY_SIZE; i++) {
+ if (data[i] != 0) {
return false;
}
}
@@ -163,6 +217,10 @@ public final class SWMRNibbleArray {
return true;
}
+ private void invalidateCache() {
+ this.cachedIsAllZeroValid = false;
+ }
+
// operation type: updating on src, updating on other
public void extrudeLower(final SWMRNibbleArray other) {
if (other.stateUpdating == INIT_STATE_NULL) {
@@ -349,6 +407,7 @@ public final class SWMRNibbleArray {
}
this.updatingDirty = false;
this.stateVisible = this.stateUpdating;
+ this.cachedIsAllZeroValid = false; // Invalidate cache on update
}
return true;
@@ -424,7 +483,14 @@ public final class SWMRNibbleArray {
final int shift = (index & 1) << 2;
final int i = index >>> 1;
- this.storageUpdating[i] = (byte)((this.storageUpdating[i] & (0xF0 >>> shift)) | (value << shift));
+ byte oldValue = this.storageUpdating[i];
+ byte newValue = (byte)((oldValue & (0xF0 >>> shift)) | (value << shift));
+
+ // Only invalidate cache if the value actually changes
+ if (oldValue != newValue) {
+ this.storageUpdating[i] = newValue;
+ this.invalidateCache();
+ }
}
public static final class SaveState {
diff --git a/net/minecraft/world/level/chunk/LevelChunkSection.java b/net/minecraft/world/level/chunk/LevelChunkSection.java
index b8ac6a9ba7b56ccd034757f7d135d272b8e69e90..f6c33a7ca59d00c8967034402be00767a66b6948 100644
--- a/net/minecraft/world/level/chunk/LevelChunkSection.java
+++ b/net/minecraft/world/level/chunk/LevelChunkSection.java
@@ -24,6 +24,7 @@ public class LevelChunkSection implements ca.spottedleaf.moonrise.patches.block_
private boolean isRandomlyTickingBlocksStatus; // Leaf - Cache random tick block status
public final PalettedContainer<BlockState> states;
private PalettedContainer<Holder<Biome>> biomes; // CraftBukkit - read/write
+ private boolean modified = false;
// Paper start - block counting
private static final it.unimi.dsi.fastutil.shorts.ShortArrayList FULL_LIST = new it.unimi.dsi.fastutil.shorts.ShortArrayList(16*16*16);
@@ -135,6 +136,7 @@ public class LevelChunkSection implements ca.spottedleaf.moonrise.patches.block_
// Paper end - block counting
public BlockState setBlockState(int x, int y, int z, BlockState state, boolean useLocks) {
+ this.modified = true;
BlockState blockState;
if (useLocks) {
blockState = this.states.getAndSet(x, y, z, state);
@@ -328,7 +330,29 @@ public class LevelChunkSection implements ca.spottedleaf.moonrise.patches.block_
this.biomes = palettedContainer;
}
+ private LevelChunkSection(short nonEmptyBlockCount, short tickingBlockCount, short tickingFluidCount,
+ PalettedContainer<BlockState> states, PalettedContainer<Holder<Biome>> biomes) {
+ this.nonEmptyBlockCount = nonEmptyBlockCount;
+ this.tickingBlockCount = tickingBlockCount;
+ this.tickingFluidCount = tickingFluidCount;
+ this.states = states;
+ this.biomes = biomes;
+ this.isRandomlyTickingBlocksStatus = this.tickingBlockCount > 0;
+ }
+
public LevelChunkSection copy() {
+ // If the section hasn't been modified and no random ticking blocks/fluids,
+ // return a lightweight copy that shares palette data
+ if (!this.modified && this.tickingBlockCount == 0 && this.tickingFluidCount == 0) {
+ return new LevelChunkSection(
+ this.nonEmptyBlockCount,
+ this.tickingBlockCount,
+ this.tickingFluidCount,
+ this.states, // Share reference instead of copying
+ this.biomes // Share reference instead of copying
+ );
+ }
return new LevelChunkSection(this);
}
+
}
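
The widened isAllZero check above tests eight bytes per step by OR-ing them into a long; any nonzero byte makes the combined word nonzero. A standalone version of the same loop:

final class ZeroCheck {
    static boolean isAllZero(byte[] data) {
        int i = 0;
        int bulkEnd = data.length & ~7;              // largest multiple of 8
        for (; i < bulkEnd; i += 8) {
            long word = 0;
            for (int j = 0; j < 8; j++) {
                word |= (long) (data[i + j] & 0xFF) << (j << 3);
            }
            if (word != 0) {
                return false;
            }
        }
        for (; i < data.length; i++) {               // tail bytes
            if (data[i] != 0) {
                return false;
            }
        }
        return true;
    }
}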

View File

@@ -0,0 +1,126 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Taiyou06 <kaandindar21@gmail.com>
Date: Sun, 2 Mar 2025 21:23:20 +0100
Subject: [PATCH] Async ChunkSend
diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/player/RegionizedPlayerChunkLoader.java b/ca/spottedleaf/moonrise/patches/chunk_system/player/RegionizedPlayerChunkLoader.java
index a35e9fae8f8da0c42f0616c4f78dc396492673aa..31f9556e808c9dea49ba9774cbf736791ed9a687 100644
--- a/ca/spottedleaf/moonrise/patches/chunk_system/player/RegionizedPlayerChunkLoader.java
+++ b/ca/spottedleaf/moonrise/patches/chunk_system/player/RegionizedPlayerChunkLoader.java
@@ -22,15 +22,13 @@ import it.unimi.dsi.fastutil.longs.LongComparator;
import it.unimi.dsi.fastutil.longs.LongHeapPriorityQueue;
import it.unimi.dsi.fastutil.longs.LongOpenHashSet;
import net.minecraft.network.protocol.Packet;
-import net.minecraft.network.protocol.game.ClientboundForgetLevelChunkPacket;
-import net.minecraft.network.protocol.game.ClientboundSetChunkCacheCenterPacket;
-import net.minecraft.network.protocol.game.ClientboundSetChunkCacheRadiusPacket;
-import net.minecraft.network.protocol.game.ClientboundSetSimulationDistancePacket;
+import net.minecraft.network.protocol.game.*;
import net.minecraft.server.level.ChunkTrackingView;
import net.minecraft.server.level.ServerLevel;
import net.minecraft.server.level.ServerPlayer;
import net.minecraft.server.level.TicketType;
import net.minecraft.server.network.PlayerChunkSender;
+import net.minecraft.server.network.ServerGamePacketListenerImpl;
import net.minecraft.world.level.ChunkPos;
import net.minecraft.world.level.GameRules;
import net.minecraft.world.level.chunk.ChunkAccess;
@@ -43,6 +41,8 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
+import static org.dreeam.leaf.config.LeafConfig.LOGGER;
+
public final class RegionizedPlayerChunkLoader {
public static final TicketType<Long> PLAYER_TICKET = TicketType.create("chunk_system:player_ticket", Long::compareTo);
@@ -411,18 +411,81 @@ public final class RegionizedPlayerChunkLoader {
this.delayedTicketOps.addLast(op);
}
+ /**
+ * Sends a chunk to the player.
+ * If async chunk sending is enabled, this will prepare and send the chunk packet asynchronously.
+ * Otherwise, it will use the synchronous chunk sending implementation.
+ */
private void sendChunk(final int chunkX, final int chunkZ) {
- if (this.sentChunks.add(CoordinateUtils.getChunkKey(chunkX, chunkZ))) {
- ((ChunkSystemChunkHolder)((ChunkSystemServerLevel)this.world).moonrise$getChunkTaskScheduler().chunkHolderManager
- .getChunkHolder(chunkX, chunkZ).vanillaChunkHolder).moonrise$addReceivedChunk(this.player);
+ final long chunkKey = CoordinateUtils.getChunkKey(chunkX, chunkZ);
- final LevelChunk chunk = ((ChunkSystemLevel)this.world).moonrise$getFullChunkIfLoaded(chunkX, chunkZ);
+ if (!this.sentChunks.add(chunkKey)) {
+ throw new IllegalStateException();
+ }
- PlatformHooks.get().onChunkWatch(this.world, chunk, this.player);
- PlayerChunkSender.sendChunk(this.player.connection, this.world, chunk);
+ // Get the chunk now, as we need it for both sync and async paths
+ final LevelChunk chunk = ((ChunkSystemLevel)this.world).moonrise$getFullChunkIfLoaded(chunkX, chunkZ);
+ if (chunk == null) {
+ // Handle case where chunk is no longer loaded
+ this.sentChunks.remove(chunkKey);
return;
}
- throw new IllegalStateException();
+
+ // This part needs to remain on the main thread as it affects shared state
+ ((ChunkSystemChunkHolder)((ChunkSystemServerLevel)this.world).moonrise$getChunkTaskScheduler().chunkHolderManager
+ .getChunkHolder(chunkX, chunkZ).vanillaChunkHolder).moonrise$addReceivedChunk(this.player);
+
+ // Call onChunkWatch on the main thread as it might affect server state
+ PlatformHooks.get().onChunkWatch(this.world, chunk, this.player);
+
+ // Check if async chunk sending is enabled
+ if (org.dreeam.leaf.config.modules.async.AsyncChunkSend.enabled) {
+ // Async implementation
+ net.minecraft.Util.backgroundExecutor().execute(() -> {
+ try {
+ // Create and send the chunk packet asynchronously
+ final ServerGamePacketListenerImpl connection = this.player.connection;
+ final ServerLevel serverLevel = this.world;
+
+ // Create the packet
+ ClientboundLevelChunkWithLightPacket packet = new ClientboundLevelChunkWithLightPacket(chunk, serverLevel.getLightEngine(), null, null, serverLevel.chunkPacketBlockController.shouldModify(this.player, chunk));
+ // The packet is immediately ready
+ packet.setReady(true);
+
+ // Schedule sending on the main thread
+ serverLevel.getServer().execute(() -> {
+ if (this.removed || !this.sentChunks.contains(chunkKey)) {
+ // Player was removed or chunk was unloaded while we were preparing
+ return;
+ }
+
+ // Send the packet
+ connection.send(packet);
+
+ // Fire the load event
+ if (io.papermc.paper.event.packet.PlayerChunkLoadEvent.getHandlerList().getRegisteredListeners().length > 0) {
+ new io.papermc.paper.event.packet.PlayerChunkLoadEvent(
+ new org.bukkit.craftbukkit.CraftChunk(chunk),
+ this.player.getBukkitEntity()
+ ).callEvent();
+ }
+
+ // Send POI packets if needed
+ ChunkPos pos = chunk.getPos();
+ DebugPackets.sendPoiPacketsForChunk(serverLevel, pos);
+ });
+ } catch (Exception e) {
+ // Log the exception
+ LOGGER.error("Failed to send chunk asynchronously", e);
+ if (!this.removed) {
+ this.sentChunks.remove(chunkKey);
+ }
+ }
+ });
+ } else {
+ // Original synchronous implementation
+ PlayerChunkSender.sendChunk(this.player.connection, this.world, chunk);
+ }
}
private void sendUnloadChunk(final int chunkX, final int chunkZ) {
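
The hunk above boils down to a produce-on-background, deliver-on-main handoff: the expensive ClientboundLevelChunkWithLightPacket construction moves onto Util.backgroundExecutor(), while the state bookkeeping (sentChunks, moonrise$addReceivedChunk, onChunkWatch) and the actual connection.send stay on the main thread, with a re-check that the chunk is still wanted before sending. A minimal, self-contained sketch of that pattern follows; it uses plain JDK executors and hypothetical encode/deliver helpers rather than any Minecraft or Leaf API.

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public final class AsyncSendSketch {
    // Stand-ins for the server main thread and Util.backgroundExecutor(); hypothetical.
    private static final ExecutorService MAIN = Executors.newSingleThreadExecutor();
    private static final ExecutorService BACKGROUND = Executors.newFixedThreadPool(2);

    private final Set<Long> sentChunks = ConcurrentHashMap.newKeySet();

    public void sendChunk(long chunkKey) {
        if (!sentChunks.add(chunkKey)) {
            return; // already sent
        }
        BACKGROUND.execute(() -> {
            final byte[] payload = encode(chunkKey); // expensive serialization, off the main thread
            MAIN.execute(() -> {
                // Re-validate on the main thread: the chunk may have been unsent or unloaded
                // while the payload was being prepared, mirroring the removed/contains check above.
                if (!sentChunks.contains(chunkKey)) {
                    return;
                }
                deliver(chunkKey, payload); // the actual network write stays on the main thread
            });
        });
    }

    // Hypothetical helpers standing in for packet construction and connection.send().
    private byte[] encode(long chunkKey) {
        return new byte[] {(byte) chunkKey};
    }

    private void deliver(long chunkKey, byte[] payload) {
        System.out.println("sent chunk " + chunkKey + " (" + payload.length + " bytes)");
    }
}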

View File

@@ -0,0 +1,28 @@
package org.dreeam.leaf.config.modules.async;
import org.dreeam.leaf.config.ConfigModules;
import org.dreeam.leaf.config.EnumConfigCategory;
import org.dreeam.leaf.config.annotations.Experimental;
public class AsyncChunkSend extends ConfigModules {
public String getBasePath() {
return EnumConfigCategory.ASYNC.getBaseKeyName() + ".async-chunk-send";
}
@Experimental
public static boolean enabled = false;
@Override
public void onLoaded() {
config.addCommentRegionBased(getBasePath(),
"""
Makes chunk packet preparation and sending asynchronous to improve server performance.
This can significantly reduce main thread load when many players are loading chunks.""",
"""
使区块数据包准备和发送异步化以提高服务器性能.
当许多玩家同时加载区块时, 这可以显著减少主线程负载.""");
enabled = config.getBoolean(getBasePath() + ".enabled", enabled);
}
}
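
For context, a minimal sketch of how a toggle module like this is typically consumed: the boolean is resolved once in onLoaded() and then read as a plain static field on the per-chunk hot path, exactly as the sendChunk hunk above does with AsyncChunkSend.enabled. The Properties-based loader and the "async" base key below are assumptions for illustration, not Leaf's actual config backend.

import java.util.Properties;

public final class AsyncChunkSendToggleSketch {
    // Default mirrors the module above: the feature ships disabled.
    public static boolean enabled = false;

    // Called once at startup/reload, not on the hot path; the key assumes the
    // async category's base key is "async", which is an illustration only.
    public static void load(Properties props) {
        enabled = Boolean.parseBoolean(
            props.getProperty("async.async-chunk-send.enabled", Boolean.toString(enabled)));
    }

    // Per-chunk call site: a single static boolean branch selects the path.
    public static void sendChunk(Runnable asyncPath, Runnable syncPath) {
        (enabled ? asyncPath : syncPath).run();
    }
}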

View File

@@ -1,211 +1,254 @@
package org.dreeam.leaf.util.map;
import it.unimi.dsi.fastutil.longs.LongCollection;
import it.unimi.dsi.fastutil.longs.LongIterator;
import it.unimi.dsi.fastutil.longs.LongOpenHashSet;
import it.unimi.dsi.fastutil.longs.LongSet;
import org.jetbrains.annotations.NotNull;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.Objects;
-import java.util.concurrent.ConcurrentHashMap;
+import java.util.*;
+import java.util.concurrent.locks.ReentrantLock;
/**
* A thread-safe implementation of {@link LongOpenHashSet} using ConcurrentHashMap.KeySetView as backing storage.
* This implementation provides concurrent access and high performance for concurrent operations.
*/
@SuppressWarnings({"unused", "deprecation"})
-public final class ConcurrentLongHashSet extends LongOpenHashSet implements LongSet { // Extending LongOpenHashSet for some moonrise usages
-    private final ConcurrentHashMap.KeySetView<Long, Boolean> backing;
+public final class ConcurrentLongHashSet extends LongOpenHashSet implements LongSet {
+    private static final int DEFAULT_SEGMENTS = 16; // Should be power-of-two
+    private final Segment[] segments;
+    private final int segmentMask;
/**
* Creates a new empty concurrent long set.
*/
public ConcurrentLongHashSet() {
-        this.backing = ConcurrentHashMap.newKeySet();
+        this(DEFAULT_SEGMENTS);
}
@Override
public int size() {
return backing.size();
}
@Override
public boolean isEmpty() {
return backing.isEmpty();
}
@Override
public @NotNull LongIterator iterator() {
return new WrappingLongIterator(backing.iterator());
}
@NotNull
@Override
public Object @NotNull [] toArray() {
return backing.toArray();
}
@NotNull
@Override
public <T> T @NotNull [] toArray(@NotNull T @NotNull [] array) {
Objects.requireNonNull(array, "Array cannot be null");
return backing.toArray(array);
}
@Override
public boolean containsAll(@NotNull Collection<?> collection) {
Objects.requireNonNull(collection, "Collection cannot be null");
return backing.containsAll(collection);
}
@Override
public boolean addAll(@NotNull Collection<? extends Long> collection) {
Objects.requireNonNull(collection, "Collection cannot be null");
return backing.addAll(collection);
}
@Override
public boolean removeAll(@NotNull Collection<?> collection) {
Objects.requireNonNull(collection, "Collection cannot be null");
return backing.removeAll(collection);
}
@Override
public boolean retainAll(@NotNull Collection<?> collection) {
Objects.requireNonNull(collection, "Collection cannot be null");
return backing.retainAll(collection);
}
@Override
public void clear() {
backing.clear();
}
@Override
public boolean add(long key) {
return backing.add(key);
}
@Override
public boolean contains(long key) {
return backing.contains(key);
}
@Override
public long[] toLongArray() {
int size = backing.size();
long[] result = new long[size];
int i = 0;
for (Long value : backing) {
result[i++] = value;
}
return result;
}
@Override
public long[] toArray(long[] array) {
Objects.requireNonNull(array, "Array cannot be null");
long[] result = toLongArray();
if (array.length < result.length) {
return result;
}
System.arraycopy(result, 0, array, 0, result.length);
if (array.length > result.length) {
array[result.length] = 0;
}
return array;
}
@Override
-    public boolean addAll(LongCollection c) {
+    public boolean removeAll(@NotNull Collection<?> c) {
        Objects.requireNonNull(c, "Collection cannot be null");
        boolean modified = false;
-        LongIterator iterator = c.iterator();
-        while (iterator.hasNext()) {
-            modified |= add(iterator.nextLong());
+        for (Object obj : c) {
+            if (obj instanceof Long) {
+                modified |= remove((Long) obj);
+            }
}
return modified;
}
@Override
-    public boolean containsAll(LongCollection c) {
+    public boolean retainAll(@NotNull Collection<?> c) {
        Objects.requireNonNull(c, "Collection cannot be null");
-        LongIterator iterator = c.iterator();
+        boolean modified = false;
+        LongIterator iterator = iterator();
        while (iterator.hasNext()) {
-            if (!contains(iterator.nextLong())) {
-                return false;
+            long key = iterator.nextLong();
+            if (!c.contains(key)) {
+                modified |= remove(key);
            }
        }
+        return modified;
}
public ConcurrentLongHashSet(int concurrencyLevel) {
int numSegments = Integer.highestOneBit(concurrencyLevel) << 1;
this.segmentMask = numSegments - 1;
this.segments = new Segment[numSegments];
for (int i = 0; i < numSegments; i++) {
segments[i] = new Segment();
}
}
// ------------------- Core Methods -------------------
@Override
public boolean add(long key) {
Segment segment = getSegment(key);
segment.lock();
try {
return segment.set.add(key);
} finally {
segment.unlock();
}
}
@Override
public boolean contains(long key) {
Segment segment = getSegment(key);
segment.lock();
try {
return segment.set.contains(key);
} finally {
segment.unlock();
}
}
@Override
public boolean remove(long key) {
Segment segment = getSegment(key);
segment.lock();
try {
return segment.set.remove(key);
} finally {
segment.unlock();
}
}
// ------------------- Bulk Operations -------------------
@Override
public boolean containsAll(@NotNull Collection<?> c) {
Objects.requireNonNull(c, "Collection cannot be null");
for (Object obj : c) {
if (obj == null || !(obj instanceof Long)) return false;
if (!contains((Long) obj)) return false;
}
return true;
}
@Override
-    public boolean removeAll(LongCollection c) {
+    public boolean addAll(@NotNull Collection<? extends Long> c) {
        Objects.requireNonNull(c, "Collection cannot be null");
        boolean modified = false;
-        LongIterator iterator = c.iterator();
-        while (iterator.hasNext()) {
-            modified |= remove(iterator.nextLong());
+        for (Long value : c) {
+            modified |= add(value);
}
return modified;
}
// ------------------- Locking Helpers -------------------
private Segment getSegment(long key) {
int hash = spreadHash(Long.hashCode(key));
return segments[hash & segmentMask];
}
private static int spreadHash(int h) {
return (h ^ (h >>> 16)) & 0x7fffffff; // Avoid negative indices
}
+    // ------------------- Size Stuff -------------------
    @Override
-    public boolean retainAll(LongCollection c) {
-        Objects.requireNonNull(c, "Collection cannot be null");
-        return backing.retainAll(c);
+    public int size() {
+        int count = 0;
+        for (Segment segment : segments) {
+            segment.lock();
+            count += segment.set.size();
+            segment.unlock();
+        }
+        return count;
}
@Override
-    public boolean remove(long k) {
-        return backing.remove(k);
+    public boolean isEmpty() {
+        for (Segment segment : segments) {
+            segment.lock();
+            boolean empty = segment.set.isEmpty();
+            segment.unlock();
+            if (!empty) return false;
+        }
+        return true;
}
// ------------------- Cleanup -------------------
@Override
public void clear() {
for (Segment segment : segments) {
segment.lock();
segment.set.clear();
segment.unlock();
}
}
// ------------------- Iteration -------------------
@Override
public LongIterator iterator() {
return new CompositeLongIterator();
}
private class CompositeLongIterator implements LongIterator {
private int currentSegment = 0;
private LongIterator currentIterator;
CompositeLongIterator() {
advanceSegment();
}
private void advanceSegment() {
while (currentSegment < segments.length) {
segments[currentSegment].lock();
currentIterator = segments[currentSegment].set.iterator();
if (currentIterator.hasNext()) break;
segments[currentSegment].unlock();
currentSegment++;
}
}
@Override
public boolean hasNext() {
if (currentIterator == null) return false;
if (currentIterator.hasNext()) return true;
segments[currentSegment].unlock();
currentSegment++;
advanceSegment();
return currentIterator != null && currentIterator.hasNext();
}
@Override
public long nextLong() {
if (!hasNext()) throw new NoSuchElementException();
return currentIterator.nextLong();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
    // ------------------- Segment -------------------
private static class Segment {
final LongOpenHashSet set = new LongOpenHashSet();
final ReentrantLock lock = new ReentrantLock();
void lock() {
lock.lock();
}
void unlock() {
lock.unlock();
}
}
    // ------------------- Array Conversions -------------------
@Override
public long[] toLongArray() {
long[] result = new long[size()];
int i = 0;
LongIterator it = iterator();
while (it.hasNext()) {
result[i++] = it.nextLong();
}
return result;
}
@Override
public long[] toArray(long[] a) {
long[] result = toLongArray();
if (a.length < result.length) return result;
System.arraycopy(result, 0, a, 0, result.length);
return a;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
-        if (!(o instanceof LongSet that)) return false;
-        if (size() != that.size()) return false;
-        return containsAll(that);
+        if (!(o instanceof LongSet)) return false;
+        LongSet that = (LongSet) o;
+        return size() == that.size() && containsAll(that);
}
@Override
public int hashCode() {
-        return backing.hashCode();
+        int hash = 0;
+        LongIterator it = iterator();
+        while (it.hasNext()) {
+            hash += Long.hashCode(it.nextLong());
+        }
+        return hash;
}
-    @Override
-    public String toString() {
-        return backing.toString();
-    }
+    @Override @NotNull public Object[] toArray() { return Collections.unmodifiableSet(this).toArray(); }
+    @Override @NotNull public <T> T[] toArray(@NotNull T[] a) { return Collections.unmodifiableSet(this).toArray(a); }
static class WrappingLongIterator implements LongIterator {
private final Iterator<Long> backing;
WrappingLongIterator(Iterator<Long> backing) {
this.backing = Objects.requireNonNull(backing);
}
@Override
public boolean hasNext() {
return backing.hasNext();
}
@Override
public long nextLong() {
return backing.next();
}
@Override
public Long next() {
return backing.next();
}
@Override
public void remove() {
backing.remove();
}
}
}
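
The rewrite above is classic lock striping: a key is spread with spreadHash, masked into one of a power-of-two number of Segments, and each Segment guards its own LongOpenHashSet with a ReentrantLock, so writers hitting different segments no longer contend on a single lock. A self-contained sketch of the same idea follows; it swaps the fastutil set for java.util.HashSet so it compiles without extra dependencies, and the names in it (StripedLongSetSketch, Seg) are illustrative only.

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;

public final class StripedLongSetSketch {
    private static final int DEFAULT_SEGMENTS = 16;

    private final Seg[] segments;
    private final int segmentMask;

    public StripedLongSetSketch(int concurrencyLevel) {
        // Double and round to a power of two, mirroring the constructor above
        // (the Math.max guard is an addition for this sketch).
        int numSegments = Integer.highestOneBit(Math.max(1, concurrencyLevel)) << 1;
        this.segmentMask = numSegments - 1;
        this.segments = new Seg[numSegments];
        for (int i = 0; i < numSegments; i++) {
            segments[i] = new Seg();
        }
    }

    public boolean add(long key) {
        Seg seg = segmentFor(key);
        seg.lock.lock();
        try {
            return seg.set.add(key); // only this segment is blocked, not the whole set
        } finally {
            seg.lock.unlock();
        }
    }

    public boolean contains(long key) {
        Seg seg = segmentFor(key);
        seg.lock.lock();
        try {
            return seg.set.contains(key);
        } finally {
            seg.lock.unlock();
        }
    }

    private Seg segmentFor(long key) {
        int h = Long.hashCode(key);
        h = (h ^ (h >>> 16)) & 0x7fffffff; // spread the bits and drop the sign, as spreadHash() does
        return segments[h & segmentMask];  // power-of-two segment count makes the mask a cheap modulo
    }

    private static final class Seg {
        final Set<Long> set = new HashSet<>();
        final ReentrantLock lock = new ReentrantLock();
    }

    public static void main(String[] args) throws InterruptedException {
        StripedLongSetSketch set = new StripedLongSetSketch(DEFAULT_SEGMENTS);
        Thread a = new Thread(() -> { for (long k = 0; k < 1_000; k++) set.add(k); });
        Thread b = new Thread(() -> { for (long k = 500; k < 1_500; k++) set.add(k); });
        a.start(); b.start(); a.join(); b.join();
        System.out.println(set.contains(1_499)); // true
    }
}

Doubling the requested concurrency level (Integer.highestOneBit(concurrencyLevel) << 1) keeps the segment count a power of two, which is what lets getSegment reduce the spread hash with a single mask instead of a modulo.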