From bdccf345b787ce58b079a4a76db9c85dcdc926f9 Mon Sep 17 00:00:00 2001
From: Creeam <102713261+HaHaWTH@users.noreply.github.com>
Date: Tue, 11 Feb 2025 13:12:47 -0800
Subject: [PATCH] Thread safety fixes (#224)

---
 .../features/0084-Multithreaded-Tracker.patch |  13 ++
 ...003-Pufferfish-Optimize-mob-spawning.patch |  12 +-
 .../leaf/util/map/ConcurrentLongHashSet.java  | 211 ++++++++++++++++++
 3 files changed, 235 insertions(+), 1 deletion(-)
 create mode 100644 leaf-server/src/main/java/org/dreeam/leaf/util/map/ConcurrentLongHashSet.java

diff --git a/leaf-server/minecraft-patches/features/0084-Multithreaded-Tracker.patch b/leaf-server/minecraft-patches/features/0084-Multithreaded-Tracker.patch
index 004a52b9..02700732 100644
--- a/leaf-server/minecraft-patches/features/0084-Multithreaded-Tracker.patch
+++ b/leaf-server/minecraft-patches/features/0084-Multithreaded-Tracker.patch
@@ -23,6 +23,19 @@ for the case of some NPC plugins which using real entity type, e.g. Citizens.
 But it is still recommending to use those packet based, virtual entity based
 NPC plugins, e.g. ZNPC Plus, Adyeshach, Fancy NPC, etc.
 
+diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/player/RegionizedPlayerChunkLoader.java b/ca/spottedleaf/moonrise/patches/chunk_system/player/RegionizedPlayerChunkLoader.java
+index dd2509996bfd08e8c3f9f2be042229eac6d7692d..a35e9fae8f8da0c42f0616c4f78dc396492673aa 100644
+--- a/ca/spottedleaf/moonrise/patches/chunk_system/player/RegionizedPlayerChunkLoader.java
++++ b/ca/spottedleaf/moonrise/patches/chunk_system/player/RegionizedPlayerChunkLoader.java
+@@ -342,7 +342,7 @@ public final class RegionizedPlayerChunkLoader {
+         private boolean canGenerateChunks = true;
+ 
+         private final ArrayDeque<ChunkHolderManager.TicketOperation<?, ?>> delayedTicketOps = new ArrayDeque<>();
+-        private final LongOpenHashSet sentChunks = new LongOpenHashSet();
++        private final LongOpenHashSet sentChunks = org.dreeam.leaf.config.modules.async.MultithreadedTracker.enabled && !org.dreeam.leaf.config.modules.async.MultithreadedTracker.compatModeEnabled ? new org.dreeam.leaf.util.map.ConcurrentLongHashSet() : new LongOpenHashSet(); // Leaf - Multithreaded tracker
+ 
+         private static final byte CHUNK_TICKET_STAGE_NONE = 0;
+         private static final byte CHUNK_TICKET_STAGE_LOADING = 1;
 diff --git a/net/minecraft/server/level/ChunkMap.java b/net/minecraft/server/level/ChunkMap.java
 index 5d9d233e3a568aa6297ed9c703fa450f98158602..8986c059e7aadb58ae8d9ab7b848de10f9faa6b2 100644
 --- a/net/minecraft/server/level/ChunkMap.java
diff --git a/leaf-server/paper-patches/features/0003-Pufferfish-Optimize-mob-spawning.patch b/leaf-server/paper-patches/features/0003-Pufferfish-Optimize-mob-spawning.patch
index b7143462..345bc1c6 100644
--- a/leaf-server/paper-patches/features/0003-Pufferfish-Optimize-mob-spawning.patch
+++ b/leaf-server/paper-patches/features/0003-Pufferfish-Optimize-mob-spawning.patch
@@ -7,6 +7,7 @@ Original license: GPL v3
 Original project: https://github.com/pufferfish-gg/Pufferfish
 
 Co-authored-by: booky10
+Co-authored-by: HaHaWTH <102713261+HaHaWTH@users.noreply.github.com>
 
 This patch aims to reduce the main-thread impact of mob spawning by
 offloading as much work as possible to other threads. It is possible for
@@ -22,9 +23,18 @@ and, in my opinion, worth the low risk of minor mob-spawning-related
 inconsistencies.
 
 diff --git a/src/main/java/ca/spottedleaf/moonrise/common/list/IteratorSafeOrderedReferenceSet.java b/src/main/java/ca/spottedleaf/moonrise/common/list/IteratorSafeOrderedReferenceSet.java
-index c21e00812f1aaa1279834a0562d360d6b89e146c..877d2095a066854939f260ca4b0b8c7b5abb620f 100644
+index c21e00812f1aaa1279834a0562d360d6b89e146c..4ae478c04ef44c91408a7f3f0405291f91794873 100644
 --- a/src/main/java/ca/spottedleaf/moonrise/common/list/IteratorSafeOrderedReferenceSet.java
 +++ b/src/main/java/ca/spottedleaf/moonrise/common/list/IteratorSafeOrderedReferenceSet.java
+@@ -10,7 +10,7 @@ public final class IteratorSafeOrderedReferenceSet<E> {
+     public static final int ITERATOR_FLAG_SEE_ADDITIONS = 1 << 0;
+ 
+     private final Reference2IntLinkedOpenHashMap<E> indexMap;
+-    private int firstInvalidIndex = -1;
++    private volatile int firstInvalidIndex = -1; // Leaf - Async mob spawning - volatile
+ 
+     /* list impl */
+     private E[] listElements;
 @@ -18,7 +18,7 @@ public final class IteratorSafeOrderedReferenceSet<E> {
      private final double maxFragFactor;
diff --git a/leaf-server/src/main/java/org/dreeam/leaf/util/map/ConcurrentLongHashSet.java b/leaf-server/src/main/java/org/dreeam/leaf/util/map/ConcurrentLongHashSet.java
new file mode 100644
index 00000000..5a6ab406
--- /dev/null
+++ b/leaf-server/src/main/java/org/dreeam/leaf/util/map/ConcurrentLongHashSet.java
@@ -0,0 +1,211 @@
+package org.dreeam.leaf.util.map;
+
+import it.unimi.dsi.fastutil.longs.LongCollection;
+import it.unimi.dsi.fastutil.longs.LongIterator;
+import it.unimi.dsi.fastutil.longs.LongOpenHashSet;
+import it.unimi.dsi.fastutil.longs.LongSet;
+import org.jetbrains.annotations.NotNull;
+
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Objects;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * A thread-safe implementation of {@link LongOpenHashSet} using ConcurrentHashMap.KeySetView as backing storage.
+ * This implementation provides concurrent access and high performance for concurrent operations.
+ */
+@SuppressWarnings({"unused", "deprecation"})
+public final class ConcurrentLongHashSet extends LongOpenHashSet implements LongSet { // Extending LongOpenHashSet for some moonrise usages
+    private final ConcurrentHashMap.KeySetView<Long, Boolean> backing;
+
+    /**
+     * Creates a new empty concurrent long set.
+     */
+    public ConcurrentLongHashSet() {
+        this.backing = ConcurrentHashMap.newKeySet();
+    }
+
+    @Override
+    public int size() {
+        return backing.size();
+    }
+
+    @Override
+    public boolean isEmpty() {
+        return backing.isEmpty();
+    }
+
+    @Override
+    public @NotNull LongIterator iterator() {
+        return new WrappingLongIterator(backing.iterator());
+    }
+
+    @NotNull
+    @Override
+    public Object @NotNull [] toArray() {
+        return backing.toArray();
+    }
+
+    @NotNull
+    @Override
+    public <T> T @NotNull [] toArray(@NotNull T @NotNull [] array) {
+        Objects.requireNonNull(array, "Array cannot be null");
+        return backing.toArray(array);
+    }
+
+    @Override
+    public boolean containsAll(@NotNull Collection<?> collection) {
+        Objects.requireNonNull(collection, "Collection cannot be null");
+        return backing.containsAll(collection);
+    }
+
+    @Override
+    public boolean addAll(@NotNull Collection<? extends Long> collection) {
+        Objects.requireNonNull(collection, "Collection cannot be null");
+        return backing.addAll(collection);
+    }
+
+    @Override
+    public boolean removeAll(@NotNull Collection<?> collection) {
+        Objects.requireNonNull(collection, "Collection cannot be null");
+        return backing.removeAll(collection);
+    }
+
+    @Override
+    public boolean retainAll(@NotNull Collection<?> collection) {
+        Objects.requireNonNull(collection, "Collection cannot be null");
+        return backing.retainAll(collection);
+    }
+
+    @Override
+    public void clear() {
+        backing.clear();
+    }
+
+    @Override
+    public boolean add(long key) {
+        return backing.add(key);
+    }
+
+    @Override
+    public boolean contains(long key) {
+        return backing.contains(key);
+    }
+
+    @Override
+    public long[] toLongArray() {
+        int size = backing.size();
+        long[] result = new long[size];
+        int i = 0;
+        for (Long value : backing) {
+            result[i++] = value;
+        }
+        return result;
+    }
+
+    @Override
+    public long[] toArray(long[] array) {
+        Objects.requireNonNull(array, "Array cannot be null");
+        long[] result = toLongArray();
+        if (array.length < result.length) {
+            return result;
+        }
+        System.arraycopy(result, 0, array, 0, result.length);
+        if (array.length > result.length) {
+            array[result.length] = 0;
+        }
+        return array;
+    }
+
+    @Override
+    public boolean addAll(LongCollection c) {
+        Objects.requireNonNull(c, "Collection cannot be null");
+        boolean modified = false;
+        LongIterator iterator = c.iterator();
+        while (iterator.hasNext()) {
+            modified |= add(iterator.nextLong());
+        }
+        return modified;
+    }
+
+    @Override
+    public boolean containsAll(LongCollection c) {
+        Objects.requireNonNull(c, "Collection cannot be null");
+        LongIterator iterator = c.iterator();
+        while (iterator.hasNext()) {
+            if (!contains(iterator.nextLong())) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public boolean removeAll(LongCollection c) {
+        Objects.requireNonNull(c, "Collection cannot be null");
+        boolean modified = false;
+        LongIterator iterator = c.iterator();
+        while (iterator.hasNext()) {
+            modified |= remove(iterator.nextLong());
+        }
+        return modified;
+    }
+
+    @Override
+    public boolean retainAll(LongCollection c) {
+        Objects.requireNonNull(c, "Collection cannot be null");
+        return backing.retainAll(c);
+    }
+
+    @Override
+    public boolean remove(long k) {
+        return backing.remove(k);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (!(o instanceof LongSet that)) return false;
+        if (size() != that.size()) return false;
+        return containsAll(that);
+    }
+
+    @Override
+    public int hashCode() {
+        return backing.hashCode();
+    }
+
+    @Override
+    public String toString() {
+        return backing.toString();
+    }
+
+    static class WrappingLongIterator implements LongIterator {
+        private final Iterator<Long> backing;
+
+        WrappingLongIterator(Iterator<Long> backing) {
+            this.backing = Objects.requireNonNull(backing);
+        }
+
+        @Override
+        public boolean hasNext() {
+            return backing.hasNext();
+        }
+
+        @Override
+        public long nextLong() {
+            return backing.next();
+        }
+
+        @Override
+        public Long next() {
+            return backing.next();
+        }
+
+        @Override
+        public void remove() {
+            backing.remove();
+        }
+    }
+}
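
Illustrative note (not part of the patch): the sketch below shows the kind of workload the swapped-in ConcurrentLongHashSet is meant to survive when the multithreaded tracker and the main thread both touch a tracker-visible set. Only ConcurrentLongHashSet and its add/size methods come from this patch; the demo class name, thread count, and element count are made up for the example. A plain LongOpenHashSet gives no guarantee under the same concurrent mutation.

import org.dreeam.leaf.util.map.ConcurrentLongHashSet;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Illustrative stress test only; thread and element counts are arbitrary.
public final class ConcurrentLongHashSetDemo {
    public static void main(String[] args) throws InterruptedException {
        ConcurrentLongHashSet chunks = new ConcurrentLongHashSet();
        int threads = 8;
        int perThread = 100_000;
        ExecutorService pool = Executors.newFixedThreadPool(threads);
        for (int t = 0; t < threads; t++) {
            long base = (long) t * perThread;
            pool.execute(() -> {
                for (long i = 0; i < perThread; i++) {
                    chunks.add(base + i); // concurrent writers, no external locking
                }
            });
        }
        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.MINUTES);
        // Every insertion is retained; a bare LongOpenHashSet can lose entries
        // or corrupt its internal table under the same workload.
        System.out.println(chunks.size() == threads * perThread);
    }
}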