Compare commits


8 Commits

Author SHA1 Message Date
MrHua269    8d6d9119a8  Buffered linear region format 1.0  2025-06-13 17:50:42 +08:00
Bacteriawa  40a455c92f  Updated Upstream (Folia)  2025-06-13 02:32:12 +08:00
Bacteriawa  23b9e44ff3  Merge branch 'dev/1.21.4-hardfork' of https://github.com/LuminolMC/Luminol into dev/1.21.4-hardfork  2025-06-13 02:29:33 +08:00
Bacteriawa  da25676332  Remove faster chunk serialization  2025-05-27 19:45:24 +08:00
Bacteriawa  3d60c6c0fc  Remove faster chunk serialization  2025-05-26 23:49:10 +08:00
Bacteriawa  9a6720e5f1  Merge branch 'dev/1.21.4-hardfork' of https://github.com/LuminolMC/Luminol into dev/1.21.4-hardfork  2025-05-23 23:00:16 +08:00
Bacteriawa  61c3cc8c74  Updated Upstream (Folia)  2025-05-20 16:31:49 +08:00
Bacteriawa  8e697a3dd6  Updated Upstream (Folia)  2025-05-20 14:13:16 +08:00
27 changed files with 665 additions and 436 deletions

View File

@@ -2,7 +2,7 @@ group = me.earthme.luminol
version=1.21.4-R0.1-SNAPSHOT
mcVersion=1.21.4
-foliaRef=f69d1c974daa7646b869d4c11dcc86915eec96a8
+foliaRef=b785bccd6fd270b4897a165772124eef55d58c6c
org.gradle.configuration-cache=true
org.gradle.caching=true

View File

@@ -22,7 +22,7 @@ index a814512fcfb85312474ae2c2c21443843bf57831..2e084a5b28cbe4737f48c25e10af5892
public MoonriseRegionFileIO.RegionDataController.WriteData moonrise$startWrite(
final int chunkX, final int chunkZ, final CompoundTag compound
diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/io/MoonriseRegionFileIO.java b/ca/spottedleaf/moonrise/patches/chunk_system/io/MoonriseRegionFileIO.java
-index 98fbc5c8044bd945d64569f13412a6e7e49a4e7f..9709d60d3b2a5478cd6c711669e32f28ac105667 100644
+index b5c300f867f4f9670f51b6082982e9363e7a7326..8bafd7ac9706dbf6c43a136733a63ecc3ffb430d 100644
--- a/ca/spottedleaf/moonrise/patches/chunk_system/io/MoonriseRegionFileIO.java
+++ b/ca/spottedleaf/moonrise/patches/chunk_system/io/MoonriseRegionFileIO.java
@@ -1260,7 +1260,7 @@ public final class MoonriseRegionFileIO {

View File

@@ -1,219 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Bacteriawa <A3167717663@hotmail.com>
Date: Wed, 30 Nov 2022 21:51:16 +0100
Subject: [PATCH] Gale: Faster chunk serialization
Co-authored-by: Martijn Muijsers <martijnmuijsers@live.nl>, Angeline <jellysquid3@users.noreply.github.com>
As part of: Gale (https://github.com/GaleMC/Gale/blob/276e903b2688f23b19bdc8d493c0bf87656d2400/patches/server/0096-Faster-chunk-serialization.patch), Lithium (https://github.com/CaffeineMC/lithium-fabric)
Licensed under: LGPL-3.0 (https://www.gnu.org/licenses/lgpl-3.0.html)
diff --git a/net/minecraft/util/BitStorage.java b/net/minecraft/util/BitStorage.java
index 02502d50f0255f5bbcc0ecb965abb48cc1a112da..322a1ba06d6aed44ec67dc3f1831ac6b05c82fe0 100644
--- a/net/minecraft/util/BitStorage.java
+++ b/net/minecraft/util/BitStorage.java
@@ -21,6 +21,8 @@ public interface BitStorage extends ca.spottedleaf.moonrise.patches.block_counti
BitStorage copy();
+ <T> void compact(net.minecraft.world.level.chunk.Palette<T> srcPalette, net.minecraft.world.level.chunk.Palette<T> dstPalette, short[] out); // Gale - Lithium - faster chunk serialization
+
// Paper start - block counting
// provide default impl in case mods implement this...
@Override
diff --git a/net/minecraft/util/SimpleBitStorage.java b/net/minecraft/util/SimpleBitStorage.java
index e6306a68c8652d4c5d22d5ecb1416f5f931f76ee..2a3d34733b61c73729daa4da61f33e2c2e7b6c72 100644
--- a/net/minecraft/util/SimpleBitStorage.java
+++ b/net/minecraft/util/SimpleBitStorage.java
@@ -465,4 +465,36 @@ public class SimpleBitStorage implements BitStorage {
super(message);
}
}
+
+ // Gale start - Lithium - faster chunk serialization
+ @Override
+ public <T> void compact(net.minecraft.world.level.chunk.Palette<T> srcPalette, net.minecraft.world.level.chunk.Palette<T> dstPalette, short[] out) {
+ if (this.size >= Short.MAX_VALUE) throw new IllegalStateException("Array too large");
+ if (this.size != out.length) throw new IllegalStateException("Array size mismatch");
+
+ short[] mappings = new short[(int) (this.mask + 1)];
+
+ int idx = 0;
+
+ for (long word : this.data) {
+ long bits = word;
+
+ for (int elementIdx = 0; elementIdx < this.valuesPerLong; ++elementIdx) {
+ int value = (int) (bits & this.mask);
+ int remappedId = mappings[value];
+
+ if (remappedId == 0) {
+ remappedId = dstPalette.idFor(srcPalette.valueFor(value)) + 1;
+ mappings[value] = (short) remappedId;
+ }
+
+ out[idx] = (short) (remappedId - 1);
+ bits >>= this.bits;
+
+ ++idx;
+
+ if (idx >= this.size) return;
+ }
+ }
+ } // Gale end - Lithium - faster chunk serialization
}
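
The loop above unpacks each bit-packed word and remaps palette ids on the fly, using a table where slot srcId stores dstId + 1 so that a stored 0 doubles as "not yet mapped". A standalone sketch of that remapping trick, with the palette lookup abstracted into a hypothetical remap operator (names here are illustrative, not from the patch):

import java.util.function.IntUnaryOperator;

final class CompactSketch {
    // Remap already-unpacked palette ids, caching each translation with a +1
    // offset so that a stored 0 means "this id has not been remapped yet".
    static short[] compactIds(int[] ids, int maxId, IntUnaryOperator remap) {
        short[] mappings = new short[maxId + 1];
        short[] out = new short[ids.length];
        for (int i = 0; i < ids.length; i++) {
            int value = ids[i];
            int remappedId = mappings[value];
            if (remappedId == 0) {                      // first occurrence of this id
                remappedId = remap.applyAsInt(value) + 1;
                mappings[value] = (short) remappedId;
            }
            out[i] = (short) (remappedId - 1);
        }
        return out;
    }
}
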
diff --git a/net/minecraft/util/ZeroBitStorage.java b/net/minecraft/util/ZeroBitStorage.java
index 09fd99c9cbd23b5f3c899bfb00c9b89651948ed8..90a85a00c6208d2db65cafb164cd95e6128b6dc4 100644
--- a/net/minecraft/util/ZeroBitStorage.java
+++ b/net/minecraft/util/ZeroBitStorage.java
@@ -19,6 +19,8 @@ public class ZeroBitStorage implements BitStorage {
return 0;
}
+ @Override public <T> void compact(net.minecraft.world.level.chunk.Palette<T> srcPalette, net.minecraft.world.level.chunk.Palette<T> dstPalette, short[] out) {} // Gale - Lithium - faster chunk serialization
+
@Override
public final void set(int index, int value) { // Paper - Perf: Optimize SimpleBitStorage
//Validate.inclusiveBetween(0L, (long)(this.size - 1), (long)index); // Paper - Perf: Optimize SimpleBitStorage
diff --git a/net/minecraft/world/level/chunk/PaletteResize.java b/net/minecraft/world/level/chunk/PaletteResize.java
index c723606fa0be811e580ba47de8c9c575583cc930..60d3176477c201643e1657751fcffad511b2994f 100644
--- a/net/minecraft/world/level/chunk/PaletteResize.java
+++ b/net/minecraft/world/level/chunk/PaletteResize.java
@@ -1,5 +1,5 @@
package net.minecraft.world.level.chunk;
-interface PaletteResize<T> {
+public interface PaletteResize<T> { // Gale - Lithium - faster chunk serialization - package -> public
int onResize(int bits, T objectAdded);
}
diff --git a/net/minecraft/world/level/chunk/PalettedContainer.java b/net/minecraft/world/level/chunk/PalettedContainer.java
index f5da433050fd3060e0335d4002d520ebe8cd691f..5a6e699df0a177ea6a919cad609a508678bff823 100644
--- a/net/minecraft/world/level/chunk/PalettedContainer.java
+++ b/net/minecraft/world/level/chunk/PalettedContainer.java
@@ -25,6 +25,21 @@ import net.minecraft.util.ThreadingDetector;
import net.minecraft.util.ZeroBitStorage;
public class PalettedContainer<T> implements PaletteResize<T>, PalettedContainerRO<T> {
+ // Gale start - Lithium - faster chunk serialization
+ private static final ThreadLocal<short[]> CACHED_ARRAY_4096 = ThreadLocal.withInitial(() -> new short[4096]);
+ private static final ThreadLocal<short[]> CACHED_ARRAY_64 = ThreadLocal.withInitial(() -> new short[64]);
+ private Optional<LongStream> asOptional(long[] data) {
+ return Optional.of(Arrays.stream(data));
+ }
+ private short[] getOrCreate(int size) {
+ return switch (size) {
+ case 64 -> CACHED_ARRAY_64.get();
+ case 4096 -> CACHED_ARRAY_4096.get();
+ default -> new short[size];
+ };
+ }
+ // Gale end - Lithium - faster chunk serialization
+
private static final int MIN_PALETTE_BITS = 0;
private final PaletteResize<T> dummyPaletteResize = (bits, objectAdded) -> 0;
public final IdMap<T> registry;
@@ -344,28 +359,53 @@ public class PalettedContainer<T> implements PaletteResize<T>, PalettedContainer
public synchronized PalettedContainerRO.PackedData<T> pack(IdMap<T> registry, PalettedContainer.Strategy strategy) { // Paper - synchronize
this.acquire();
- PalettedContainerRO.PackedData var12;
+ // Gale start - Lithium - faster chunk serialization
+ Optional<LongStream> data = Optional.empty();
+ List<T> elements = null;
try {
- HashMapPalette<T> hashMapPalette = new HashMapPalette<>(registry, this.data.storage.getBits(), this.dummyPaletteResize);
- int size = strategy.size();
- int[] ints = new int[size];
- this.data.storage.unpack(ints);
- swapPalette(ints, id -> hashMapPalette.idFor(this.data.palette.valueFor(id)));
- int i = strategy.calculateBitsForSerialization(registry, hashMapPalette.getSize());
- Optional<LongStream> optional;
- if (i != 0) {
- SimpleBitStorage simpleBitStorage = new SimpleBitStorage(i, size, ints);
- optional = Optional.of(Arrays.stream(simpleBitStorage.getRaw()));
- } else {
- optional = Optional.empty();
+ // The palette that will be serialized
+ net.caffeinemc.mods.lithium.common.world.chunk.LithiumHashPalette<T> hashPalette = null;
+
+ final Palette<T> palette = this.data.palette();
+ final BitStorage storage = this.data.storage();
+ if (storage instanceof ZeroBitStorage || palette.getSize() == 1) {
+ // If the palette only contains one entry, don't attempt to repack it.
+ elements = List.of(palette.valueFor(0));
+ } else if (palette instanceof net.caffeinemc.mods.lithium.common.world.chunk.LithiumHashPalette<T> lithiumHashPalette) {
+ hashPalette = lithiumHashPalette;
}
- var12 = new PalettedContainerRO.PackedData<>(hashMapPalette.getEntries(), optional);
+ if (elements == null) {
+ var compactedPalette = new net.caffeinemc.mods.lithium.common.world.chunk.LithiumHashPalette<>(registry, storage.getBits(), this.dummyPaletteResize);
+ short[] array = this.getOrCreate(strategy.size());
+
+ storage.compact(this.data.palette(), compactedPalette, array);
+
+ // If the palette didn't change during compaction, do a simple copy of the data array
+ if (hashPalette != null && hashPalette.getSize() == compactedPalette.getSize() && storage.getBits() == strategy.calculateBitsForSerialization(registry, hashPalette.getSize())) { // paletteSize can de-sync from palette - see https://github.com/CaffeineMC/lithium-fabric/issues/279
+ data = this.asOptional(storage.getRaw().clone());
+ elements = hashPalette.getElements();
+ } else {
+ int bits = strategy.calculateBitsForSerialization(registry, compactedPalette.getSize());
+ if (bits != 0) {
+ // Re-pack the integer array as the palette has changed size
+ SimpleBitStorage copy = new SimpleBitStorage(bits, array.length);
+ for (int i = 0; i < array.length; ++i)
+ copy.set(i, array[i]);
+
+ // We don't need to clone the data array as we are the sole owner of it
+ data = this.asOptional(copy.getRaw());
+ }
+
+ elements = compactedPalette.getElements();
+ }
+ }
} finally {
this.release();
}
- return var12;
+ return new PalettedContainerRO.PackedData<>(elements, data);
+ // Gale end - Lithium - faster chunk serialization
}
private static <T> void swapPalette(int[] bits, IntUnaryOperator operator) {
@@ -405,13 +445,30 @@ public class PalettedContainer<T> implements PaletteResize<T>, PalettedContainer
@Override
public void count(PalettedContainer.CountConsumer<T> countConsumer) {
- if (this.data.palette.getSize() == 1) {
- countConsumer.accept(this.data.palette.valueFor(0), this.data.storage.getSize());
- } else {
- Int2IntOpenHashMap map = new Int2IntOpenHashMap();
- this.data.storage.getAll(id -> map.addTo(id, 1));
- map.int2IntEntrySet().forEach(idEntry -> countConsumer.accept(this.data.palette.valueFor(idEntry.getIntKey()), idEntry.getIntValue()));
+ // Gale start - Lithium - faster chunk serialization
+ int len = this.data.palette().getSize();
+
+ // Do not allocate huge arrays if we're using a large palette
+ if (len > 4096) {
+ // VanillaCopy
+ if (this.data.palette.getSize() == 1) {
+ countConsumer.accept(this.data.palette.valueFor(0), this.data.storage.getSize());
+ } else {
+ Int2IntOpenHashMap map = new Int2IntOpenHashMap();
+ this.data.storage.getAll(id -> map.addTo(id, 1));
+ map.int2IntEntrySet().forEach(idEntry -> countConsumer.accept(this.data.palette.valueFor(idEntry.getIntKey()), idEntry.getIntValue()));
+ }
+ return; // large palettes are fully handled above; skip the array-based path below
+ }
+ short[] counts = new short[len];
+ this.data.storage().getAll(i -> counts[i]++);
+
+ for (int i = 0; i < counts.length; i++) {
+ T obj = this.data.palette().valueFor(i);
+
+ if (obj != null)
+ countConsumer.accept(obj, counts[i]);
}
+ // Gale end - Lithium - faster chunk serialization
}
record Configuration<T>(Palette.Factory factory, int bits) {

View File

@@ -5,7 +5,7 @@ Subject: [PATCH] SparklyPaper Optimize canSee checks
diff --git a/src/main/java/org/bukkit/craftbukkit/entity/CraftPlayer.java b/src/main/java/org/bukkit/craftbukkit/entity/CraftPlayer.java
-index 3819c748473f2cbf53890867e552985eeb8ee35f..8cb02f08ccddd69ad3ea868e848c61dfb4fa5fe7 100644
+index b6e451c8a41a1d25bac679d18156d6e813efda2a..48c85729a6de20b554325ca29c2448d515bbc7a7 100644
--- a/src/main/java/org/bukkit/craftbukkit/entity/CraftPlayer.java
+++ b/src/main/java/org/bukkit/craftbukkit/entity/CraftPlayer.java
@@ -210,7 +210,7 @@ public class CraftPlayer extends CraftHumanEntity implements Player {

View File

@@ -0,0 +1,621 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/data/BufferedLinearRegionFile.java
@@ -1,0 +_,618 @@
+package me.earthme.luminol.data;
+
+import abomination.IRegionFile;
+import ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO;
+import me.earthme.luminol.utils.DirectBufferReleaser;
+import net.jpountz.xxhash.XXHash32;
+import net.jpountz.xxhash.XXHashFactory;
+import net.minecraft.nbt.CompoundTag;
+import net.minecraft.world.level.ChunkPos;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+import java.io.*;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+public class BufferedLinearRegionFile implements IRegionFile {
+ private static final double AUTO_COMPACT_PERCENT = 3.0 / 5.0; // 60 %
+ private static final long AUTO_COMPACT_SIZE = 1024 * 1024; // 1 MiB
+
+ private static final long SUPER_BLOCK = 0x1145141919810L;
+ private static final int HASH_SEED = 0x0721;
+ private static final byte VERSION = 0x01;
+
+ private final Path filePath;
+
+ private final ReadWriteLock fileAccessLock = new ReentrantReadWriteLock();
+ private final XXHash32 xxHash32 = XXHashFactory.fastestInstance().hash32();
+ private final Sector[] sectors = new Sector[1024];
+ private long currentAcquiredIndex = this.headerSize();
+ private byte compressionLevel = 6;
+ private int xxHash32Seed = HASH_SEED;
+ private FileChannel channel;
+
+ public BufferedLinearRegionFile(Path filePath, int compressionLevel) throws IOException {
+ this(filePath);
+ this.compressionLevel = (byte) compressionLevel;
+ }
+
+ public BufferedLinearRegionFile(Path filePath) throws IOException {
+ this.channel = FileChannel.open(
+ filePath,
+ StandardOpenOption.CREATE,
+ StandardOpenOption.WRITE,
+ StandardOpenOption.READ
+ );
+ this.filePath = filePath;
+
+ for (int i = 0; i < 1024; i++) {
+ this.sectors[i] = new Sector(i, this.headerSize(), 0);
+ }
+
+ this.readHeaders();
+ }
+
+ private void readHeaders() throws IOException {
+ if (this.channel.size() < this.headerSize()) {
+ return;
+ }
+
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(this.headerSize());
+ this.channel.read(buffer, 0);
+ buffer.flip();
+
+ if (buffer.getLong() != SUPER_BLOCK || buffer.get() != VERSION) {
+ throw new IOException("Invalid file format or version mismatch");
+ }
+
+ this.compressionLevel = buffer.get(); // Compression level (not used)
+ this.xxHash32Seed = buffer.getInt(); // XXHash32 seed
+ this.currentAcquiredIndex = buffer.getLong(); // Acquired index
+
+ for (Sector sector : this.sectors) {
+ sector.restoreFrom(buffer);
+ if (sector.hasData()) {
+ this.currentAcquiredIndex = Math.max(this.currentAcquiredIndex, sector.offset + sector.length);
+ }
+ }
+
+ DirectBufferReleaser.clean(buffer);
+ }
+
+ private void writeHeaders() throws IOException {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(this.headerSize());
+
+ buffer.putLong(SUPER_BLOCK); // Magic
+ buffer.put(VERSION); // Version
+ buffer.put(this.compressionLevel); // Compression level
+ buffer.putInt(this.xxHash32Seed); // XXHash32 seed
+ buffer.putLong(this.currentAcquiredIndex); // Acquired index
+
+ for (Sector sector : this.sectors) {
+ buffer.put(sector.getEncoded());
+ }
+
+ buffer.flip();
+
+ long offset = 0;
+ while (buffer.hasRemaining()) {
+ offset += this.channel.write(buffer, offset);
+ }
+
+ DirectBufferReleaser.clean(buffer);
+ }
+
+ private int sectorSize() {
+ return this.sectors.length * Sector.sizeOfSingle();
+ }
+
+ private int headerSize() {
+ int result = 0;
+
+ result += Long.BYTES; // Magic
+ result += Byte.BYTES; // Version
+ result += Byte.BYTES; // Compression level
+ result += Integer.BYTES; // XXHash32 seed
+ result += Long.BYTES; // Acquired index
+ result += this.sectorSize(); // Sectors
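+
+ // With 1024 sectors at 17 bytes each (offset 8 + length 8 + hasData 1),
+ // this is 8 + 1 + 1 + 4 + 8 + 1024 * 17 = 17,430 bytes in total.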
+
+ return result;
+ }
+
+ private void flushInternal() throws IOException {
+ this.writeHeaders();
+
+ long spareSize = this.channel.size();
+
+ spareSize -= this.headerSize();
+ for (Sector sector : this.sectors) {
+ spareSize -= sector.length;
+ }
+
+ long sectorSize = 0;
+ for (Sector sector : this.sectors) {
+ sectorSize += sector.length;
+ }
+
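+ // Compact only when dead space exceeds both 1 MiB and 60% of the live data,
+ // so small or mostly-live files are not rewritten on every flush.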
+ if (spareSize > AUTO_COMPACT_SIZE && (double)spareSize > ((double)sectorSize) * AUTO_COMPACT_PERCENT) {
+ this.compact();
+ }
+ }
+
+ private void closeInternal() throws IOException {
+ this.writeHeaders();
+ this.channel.force(true);
+ this.compact();
+ this.channel.close();
+ }
+
+ private void compact() throws IOException {
+ this.writeHeaders(); // save headers for compact
+ this.channel.force(true);
+ try (FileChannel tempChannel = FileChannel.open(
+ new File(this.filePath.toString() + ".tmp").toPath(),
+ StandardOpenOption.CREATE,
+ StandardOpenOption.WRITE,
+ StandardOpenOption.READ
+ )){
+ // get the latest head in file
+ final ByteBuffer headerBuffer = ByteBuffer.allocateDirect(this.headerSize());
+ this.channel.read(headerBuffer, 0);
+ headerBuffer.flip();
+
+ long offsetHeader = 0;
+ while (headerBuffer.hasRemaining()) {
+ offsetHeader += tempChannel.write(headerBuffer, offsetHeader);
+ }
+ DirectBufferReleaser.clean(headerBuffer);
+
+ int offsetPointer = this.headerSize();
+ for (Sector sector : this.sectors) {
+ if (!sector.hasData()) {
+ continue;
+ }
+
+ final ByteBuffer sectorData = sector.read(this.channel);
+ final int length = sectorData.remaining();
+
+ final Sector newRecalculated = new Sector(sector.index, offsetPointer, length);
+ offsetPointer += length;
+ this.sectors[sector.index] = newRecalculated; // update sector infos
+
+ newRecalculated.hasData = true;
+
+ long offset = newRecalculated.offset;
+ while (sectorData.hasRemaining()) {
+ offset += tempChannel.write(sectorData, offset);
+ }
+
+ DirectBufferReleaser.clean(sectorData);
+ }
+
+ tempChannel.force(true);
+ this.currentAcquiredIndex = tempChannel.size();
+ }
+
+ Files.move(
+ new File(this.filePath.toString() + ".tmp").toPath(),
+ this.filePath,
+ java.nio.file.StandardCopyOption.REPLACE_EXISTING
+ );
+
+ this.reopenChannel();
+ this.writeHeaders();
+ }
+
+ private void reopenChannel() throws IOException {
+ if (this.channel.isOpen()) {
+ this.channel.close();
+ }
+
+ this.channel = FileChannel.open(
+ filePath,
+ StandardOpenOption.CREATE,
+ StandardOpenOption.WRITE,
+ StandardOpenOption.READ
+ );
+ }
+
+ private void writeChunkDataRaw(int chunkOrdinal, ByteBuffer chunkData) throws IOException {
+ final Sector sector = this.sectors[chunkOrdinal];
+
+ sector.store(chunkData, this.channel);
+ }
+
+ private @Nullable ByteBuffer readChunkDataRaw(int chunkOrdinal) throws IOException {
+ final Sector sector = this.sectors[chunkOrdinal];
+
+ if (!sector.hasData()) {
+ return null;
+ }
+
+ return sector.read(this.channel);
+ }
+
+ private void clearChunkData(int chunkOrdinal) throws IOException {
+ final Sector sector = this.sectors[chunkOrdinal];
+
+ sector.clear();
+
+ this.writeHeaders();
+ }
+
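+ // Maps chunk coordinates to a slot in the 32x32 region grid (0..1023);
+ // e.g. chunk (33, 2) -> (33 & 31) + ((2 & 31) << 5) = 1 + 64 = 65.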
+ private static int getChunkIndex(int x, int z) {
+ return (x & 31) + ((z & 31) << 5);
+ }
+
+ private boolean hasData(int chunkOrdinal) {
+ return this.sectors[chunkOrdinal].hasData();
+ }
+
+ private void writeChunk(int x, int z, @NotNull ByteBuffer data) throws IOException {
+ final int chunkIndex = getChunkIndex(x, z);
+
+ final int oldPositionOfData = data.position();
+ final int xxHash32OfData = this.xxHash32.hash(data, this.xxHash32Seed);
+ data.position(oldPositionOfData);
+
+ final ByteBuffer compressedData = this.compress(this.ensureDirectBuffer(data));
+ // uncompressed length + timestamp + xxhash32
+ final ByteBuffer chunkSectionBuilder = ByteBuffer.allocateDirect(compressedData.remaining() + 4 + 8 + 4);
+
+ chunkSectionBuilder.putInt(data.remaining()); // Uncompressed length
+ chunkSectionBuilder.putLong(System.nanoTime()); // Timestamp
+ chunkSectionBuilder.putInt(xxHash32OfData); // xxHash32 of the original data
+ chunkSectionBuilder.put(compressedData); // Compressed data
+ chunkSectionBuilder.flip();
+
+ this.writeChunkDataRaw(chunkIndex, chunkSectionBuilder);
+ DirectBufferReleaser.clean(chunkSectionBuilder);
+ }
+
+ private @Nullable ByteBuffer readChunk(int x, int z) throws IOException {
+ final ByteBuffer compressed = this.readChunkDataRaw(getChunkIndex(x, z));
+
+ if (compressed == null) {
+ return null;
+ }
+
+ final int uncompressedLength = compressed.getInt();
+ final long timestamp = compressed.getLong();
+ final int dataXXHash32 = compressed.getInt();
+
+ final ByteBuffer decompressed = this.decompress(this.ensureDirectBuffer(compressed), uncompressedLength);
+
+ DirectBufferReleaser.clean(compressed);
+
+ final IOException xxHash32CheckFailedEx = this.checkXXHash32(dataXXHash32, decompressed);
+ if (xxHash32CheckFailedEx != null) {
+ throw xxHash32CheckFailedEx;
+ }
+
+ return decompressed;
+ }
+
+ private @NotNull ByteBuffer ensureDirectBuffer(@NotNull ByteBuffer buffer) {
+ if (buffer.isDirect()) {
+ return buffer;
+ }
+
+ ByteBuffer direct = ByteBuffer.allocateDirect(buffer.remaining());
+ int originalPosition = buffer.position();
+ direct.put(buffer);
+ direct.flip();
+ buffer.position(originalPosition);
+
+ return direct;
+ }
+
+ private @NotNull ByteBuffer compress(@NotNull ByteBuffer input) throws IOException {
+ final int originalPosition = input.position();
+ final int originalLimit = input.limit();
+
+ try {
+ byte[] inputArray;
+ int inputLength = input.remaining();
+ if (input.hasArray()) {
+ inputArray = input.array();
+ int arrayOffset = input.arrayOffset() + input.position();
+ if (arrayOffset != 0 || inputLength != inputArray.length) {
+ byte[] temp = new byte[inputLength];
+ System.arraycopy(inputArray, arrayOffset, temp, 0, inputLength);
+ inputArray = temp;
+ }
+ } else {
+ inputArray = new byte[inputLength];
+ input.get(inputArray);
+ input.position(originalPosition);
+ }
+
+ byte[] compressed = com.github.luben.zstd.Zstd.compress(inputArray, this.compressionLevel);
+
+ ByteBuffer result = ByteBuffer.allocateDirect(compressed.length);
+ result.put(compressed);
+ result.flip();
+
+ return result;
+
+ } catch (Exception e) {
+ throw new IOException("Compression failed for input size: " + input.remaining(), e);
+ } finally {
+ input.position(originalPosition);
+ input.limit(originalLimit);
+ }
+ }
+
+ private @NotNull ByteBuffer decompress(@NotNull ByteBuffer input, int originalSize) throws IOException {
+ final int originalPosition = input.position();
+ final int originalLimit = input.limit();
+
+ try {
+ byte[] inputArray;
+ int inputLength = input.remaining();
+
+ if (input.hasArray()) {
+ inputArray = input.array();
+ int arrayOffset = input.arrayOffset() + input.position();
+ if (arrayOffset != 0 || inputLength != inputArray.length) {
+ byte[] temp = new byte[inputLength];
+ System.arraycopy(inputArray, arrayOffset, temp, 0, inputLength);
+ inputArray = temp;
+ }
+ } else {
+ inputArray = new byte[inputLength];
+ input.get(inputArray);
+ input.position(originalPosition);
+ }
+
+ byte[] decompressed = com.github.luben.zstd.Zstd.decompress(inputArray, originalSize);
+
+ if (decompressed.length != originalSize) {
+ throw new IOException("Decompression size mismatch: expected " +
+ originalSize + ", got " + decompressed.length);
+ }
+
+ ByteBuffer result = ByteBuffer.allocateDirect(originalSize);
+ result.put(decompressed);
+ result.flip();
+
+ return result;
+
+ } catch (Exception e) {
+ throw new IOException("Decompression failed", e);
+ } finally {
+ input.position(originalPosition);
+ input.limit(originalLimit);
+ }
+ }
+
+ private @Nullable IOException checkXXHash32(long originalXXHash32, @NotNull ByteBuffer input) {
+ final int oldPositionOfInput = input.position();
+ final int currentXXHash32 = this.xxHash32.hash(input, this.xxHash32Seed);
+ input.position(oldPositionOfInput);
+
+ if (originalXXHash32 != currentXXHash32) {
+ return new IOException("XXHash32 check failed ! Expected: " + originalXXHash32 + ",but got: " + currentXXHash32);
+ }
+
+ return null;
+ }
+
+ @Override
+ public Path getPath() {
+ return this.filePath;
+ }
+
+ @Override
+ public DataInputStream getChunkDataInputStream(@NotNull ChunkPos pos) throws IOException {
+ this.fileAccessLock.readLock().lock();
+ try {
+ final ByteBuffer data = this.readChunk(pos.x, pos.z);
+
+ if (data == null) {
+ return null;
+ }
+
+ final byte[] dataBytes = new byte[data.remaining()];
+ data.get(dataBytes);
+
+ DirectBufferReleaser.clean(data);
+
+ return new DataInputStream(new ByteArrayInputStream(dataBytes));
+ }finally {
+ this.fileAccessLock.readLock().unlock();
+ }
+ }
+
+ @Override
+ public boolean doesChunkExist(@NotNull ChunkPos pos) {
+ this.fileAccessLock.readLock().lock();
+ try {
+ return this.hasData(getChunkIndex(pos.x, pos.z));
+ }finally {
+ this.fileAccessLock.readLock().unlock();
+ }
+ }
+
+ @Override
+ public DataOutputStream getChunkDataOutputStream(ChunkPos pos) {
+ return new DataOutputStream(new ChunkBufferHelper(pos));
+ }
+
+ @Override
+ public void clear(@NotNull ChunkPos pos) throws IOException {
+ this.fileAccessLock.writeLock().lock();
+ try {
+ this.clearChunkData(getChunkIndex(pos.x, pos.z));
+ }finally {
+ this.fileAccessLock.writeLock().unlock();
+ }
+ }
+
+ @Override
+ public boolean hasChunk(@NotNull ChunkPos pos) {
+ this.fileAccessLock.readLock().lock();
+ try {
+ return this.hasData(getChunkIndex(pos.x, pos.z));
+ }finally {
+ this.fileAccessLock.readLock().unlock();
+ }
+ }
+
+ @Override
+ public void write(@NotNull ChunkPos pos, ByteBuffer buf) throws IOException {
+ this.fileAccessLock.writeLock().lock();
+ try {
+ this.writeChunk(pos.x, pos.z, buf);
+ }finally {
+ this.fileAccessLock.writeLock().unlock();
+ }
+ }
+
+ // MCC stuff - this isn't needed for the Linear format either
+ @Override
+ public CompoundTag getOversizedData(int x, int z) {
+ return null;
+ }
+
+ @Override
+ public boolean isOversized(int x, int z) {
+ return false;
+ }
+
+ @Override
+ public boolean recalculateHeader() {
+ return false;
+ }
+
+ @Override
+ public void setOversized(int x, int z, boolean oversized) {
+
+ }
+ // MCC end
+
+ @Override
+ public MoonriseRegionFileIO.RegionDataController.WriteData moonrise$startWrite(CompoundTag data, ChunkPos pos) {
+ final DataOutputStream out = this.getChunkDataOutputStream(pos);
+
+ return new ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO.RegionDataController.WriteData(
+ data, ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO.RegionDataController.WriteData.WriteResult.WRITE,
+ out, regionFile -> out.close()
+ );
+ }
+
+ @Override
+ public void flush() throws IOException {
+ this.fileAccessLock.writeLock().lock();
+ try {
+ this.flushInternal();
+ }finally {
+ this.fileAccessLock.writeLock().unlock();
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ this.fileAccessLock.writeLock().lock();
+ try {
+ this.closeInternal();
+ }finally {
+ this.fileAccessLock.writeLock().unlock();
+ }
+ }
+
+ private class Sector{
+ private final int index;
+ private long offset;
+ private long length;
+ private boolean hasData = false;
+
+ private Sector(int index, long offset, long length) {
+ this.index = index;
+ this.offset = offset;
+ this.length = length;
+ }
+
+ public @NotNull ByteBuffer read(@NotNull FileChannel channel) throws IOException {
+ final ByteBuffer result = ByteBuffer.allocateDirect((int) this.length);
+
+ channel.read(result, this.offset);
+ result.flip();
+
+ return result;
+ }
+
+ public void store(@NotNull ByteBuffer newData, @NotNull FileChannel channel) throws IOException {
+ this.hasData = true;
+ this.length = newData.remaining();
+ this.offset = currentAcquiredIndex;
+
+ BufferedLinearRegionFile.this.currentAcquiredIndex += this.length;
+
+ long offset = this.offset;
+ while (newData.hasRemaining()) {
+ offset += channel.write(newData, offset);
+ }
+ }
+
+ private @NotNull ByteBuffer getEncoded() {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(sizeOfSingle());
+
+ buffer.putLong(this.offset);
+ buffer.putLong(this.length);
+ buffer.put((byte) (this.hasData ? 1 : 0));
+ buffer.flip();
+
+ return buffer;
+ }
+
+ public void restoreFrom(@NotNull ByteBuffer buffer) {
+ this.offset = buffer.getLong();
+ this.length = buffer.getLong();
+ this.hasData = buffer.get() == 1;
+
+ if (this.length < 0 || this.offset < 0) {
+ throw new IllegalStateException("Invalid sector data: " + this);
+ }
+ }
+
+ public void clear() {
+ this.hasData = false;
+ }
+
+ public boolean hasData() {
+ return this.hasData;
+ }
+
+ static int sizeOfSingle() {
+ // offset + length + hasData
+ return Long.BYTES * 2 + 1;
+ }
+ }
+
+ private class ChunkBufferHelper extends ByteArrayOutputStream {
+ private final ChunkPos pos;
+
+ private ChunkBufferHelper(ChunkPos pos) {
+ this.pos = pos;
+ }
+
+ @Override
+ public void close() throws IOException {
+ BufferedLinearRegionFile.this.fileAccessLock.writeLock().lock();
+ try {
+ ByteBuffer bytebuffer = ByteBuffer.wrap(this.buf, 0, this.count);
+
+ BufferedLinearRegionFile.this.writeChunk(this.pos.x, this.pos.z, bytebuffer);
+ }finally {
+ BufferedLinearRegionFile.this.fileAccessLock.writeLock().unlock();
+ }
+ }
+ }
+}
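
For reference, a sketch of the on-disk layout the class above implements (field order taken from writeHeaders() and writeChunk()):

header: long magic (0x1145141919810L), byte version, byte compressionLevel,
        int xxHash32Seed, long currentAcquiredIndex,
        1024 x sector entry { long offset, long length, byte hasData }  -> 17,430 bytes total
record: int uncompressedLength, long timestamp, int xxHash32OfRawData, Zstd-compressed payload
        (records are appended; superseded ones become dead space until compact() rewrites the file)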

View File

@@ -0,0 +1,37 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/utils/DirectBufferReleaser.java
@@ -1,0 +_,34 @@
+package me.earthme.luminol.utils;
+
+import org.jetbrains.annotations.NotNull;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.nio.ByteBuffer;
+
+public class DirectBufferReleaser {
+ private static final Method CLEANER_METHOD;
+ private static final Object UNSAFE;
+
+ static {
+ try {
+ Class<?> unsafeClass = Class.forName("sun.misc.Unsafe");
+ Field theUnsafe = unsafeClass.getDeclaredField("theUnsafe");
+ theUnsafe.setAccessible(true);
+ UNSAFE = theUnsafe.get(null);
+ CLEANER_METHOD = unsafeClass.getMethod("invokeCleaner", ByteBuffer.class);
+ } catch (Exception ex) {
+ throw new RuntimeException("Unsafe init failed", ex);
+ }
+ }
+
+ public static boolean clean(@NotNull ByteBuffer buffer) {
+ if (!buffer.isDirect()) return false;
+ try {
+ CLEANER_METHOD.invoke(UNSAFE, buffer);
+ return true;
+ } catch (Exception e) {
+ return false;
+ }
+ }
+}
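
A minimal usage sketch (assuming a JDK 9+ runtime, where sun.misc.Unsafe.invokeCleaner exists - the static initializer above fails otherwise - and that me.earthme.luminol.utils.DirectBufferReleaser is on the classpath):

import java.nio.ByteBuffer;
import me.earthme.luminol.utils.DirectBufferReleaser;

class CleanerExample {
    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.allocateDirect(4096);
        // ... fill and drain the buffer ...
        DirectBufferReleaser.clean(buffer); // releases the off-heap memory now, not at GC time
        // buffer must not be used after cleaning; any further access is undefined.
    }
}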

View File

@@ -1,16 +1,18 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/utils/EnumRegionFormat.java
-@@ -1,0 +_,40 @@
+@@ -1,0 +_,42 @@
+package me.earthme.luminol.utils;
+
+import abomination.LinearRegionFile;
+import me.earthme.luminol.config.modules.misc.RegionFormatConfig;
+import me.earthme.luminol.data.BufferedLinearRegionFile;
+import net.minecraft.world.level.chunk.storage.RegionFile;
+import org.jetbrains.annotations.Nullable;
+
+public enum EnumRegionFormat {
+ MCA("mca", "mca" , (info) -> new RegionFile(info.info(), info.filePath(), info.folder(), info.sync())),
-+ LINEAR_V2("linear_v2", "linear" ,(info) -> new LinearRegionFile(info.info(), info.filePath(), info.folder(), info.sync(), RegionFormatConfig.linearCompressionLevel));
++ LINEAR_V2("linear_v2", "linear" ,(info) -> new LinearRegionFile(info.info(), info.filePath(), info.folder(), info.sync(), RegionFormatConfig.linearCompressionLevel)),
++ B_LINEAR("b_linear", "b_linear", (info) -> new BufferedLinearRegionFile(info.filePath(), RegionFormatConfig.linearCompressionLevel));
+
+ private final String name;
+ private final String argument;

View File

@@ -1,212 +0,0 @@
--- /dev/null
+++ b/src/main/java/net/caffeinemc/mods/lithium/common/world/chunk/LithiumHashPalette.java
@@ -1,0 +_,209 @@
+// Lithium - faster chunk serialization
+
+package net.caffeinemc.mods.lithium.common.world.chunk;
+
+import it.unimi.dsi.fastutil.HashCommon;
+import it.unimi.dsi.fastutil.objects.Reference2IntOpenHashMap;
+import net.minecraft.CrashReport;
+import net.minecraft.CrashReportCategory;
+import net.minecraft.ReportedException;
+import net.minecraft.core.IdMap;
+import net.minecraft.network.FriendlyByteBuf;
+import net.minecraft.network.VarInt;
+import net.minecraft.world.level.chunk.MissingPaletteEntryException;
+import net.minecraft.world.level.chunk.Palette;
+import net.minecraft.world.level.chunk.PaletteResize;
+import org.jetbrains.annotations.NotNull;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.function.Predicate;
+
+import static it.unimi.dsi.fastutil.Hash.FAST_LOAD_FACTOR;
+
+/**
+ * Generally provides better performance over the vanilla {@link net.minecraft.world.level.chunk.HashMapPalette} when calling
+ * {@link LithiumHashPalette#idFor(Object)}, by using a faster backing map and reducing pointer chasing.
+ */
+public class LithiumHashPalette<T> implements Palette<T> {
+ private static final int ABSENT_VALUE = -1;
+
+ private final IdMap<T> idList;
+ private final PaletteResize<T> resizeHandler;
+ private final int indexBits;
+
+ private final Reference2IntOpenHashMap<T> table;
+ private T[] entries;
+ private int size = 0;
+
+ private LithiumHashPalette(IdMap<T> idList, PaletteResize<T> resizeHandler, int indexBits, T[] entries, Reference2IntOpenHashMap<T> table, int size) {
+ this.idList = idList;
+ this.resizeHandler = resizeHandler;
+ this.indexBits = indexBits;
+ this.entries = entries;
+ this.table = table;
+ this.size = size;
+ }
+
+ public LithiumHashPalette(IdMap<T> idList, int bits, PaletteResize<T> resizeHandler, List<T> list) {
+ this(idList, bits, resizeHandler);
+
+ for (T t : list) {
+ this.addEntry(t);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public LithiumHashPalette(IdMap<T> idList, int bits, PaletteResize<T> resizeHandler) {
+ this.idList = idList;
+ this.indexBits = bits;
+ this.resizeHandler = resizeHandler;
+
+ int capacity = 1 << bits;
+
+ this.entries = (T[]) new Object[capacity];
+ this.table = new Reference2IntOpenHashMap<>(capacity, FAST_LOAD_FACTOR);
+ this.table.defaultReturnValue(ABSENT_VALUE);
+ }
+
+ @Override
+ public int idFor(@NotNull T obj) {
+ int id = this.table.getInt(obj);
+
+ if (id == ABSENT_VALUE) {
+ id = this.computeEntry(obj);
+ }
+
+ return id;
+ }
+
+ @Override
+ public boolean maybeHas(@NotNull Predicate<T> predicate) {
+ for (int i = 0; i < this.size; ++i) {
+ if (predicate.test(this.entries[i])) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ private int computeEntry(T obj) {
+ int id = this.addEntry(obj);
+
+ if (id >= 1 << this.indexBits) {
+ if (this.resizeHandler == null) {
+ throw new IllegalStateException("Cannot grow");
+ } else {
+ id = this.resizeHandler.onResize(this.indexBits + 1, obj);
+ }
+ }
+
+ return id;
+ }
+
+ private int addEntry(T obj) {
+ int nextId = this.size;
+
+ if (nextId >= this.entries.length) {
+ this.resize(this.size);
+ }
+
+ this.table.put(obj, nextId);
+ this.entries[nextId] = obj;
+
+ this.size++;
+
+ return nextId;
+ }
+
+ private void resize(int neededCapacity) {
+ this.entries = Arrays.copyOf(this.entries, HashCommon.nextPowerOfTwo(neededCapacity + 1));
+ }
+
+ @Override
+ public @NotNull T valueFor(int id) {
+ T[] entries = this.entries;
+
+ T entry = null;
+ if (id >= 0 && id < entries.length) {
+ entry = entries[id];
+ }
+
+ if (entry != null) {
+ return entry;
+ } else {
+ throw this.missingPaletteEntryCrash(id);
+ }
+ }
+
+ private ReportedException missingPaletteEntryCrash(int id) {
+ try {
+ throw new MissingPaletteEntryException(id);
+ } catch (MissingPaletteEntryException e) {
+ CrashReport crashReport = CrashReport.forThrowable(e, "[Lithium] Getting Palette Entry");
+ CrashReportCategory crashReportCategory = crashReport.addCategory("Chunk section");
+ crashReportCategory.setDetail("IndexBits", this.indexBits);
+ crashReportCategory.setDetail("Entries", this.entries.length + " Elements: " + Arrays.toString(this.entries));
+ crashReportCategory.setDetail("Table", this.table.size() + " Elements: " + this.table);
+ return new ReportedException(crashReport);
+ }
+ }
+
+ @Override
+ public void read(FriendlyByteBuf buf) {
+ this.clear();
+
+ int entryCount = buf.readVarInt();
+
+ for (int i = 0; i < entryCount; ++i) {
+ this.addEntry(this.idList.byIdOrThrow(buf.readVarInt()));
+ }
+ }
+
+ @Override
+ public void write(FriendlyByteBuf buf) {
+ int size = this.size;
+ buf.writeVarInt(size);
+
+ for (int i = 0; i < size; ++i) {
+ buf.writeVarInt(this.idList.getId(this.valueFor(i)));
+ }
+ }
+
+ @Override
+ public int getSerializedSize() {
+ int size = VarInt.getByteSize(this.size);
+
+ for (int i = 0; i < this.size; ++i) {
+ size += VarInt.getByteSize(this.idList.getId(this.valueFor(i)));
+ }
+
+ return size;
+ }
+
+ @Override
+ public int getSize() {
+ return this.size;
+ }
+
+ @Override
+ public @NotNull Palette<T> copy(@NotNull PaletteResize<T> resizeHandler) {
+ return new LithiumHashPalette<>(this.idList, resizeHandler, this.indexBits, this.entries.clone(), this.table.clone(), this.size);
+ }
+
+ private void clear() {
+ Arrays.fill(this.entries, null);
+ this.table.clear();
+ this.size = 0;
+ }
+
+ public List<T> getElements() {
+ T[] copy = Arrays.copyOf(this.entries, this.size);
+ return Arrays.asList(copy);
+ }
+
+ public static <A> Palette<A> create(int bits, IdMap<A> idList, PaletteResize<A> listener, List<A> list) {
+ return new LithiumHashPalette<>(idList, bits, listener, list);
+ }
+}
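
The ABSENT_VALUE sentinel above leans on fastutil's unboxed default-return-value mechanism; a small self-contained illustration of that pattern (example keys and values are illustrative only):

import it.unimi.dsi.fastutil.objects.Reference2IntOpenHashMap;

class SentinelExample {
    public static void main(String[] args) {
        Reference2IntOpenHashMap<String> table = new Reference2IntOpenHashMap<>();
        table.defaultReturnValue(-1);      // returned by getInt() for missing keys
        String stone = "stone";
        table.put(stone, 0);
        int hit = table.getInt(stone);     // 0
        int miss = table.getInt("dirt");   // -1, with no boxing and no containsKey() pre-check
        System.out.println(hit + " " + miss);
    }
}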