diff --git a/luminol-server/minecraft-patches/features/0063-Paper-Backport-fix-for-MC-296337.patch b/luminol-server/minecraft-patches/features/0056-Paper-Backport-fix-for-MC-296337.patch
similarity index 95%
rename from luminol-server/minecraft-patches/features/0063-Paper-Backport-fix-for-MC-296337.patch
rename to luminol-server/minecraft-patches/features/0056-Paper-Backport-fix-for-MC-296337.patch
index 7800672..08759d3 100644
--- a/luminol-server/minecraft-patches/features/0063-Paper-Backport-fix-for-MC-296337.patch
+++ b/luminol-server/minecraft-patches/features/0056-Paper-Backport-fix-for-MC-296337.patch
@@ -10,7 +10,7 @@ Original patch license: GPLv3 (https://github.com/PaperMC/Paper/blob/main/licens
 Co-authored-by: electronicboy
 
 diff --git a/net/minecraft/world/entity/Entity.java b/net/minecraft/world/entity/Entity.java
-index b163c43f5398b9f38c75ae7af6a3015b686624ce..0d509b187c0ec357a7ccdf1e8b1f56d1e44398b9 100644
+index 14e58b4093b4eafb989f9b1d498579bd25ab6c1e..0814ad6820b08a856f7bd261e96143568e772ac4 100644
 --- a/net/minecraft/world/entity/Entity.java
 +++ b/net/minecraft/world/entity/Entity.java
 @@ -361,6 +361,11 @@ public abstract class Entity implements SyncedDataHolder, Nameable, EntityAccess
diff --git a/luminol-server/paper-patches/files/src/main/java/me/earthme/luminol/data/BufferedLinearRegionFile.java.patch b/luminol-server/paper-patches/files/src/main/java/me/earthme/luminol/data/BufferedLinearRegionFile.java.patch
new file mode 100644
index 0000000..613e6d1
--- /dev/null
+++ b/luminol-server/paper-patches/files/src/main/java/me/earthme/luminol/data/BufferedLinearRegionFile.java.patch
@@ -0,0 +1,632 @@
+--- /dev/null
++++ b/src/main/java/me/earthme/luminol/data/BufferedLinearRegionFile.java
+@@ -1,0 +_,629 @@
++package me.earthme.luminol.data;
++
++import abomination.IRegionFile;
++import ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO;
++import me.earthme.luminol.utils.DirectBufferReleaser;
++import net.jpountz.xxhash.XXHash32;
++import net.jpountz.xxhash.XXHashFactory;
++import net.minecraft.nbt.CompoundTag;
++import net.minecraft.world.level.ChunkPos;
++import org.jetbrains.annotations.NotNull;
++import org.jetbrains.annotations.Nullable;
++
++import java.io.*;
++import java.nio.ByteBuffer;
++import java.nio.channels.FileChannel;
++import java.nio.file.Files;
++import java.nio.file.Path;
++import java.nio.file.StandardOpenOption;
++import java.util.concurrent.locks.ReadWriteLock;
++import java.util.concurrent.locks.ReentrantReadWriteLock;
++
++public class BufferedLinearRegionFile implements IRegionFile {
++    private static final double AUTO_COMPACT_PERCENT = 3.0 / 5.0; // 60%
++    private static final long AUTO_COMPACT_SIZE = 1024 * 1024; // 1 MiB
++
++    private static final long SUPER_BLOCK = 0x1145141919810L;
++    private static final int HASH_SEED = 0x0721; // ~(∠・ω< )⌒★
++    private static final byte VERSION = 0x01; // ver 1.0
++
++    private final Path filePath;
++
++    private final ReadWriteLock fileAccessLock = new ReentrantReadWriteLock();
++    private final XXHash32 xxHash32 = XXHashFactory.fastestInstance().hash32();
++    private final Sector[] sectors = new Sector[1024];
++    private long currentAcquiredIndex = this.headerSize();
++    private byte compressionLevel = 6;
++    private int xxHash32Seed = HASH_SEED;
++    private FileChannel channel;
++
++    public BufferedLinearRegionFile(Path filePath, int compressionLevel) throws IOException {
++        this(filePath);
++
++        this.compressionLevel = (byte) compressionLevel;
++    }
++
++    public BufferedLinearRegionFile(Path filePath) throws IOException {
++        this.channel = FileChannel.open(
++            filePath,
++            StandardOpenOption.CREATE,
++            StandardOpenOption.WRITE,
++            StandardOpenOption.READ
++        );
++        this.filePath = filePath;
++
++        // fill default sectors
++        for (int i = 0; i < 1024; i++) {
++            this.sectors[i] = new Sector(i, this.headerSize(), 0);
++        }
++
++        // load sectors
++        this.readHeaders();
++    }
++
++    private void readHeaders() throws IOException {
++        if (this.channel.size() < this.headerSize()) {
++            return;
++        }
++
++        final ByteBuffer buffer = ByteBuffer.allocateDirect(this.headerSize());
++        this.channel.read(buffer, 0);
++        buffer.flip();
++
++        if (buffer.getLong() != SUPER_BLOCK || buffer.get() != VERSION) {
++            throw new IOException("Invalid file format or version mismatch");
++        }
++
++        this.compressionLevel = buffer.get(); // Compression level (overridden when one is passed to the constructor)
++        this.xxHash32Seed = buffer.getInt(); // XXHash32 seed
++        this.currentAcquiredIndex = buffer.getLong(); // Acquired index
++
++        for (Sector sector : this.sectors) {
++            sector.restoreFrom(buffer);
++            if (sector.hasData()) {
++                // recompute if the acquired index is corrupted
++                this.currentAcquiredIndex = Math.max(this.currentAcquiredIndex, sector.offset + sector.length);
++            }
++        }
++
++        DirectBufferReleaser.clean(buffer);
++    }
++
++    private void writeHeaders() throws IOException {
++        final ByteBuffer buffer = ByteBuffer.allocateDirect(this.headerSize());
++
++        buffer.putLong(SUPER_BLOCK); // Magic
++        buffer.put(VERSION); // Version
++        buffer.put(this.compressionLevel); // Compression level
++        buffer.putInt(this.xxHash32Seed); // XXHash32 seed
++        buffer.putLong(this.currentAcquiredIndex); // Acquired index
++
++        for (Sector sector : this.sectors) {
++            // encode each sector
++            buffer.put(sector.getEncoded());
++        }
++
++        buffer.flip();
++
++        long offset = 0;
++        while (buffer.hasRemaining()) {
++            offset += this.channel.write(buffer, offset);
++        }
++
++        DirectBufferReleaser.clean(buffer);
++    }
++
++    private int sectorSize() {
++        return this.sectors.length * Sector.sizeOfSingle();
++    }
++
++    private int headerSize() {
++        int result = 0;
++
++        result += Long.BYTES; // Magic
++        result += Byte.BYTES; // Version
++        result += Byte.BYTES; // Compression level
++        result += Integer.BYTES; // XXHash32 seed
++        result += Long.BYTES; // Acquired index
++        result += this.sectorSize(); // Sectors
++
++        return result;
++    }
++
++    private void flushInternal() throws IOException {
++        // save headers
++        this.writeHeaders();
++
++        long spareSize = this.channel.size();
++
++        spareSize -= this.headerSize();
++        for (Sector sector : this.sectors) {
++            spareSize -= sector.length;
++        }
++
++        long sectorSize = 0;
++        for (Sector sector : this.sectors) {
++            sectorSize += sector.length;
++        }
++
++        // try auto compact to reclaim the garbage area
++        if (spareSize > AUTO_COMPACT_SIZE && (double) spareSize > ((double) sectorSize) * AUTO_COMPACT_PERCENT) {
++            this.compact();
++        }
++    }
++
++    private void closeInternal() throws IOException {
++        this.writeHeaders();
++        this.channel.force(true);
++        // force compact
++        this.compact();
++        this.channel.close();
++    }
++
++    private void compact() throws IOException {
++        this.writeHeaders(); // save headers for compact
++        this.channel.force(true);
++        try (FileChannel tempChannel = FileChannel.open(
++            new File(this.filePath.toString() + ".tmp").toPath(),
++            StandardOpenOption.CREATE,
++            StandardOpenOption.WRITE,
++            StandardOpenOption.READ
++        )) {
++            // get the latest header in the file
++            final ByteBuffer headerBuffer = ByteBuffer.allocateDirect(this.headerSize());
++            this.channel.read(headerBuffer, 0);
++            headerBuffer.flip();
++
++            long offsetHeader = 0;
++            while (headerBuffer.hasRemaining()) {
++                offsetHeader += tempChannel.write(headerBuffer, offsetHeader);
++            }
++            DirectBufferReleaser.clean(headerBuffer);
++
++            int offsetPointer = this.headerSize();
++            for (Sector sector : this.sectors) {
++                // skip sectors that are cleared or contain no data
++                if (!sector.hasData()) {
++                    continue;
++                }
++
++                // only read the available data
++                final ByteBuffer sectorData = sector.read(this.channel);
++                final int length = sectorData.remaining();
++
++                // recalculate the offset and length
++                final Sector newRecalculated = new Sector(sector.index, offsetPointer, length);
++                offsetPointer += length;
++                this.sectors[sector.index] = newRecalculated; // update sector info
++
++                newRecalculated.hasData = true;
++
++                long offset = newRecalculated.offset;
++                while (sectorData.hasRemaining()) {
++                    offset += tempChannel.write(sectorData, offset);
++                }
++
++                DirectBufferReleaser.clean(sectorData);
++            }
++
++            tempChannel.force(true);
++            this.currentAcquiredIndex = tempChannel.size();
++        }
++
++        Files.move(
++            new File(this.filePath.toString() + ".tmp").toPath(),
++            this.filePath,
++            java.nio.file.StandardCopyOption.REPLACE_EXISTING
++        );
++
++        this.reopenChannel();
++        this.writeHeaders();
++    }
++
++    private void reopenChannel() throws IOException {
++        if (this.channel.isOpen()) {
++            this.channel.close();
++        }
++
++        this.channel = FileChannel.open(
++            filePath,
++            StandardOpenOption.CREATE,
++            StandardOpenOption.WRITE,
++            StandardOpenOption.READ
++        );
++    }
++
++    private void writeChunkDataRaw(int chunkOrdinal, ByteBuffer chunkData) throws IOException {
++        final Sector sector = this.sectors[chunkOrdinal];
++
++        sector.store(chunkData, this.channel);
++    }
++
++    private @Nullable ByteBuffer readChunkDataRaw(int chunkOrdinal) throws IOException {
++        final Sector sector = this.sectors[chunkOrdinal];
++
++        if (!sector.hasData()) {
++            return null;
++        }
++
++        return sector.read(this.channel);
++    }
++
++    private void clearChunkData(int chunkOrdinal) throws IOException {
++        final Sector sector = this.sectors[chunkOrdinal];
++
++        sector.clear();
++
++        this.writeHeaders();
++    }
++
++    private static int getChunkIndex(int x, int z) {
++        return (x & 31) + ((z & 31) << 5);
++    }
++
++    private boolean hasData(int chunkOrdinal) {
++        return this.sectors[chunkOrdinal].hasData();
++    }
++
++    private void writeChunk(int x, int z, @NotNull ByteBuffer data) throws IOException {
++        final int chunkIndex = getChunkIndex(x, z);
++
++        final int oldPositionOfData = data.position();
++        final int xxHash32OfData = this.xxHash32.hash(data, this.xxHash32Seed);
++        data.position(oldPositionOfData);
++
++        final ByteBuffer compressedData = this.compress(this.ensureDirectBuffer(data));
++        // uncompressed length + timestamp + xxhash32
++        final ByteBuffer chunkSectionBuilder = ByteBuffer.allocateDirect(compressedData.remaining() + 4 + 8 + 4);
++
++        chunkSectionBuilder.putInt(data.remaining()); // Uncompressed length
++        chunkSectionBuilder.putLong(System.nanoTime()); // Timestamp
++        chunkSectionBuilder.putInt(xxHash32OfData); // xxHash32 of the original data
++        chunkSectionBuilder.put(compressedData); // Compressed data
++        chunkSectionBuilder.flip();
++
++        this.writeChunkDataRaw(chunkIndex, chunkSectionBuilder);
++        DirectBufferReleaser.clean(chunkSectionBuilder);
++    }
++
++    private @Nullable ByteBuffer readChunk(int x, int z) throws IOException {
++        final ByteBuffer compressed = this.readChunkDataRaw(getChunkIndex(x, z));
++
++        if (compressed == null) {
++            return null;
++        }
++
++        final int uncompressedLength = compressed.getInt(); // uncompressed length
++        final long timestamp = compressed.getLong(); // TODO use this timestamp for something?
++        final int dataXXHash32 = compressed.getInt(); // XXHash32 for validation
++
++        final ByteBuffer decompressed = this.decompress(this.ensureDirectBuffer(compressed), uncompressedLength);
++
++        DirectBufferReleaser.clean(compressed);
++
++        final IOException xxHash32CheckFailedEx = this.checkXXHash32(dataXXHash32, decompressed);
++        if (xxHash32CheckFailedEx != null) {
++            throw xxHash32CheckFailedEx; // prevent loading corrupted data
++        }
++
++        return decompressed;
++    }
++
++    private @NotNull ByteBuffer ensureDirectBuffer(@NotNull ByteBuffer buffer) {
++        if (buffer.isDirect()) {
++            return buffer;
++        }
++
++        ByteBuffer direct = ByteBuffer.allocateDirect(buffer.remaining());
++        int originalPosition = buffer.position();
++        direct.put(buffer);
++        direct.flip();
++        buffer.position(originalPosition);
++
++        return direct;
++    }
++
++    private @NotNull ByteBuffer compress(@NotNull ByteBuffer input) throws IOException {
++        final int originalPosition = input.position();
++        final int originalLimit = input.limit();
++
++        try {
++            byte[] inputArray;
++            int inputLength = input.remaining();
++            if (input.hasArray()) {
++                inputArray = input.array();
++                int arrayOffset = input.arrayOffset() + input.position();
++                if (arrayOffset != 0 || inputLength != inputArray.length) {
++                    byte[] temp = new byte[inputLength];
++                    System.arraycopy(inputArray, arrayOffset, temp, 0, inputLength);
++                    inputArray = temp;
++                }
++            } else {
++                inputArray = new byte[inputLength];
++                input.get(inputArray);
++                input.position(originalPosition);
++            }
++
++            byte[] compressed = com.github.luben.zstd.Zstd.compress(inputArray, this.compressionLevel);
++
++            ByteBuffer result = ByteBuffer.allocateDirect(compressed.length);
++            result.put(compressed);
++            result.flip();
++
++            return result;
++
++        } catch (Exception e) {
++            throw new IOException("Compression failed for input size: " + input.remaining(), e);
++        } finally {
++            input.position(originalPosition);
++            input.limit(originalLimit);
++        }
++    }
++
++    private @NotNull ByteBuffer decompress(@NotNull ByteBuffer input, int originalSize) throws IOException {
++        final int originalPosition = input.position();
++        final int originalLimit = input.limit();
++
++        try {
++            byte[] inputArray;
++            int inputLength = input.remaining();
++
++            if (input.hasArray()) {
++                inputArray = input.array();
++                int arrayOffset = input.arrayOffset() + input.position();
++                if (arrayOffset != 0 || inputLength != inputArray.length) {
++                    byte[] temp = new byte[inputLength];
++                    System.arraycopy(inputArray, arrayOffset, temp, 0, inputLength);
++                    inputArray = temp;
++                }
++            } else {
++                inputArray = new byte[inputLength];
++                input.get(inputArray);
++                input.position(originalPosition);
++            }
++
++            byte[] decompressed = com.github.luben.zstd.Zstd.decompress(inputArray, originalSize);
++
++            if (decompressed.length != originalSize) {
++                throw new IOException("Decompression size mismatch: expected " +
++                    originalSize + ", got " + decompressed.length);
++            }
++
++            ByteBuffer result = ByteBuffer.allocateDirect(originalSize);
++            result.put(decompressed);
++            result.flip();
++
++            return result;
++
++        } catch (Exception e) {
++            throw new IOException("Decompression failed", e);
++        } finally {
++            input.position(originalPosition);
++            input.limit(originalLimit);
++        }
++    }
++
++    private @Nullable IOException checkXXHash32(long originalXXHash32, @NotNull ByteBuffer input) {
++        final int oldPositionOfInput = input.position();
++        final int currentXXHash32 = this.xxHash32.hash(input, this.xxHash32Seed);
++        input.position(oldPositionOfInput);
++
++        if (originalXXHash32 != currentXXHash32) {
++            return new IOException("XXHash32 check failed! Expected: " + originalXXHash32 + ", but got: " + currentXXHash32);
++        }
++
++        return null;
++    }
++
++    @Override
++    public Path getPath() {
++        return this.filePath;
++    }
++
++    @Override
++    public DataInputStream getChunkDataInputStream(@NotNull ChunkPos pos) throws IOException {
++        this.fileAccessLock.readLock().lock();
++        try {
++            final ByteBuffer data = this.readChunk(pos.x, pos.z);
++
++            if (data == null) {
++                return null;
++            }
++
++            final byte[] dataBytes = new byte[data.remaining()];
++            data.get(dataBytes);
++
++            DirectBufferReleaser.clean(data);
++
++            return new DataInputStream(new ByteArrayInputStream(dataBytes));
++        } finally {
++            this.fileAccessLock.readLock().unlock();
++        }
++    }
++
++    @Override
++    public boolean doesChunkExist(@NotNull ChunkPos pos) {
++        this.fileAccessLock.readLock().lock();
++        try {
++            return this.hasData(getChunkIndex(pos.x, pos.z));
++        } finally {
++            this.fileAccessLock.readLock().unlock();
++        }
++    }
++
++    @Override
++    public DataOutputStream getChunkDataOutputStream(ChunkPos pos) {
++        return new DataOutputStream(new ChunkBufferHelper(pos));
++    }
++
++    @Override
++    public void clear(@NotNull ChunkPos pos) throws IOException {
++        this.fileAccessLock.writeLock().lock();
++        try {
++            this.clearChunkData(getChunkIndex(pos.x, pos.z));
++        } finally {
++            this.fileAccessLock.writeLock().unlock();
++        }
++    }
++
++    @Override
++    public boolean hasChunk(@NotNull ChunkPos pos) {
++        this.fileAccessLock.readLock().lock();
++        try {
++            return this.hasData(getChunkIndex(pos.x, pos.z));
++        } finally {
++            this.fileAccessLock.readLock().unlock();
++        }
++    }
++
++    @Override
++    public void write(@NotNull ChunkPos pos, ByteBuffer buf) throws IOException {
++        this.fileAccessLock.writeLock().lock();
++        try {
++            this.writeChunk(pos.x, pos.z, buf);
++        } finally {
++            this.fileAccessLock.writeLock().unlock();
++        }
++    }
++
++    // MCC stuff; the Linear formats have no use for these either
++    @Override
++    public CompoundTag getOversizedData(int x, int z) {
++        return null;
++    }
++
++    @Override
++    public boolean isOversized(int x, int z) {
++        return false;
++    }
++
++    @Override
++    public boolean recalculateHeader() {
++        return false;
++    }
++
++    @Override
++    public void setOversized(int x, int z, boolean oversized) {
++
++    }
++    // MCC end
++
++    @Override
++    public MoonriseRegionFileIO.RegionDataController.WriteData moonrise$startWrite(CompoundTag data, ChunkPos pos) {
++        final DataOutputStream out = this.getChunkDataOutputStream(pos);
++
++        return new MoonriseRegionFileIO.RegionDataController.WriteData(
++            data, MoonriseRegionFileIO.RegionDataController.WriteData.WriteResult.WRITE,
++            out, regionFile -> out.close()
++        );
++    }
++
++    @Override
++    public void flush() throws IOException {
++        this.fileAccessLock.writeLock().lock();
++        try {
++            this.flushInternal();
++        } finally {
++            this.fileAccessLock.writeLock().unlock();
++        }
++    }
++
++    @Override
++    public void close() throws IOException {
++        this.fileAccessLock.writeLock().lock();
++        try {
++            this.closeInternal();
++        } finally {
++            this.fileAccessLock.writeLock().unlock();
++        }
++    }
++
++    private class Sector {
++        private final int index;
++        private long offset;
++        private long length;
++        private boolean hasData = false;
++
++        private Sector(int index, long offset, long length) {
++            this.index = index;
++            this.offset = offset;
++            this.length = length;
++        }
++
++        public @NotNull ByteBuffer read(@NotNull FileChannel channel) throws IOException {
++            final ByteBuffer result = ByteBuffer.allocateDirect((int) this.length);
++
++            channel.read(result, this.offset);
++            result.flip();
++
++            return result;
++        }
++
++        public void store(@NotNull ByteBuffer newData, @NotNull FileChannel channel) throws IOException {
++            this.hasData = true;
++            this.length = newData.remaining();
++            this.offset = BufferedLinearRegionFile.this.currentAcquiredIndex;
++
++            BufferedLinearRegionFile.this.currentAcquiredIndex += this.length;
++
++            long offset = this.offset;
++            while (newData.hasRemaining()) {
++                offset += channel.write(newData, offset);
++            }
++        }
++
++        private @NotNull ByteBuffer getEncoded() {
++            final ByteBuffer buffer = ByteBuffer.allocateDirect(sizeOfSingle());
++
++            buffer.putLong(this.offset);
++            buffer.putLong(this.length);
++            buffer.put((byte) (this.hasData ? 1 : 0));
++            buffer.flip();
++
++            return buffer;
++        }
++
++        public void restoreFrom(@NotNull ByteBuffer buffer) {
++            this.offset = buffer.getLong();
++            this.length = buffer.getLong();
++            this.hasData = buffer.get() == 1;
++
++            if (this.length < 0 || this.offset < 0) {
++                throw new IllegalStateException("Invalid sector data: index=" + this.index + ", offset=" + this.offset + ", length=" + this.length);
++            }
++        }
++
++        public void clear() {
++            this.hasData = false;
++        }
++
++        public boolean hasData() {
++            return this.hasData;
++        }
++
++        static int sizeOfSingle() {
++            // offset + length + hasData flag
++            return Long.BYTES * 2 + 1;
++        }
++    }
++
++    private class ChunkBufferHelper extends ByteArrayOutputStream {
++        private final ChunkPos pos;
++
++        private ChunkBufferHelper(ChunkPos pos) {
++            this.pos = pos;
++        }
++
++        @Override
++        public void close() throws IOException {
++            BufferedLinearRegionFile.this.fileAccessLock.writeLock().lock();
++            try {
++                ByteBuffer bytebuffer = ByteBuffer.wrap(this.buf, 0, this.count);
++
++                BufferedLinearRegionFile.this.writeChunk(this.pos.x, this.pos.z, bytebuffer);
++            } finally {
++                BufferedLinearRegionFile.this.fileAccessLock.writeLock().unlock();
++            }
++        }
++    }
++}
diff --git a/luminol-server/paper-patches/files/src/main/java/me/earthme/luminol/utils/DirectBufferReleaser.java.patch b/luminol-server/paper-patches/files/src/main/java/me/earthme/luminol/utils/DirectBufferReleaser.java.patch
new file mode 100644
index 0000000..acb0387
--- /dev/null
+++ b/luminol-server/paper-patches/files/src/main/java/me/earthme/luminol/utils/DirectBufferReleaser.java.patch
@@ -0,0 +1,37 @@
+--- /dev/null
++++ b/src/main/java/me/earthme/luminol/utils/DirectBufferReleaser.java
+@@ -1,0 +_,34 @@
++package me.earthme.luminol.utils;
++
++import org.jetbrains.annotations.NotNull;
++
++import java.lang.reflect.Field;
++import java.lang.reflect.Method;
++import java.nio.ByteBuffer;
++
++public class DirectBufferReleaser {
++    private static final Method CLEANER_METHOD;
++    private static final Object UNSAFE;
++
++    static {
++        try {
++            Class<?> unsafeClass = Class.forName("sun.misc.Unsafe");
++            Field theUnsafe = unsafeClass.getDeclaredField("theUnsafe");
++            theUnsafe.setAccessible(true);
++            UNSAFE = theUnsafe.get(null);
++            CLEANER_METHOD = unsafeClass.getMethod("invokeCleaner", ByteBuffer.class);
++        } catch (Exception ex) {
++            throw new RuntimeException("Unsafe init failed", ex);
++        }
++    }
++
++    public static boolean clean(@NotNull ByteBuffer buffer) {
++        if (!buffer.isDirect()) return false;
++        try {
++            CLEANER_METHOD.invoke(UNSAFE, buffer);
++            return true;
++        } catch (Exception e) {
++            return false;
++        }
++    }
++}
diff --git a/luminol-server/paper-patches/files/src/main/java/me/earthme/luminol/utils/EnumRegionFormat.java.patch b/luminol-server/paper-patches/files/src/main/java/me/earthme/luminol/utils/EnumRegionFormat.java.patch
index 0840fbb..2b888fd 100644
--- a/luminol-server/paper-patches/files/src/main/java/me/earthme/luminol/utils/EnumRegionFormat.java.patch
+++ b/luminol-server/paper-patches/files/src/main/java/me/earthme/luminol/utils/EnumRegionFormat.java.patch
@@ -1,16 +1,18 @@
 --- /dev/null
 +++ b/src/main/java/me/earthme/luminol/utils/EnumRegionFormat.java
-@@ -1,0 +_,40 @@
+@@ -1,0 +_,42 @@
 +package me.earthme.luminol.utils;
 +
 +import abomination.LinearRegionFile;
 +import me.earthme.luminol.config.modules.misc.RegionFormatConfig;
++import me.earthme.luminol.data.BufferedLinearRegionFile;
 +import net.minecraft.world.level.chunk.storage.RegionFile;
 +import org.jetbrains.annotations.Nullable;
 +
 +public enum EnumRegionFormat {
 +    MCA("mca", "mca", (info) -> new RegionFile(info.info(), info.filePath(), info.folder(), info.sync())),
-+    LINEAR_V2("linear_v2", "linear", (info) -> new LinearRegionFile(info.info(), info.filePath(), info.folder(), info.sync(), RegionFormatConfig.linearCompressionLevel));
++    LINEAR_V2("linear_v2", "linear", (info) -> new LinearRegionFile(info.info(), info.filePath(), info.folder(), info.sync(), RegionFormatConfig.linearCompressionLevel)),
++    B_LINEAR("b_linear", "b_linear", (info) -> new BufferedLinearRegionFile(info.filePath(), RegionFormatConfig.linearCompressionLevel));
 +
 +    private final String name;
 +    private final String argument;
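
Layout note for the new b_linear format, as implied by writeHeaders() and Sector.getEncoded() above: the fixed header is a magic long, a version byte, a zstd level byte, an xxHash32 seed int, an acquired-index long, then 1024 sector entries of {long offset, long length, byte hasData}. The sketch below is illustrative only; it is not part of this change, the class name is hypothetical, and it merely re-derives headerSize() from those fields:

    // Reference sketch: recomputes the fixed b_linear header size from the layout above.
    public final class BLinearHeaderLayout {
        // One sector entry: long offset + long length + byte hasData = 17 bytes.
        public static final int SECTOR_ENTRY_BYTES = 2 * Long.BYTES + Byte.BYTES;
        // Magic + version + compression level + xxHash32 seed + acquired index + 1024 entries.
        public static final int HEADER_BYTES =
            Long.BYTES + Byte.BYTES + Byte.BYTES + Integer.BYTES + Long.BYTES
                + 1024 * SECTOR_ENTRY_BYTES; // 22 + 17408 = 17430, matching headerSize()

        public static void main(String[] args) {
            System.out.println("b_linear header size: " + HEADER_BYTES + " bytes");
        }
    }

Chunk payloads follow the header; per writeChunk() above, each record is an int uncompressed length, a long timestamp (System.nanoTime()), an int xxHash32 of the uncompressed data, and then the zstd-compressed bytes.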