Compare commits

...

4 Commits

Author      SHA1        Message                                              Date
MrHua269    8d6d9119a8  Buffered linear region format 1.0                    2025-06-13 17:50:42 +08:00
Bacteriawa  40a455c92f  Updated Upstream (Folia)                             2025-06-13 02:32:12 +08:00
Bacteriawa  23b9e44ff3  Merge branch 'dev/1.21.4-hardfork' of https://github.com/LuminolMC/Luminol into dev/1.21.4-hardfork  2025-06-13 02:29:33 +08:00
Bacteriawa  da25676332  Remove faster chunk serialization                    2025-05-27 19:45:24 +08:00
6 changed files with 665 additions and 5 deletions

View File

@@ -2,7 +2,7 @@ group = me.earthme.luminol
version=1.21.4-R0.1-SNAPSHOT
mcVersion=1.21.4
-foliaRef=f69d1c974daa7646b869d4c11dcc86915eec96a8
+foliaRef=b785bccd6fd270b4897a165772124eef55d58c6c
org.gradle.configuration-cache=true
org.gradle.caching=true

View File

@@ -22,7 +22,7 @@ index a814512fcfb85312474ae2c2c21443843bf57831..2e084a5b28cbe4737f48c25e10af5892
public MoonriseRegionFileIO.RegionDataController.WriteData moonrise$startWrite(
final int chunkX, final int chunkZ, final CompoundTag compound
diff --git a/ca/spottedleaf/moonrise/patches/chunk_system/io/MoonriseRegionFileIO.java b/ca/spottedleaf/moonrise/patches/chunk_system/io/MoonriseRegionFileIO.java
-index 98fbc5c8044bd945d64569f13412a6e7e49a4e7f..9709d60d3b2a5478cd6c711669e32f28ac105667 100644
+index b5c300f867f4f9670f51b6082982e9363e7a7326..8bafd7ac9706dbf6c43a136733a63ecc3ffb430d 100644
--- a/ca/spottedleaf/moonrise/patches/chunk_system/io/MoonriseRegionFileIO.java
+++ b/ca/spottedleaf/moonrise/patches/chunk_system/io/MoonriseRegionFileIO.java
@@ -1260,7 +1260,7 @@ public final class MoonriseRegionFileIO {

View File

@@ -5,7 +5,7 @@ Subject: [PATCH] SparklyPaper Optimize canSee checks
diff --git a/src/main/java/org/bukkit/craftbukkit/entity/CraftPlayer.java b/src/main/java/org/bukkit/craftbukkit/entity/CraftPlayer.java
-index 3819c748473f2cbf53890867e552985eeb8ee35f..8cb02f08ccddd69ad3ea868e848c61dfb4fa5fe7 100644
+index b6e451c8a41a1d25bac679d18156d6e813efda2a..48c85729a6de20b554325ca29c2448d515bbc7a7 100644
--- a/src/main/java/org/bukkit/craftbukkit/entity/CraftPlayer.java
+++ b/src/main/java/org/bukkit/craftbukkit/entity/CraftPlayer.java
@@ -210,7 +210,7 @@ public class CraftPlayer extends CraftHumanEntity implements Player {

View File

@@ -0,0 +1,621 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/data/BufferedLinearRegionFile.java
@@ -1,0 +_,618 @@
+package me.earthme.luminol.data;
+
+import abomination.IRegionFile;
+import ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO;
+import me.earthme.luminol.utils.DirectBufferReleaser;
+import net.jpountz.xxhash.XXHash32;
+import net.jpountz.xxhash.XXHashFactory;
+import net.minecraft.nbt.CompoundTag;
+import net.minecraft.world.level.ChunkPos;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+import java.io.*;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+public class BufferedLinearRegionFile implements IRegionFile {
+ private static final double AUTO_COMPACT_PERCENT = 3.0 / 5.0; // 60 %
+ private static final long AUTO_COMPACT_SIZE = 1024 * 1024; // 1 MiB
+
+ private static final long SUPER_BLOCK = 0x1145141919810L;
+ private static final int HASH_SEED = 0x0721;
+ private static final byte VERSION = 0x01;
+
+ private final Path filePath;
+
+ private final ReadWriteLock fileAccessLock = new ReentrantReadWriteLock();
+ private final XXHash32 xxHash32 = XXHashFactory.fastestInstance().hash32();
+ private final Sector[] sectors = new Sector[1024];
+ private long currentAcquiredIndex = this.headerSize();
+ private byte compressionLevel = 6;
+ private int xxHash32Seed = HASH_SEED;
+ private FileChannel channel;
+
+ public BufferedLinearRegionFile(Path filePath, int compressionLevel) throws IOException {
+ this(filePath);
+ this.compressionLevel = (byte) compressionLevel;
+ }
+
+ public BufferedLinearRegionFile(Path filePath) throws IOException {
+ this.channel = FileChannel.open(
+ filePath,
+ StandardOpenOption.CREATE,
+ StandardOpenOption.WRITE,
+ StandardOpenOption.READ
+ );
+ this.filePath = filePath;
+
+ for (int i = 0; i < 1024; i++) {
+ this.sectors[i] = new Sector(i, this.headerSize(), 0);
+ }
+
+ this.readHeaders();
+ }
+
+ private void readHeaders() throws IOException {
+ if (this.channel.size() < this.headerSize()) {
+ return;
+ }
+
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(this.headerSize());
+ this.channel.read(buffer, 0);
+ buffer.flip();
+
+ if (buffer.getLong() != SUPER_BLOCK || buffer.get() != VERSION) {
+ throw new IOException("Invalid file format or version mismatch");
+ }
+
+ this.compressionLevel = buffer.get(); // Compression level (overridden when a level is passed to the constructor)
+ this.xxHash32Seed = buffer.getInt(); // XXHash32 seed
+ this.currentAcquiredIndex = buffer.getLong(); // Acquired index
+
+ for (Sector sector : this.sectors) {
+ sector.restoreFrom(buffer);
+ if (sector.hasData()) {
+ this.currentAcquiredIndex = Math.max(this.currentAcquiredIndex, sector.offset + sector.length);
+ }
+ }
+
+ DirectBufferReleaser.clean(buffer);
+ }
+
+ private void writeHeaders() throws IOException {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(this.headerSize());
+
+ buffer.putLong(SUPER_BLOCK); // Magic
+ buffer.put(VERSION); // Version
+ buffer.put(this.compressionLevel); // Compression level
+ buffer.putInt(this.xxHash32Seed); // XXHash32 seed
+ buffer.putLong(this.currentAcquiredIndex); // Acquired index
+
+ for (Sector sector : this.sectors) {
+ buffer.put(sector.getEncoded());
+ }
+
+ buffer.flip();
+
+ long offset = 0;
+ while (buffer.hasRemaining()) {
+ offset += this.channel.write(buffer, offset);
+ }
+
+ DirectBufferReleaser.clean(buffer);
+ }
+
+ private int sectorSize() {
+ return this.sectors.length * Sector.sizeOfSingle();
+ }
+
+ private int headerSize() {
+ int result = 0;
+
+ result += Long.BYTES; // Magic
+ result += Byte.BYTES; // Version
+ result += Byte.BYTES; // Compression level
+ result += Integer.BYTES; // XXHash32 seed
+ result += Long.BYTES; // Acquired index
+ result += this.sectorSize(); // Sectors
+
+ return result;
+ }
+
+ private void flushInternal() throws IOException {
+ this.writeHeaders();
+
+ long spareSize = this.channel.size();
+
+ spareSize -= this.headerSize();
+ for (Sector sector : this.sectors) {
+ spareSize -= sector.length;
+ }
+
+ long sectorSize = 0;
+ for (Sector sector : this.sectors) {
+ sectorSize += sector.length;
+ }
+
+ if (spareSize > AUTO_COMPACT_SIZE && (double)spareSize > ((double)sectorSize) * AUTO_COMPACT_PERCENT) {
+ this.compact();
+ }
+ }
+
+ private void closeInternal() throws IOException {
+ this.writeHeaders();
+ this.channel.force(true);
+ this.compact();
+ this.channel.close();
+ }
+
+ private void compact() throws IOException {
+ this.writeHeaders(); // save headers for compact
+ this.channel.force(true);
+ try (FileChannel tempChannel = FileChannel.open(
+ new File(this.filePath.toString() + ".tmp").toPath(),
+ StandardOpenOption.CREATE,
+ StandardOpenOption.WRITE,
+ StandardOpenOption.READ
+ )){
+ // get the latest head in file
+ final ByteBuffer headerBuffer = ByteBuffer.allocateDirect(this.headerSize());
+ this.channel.read(headerBuffer, 0);
+ headerBuffer.flip();
+
+ long offsetHeader = 0;
+ while (headerBuffer.hasRemaining()) {
+ offsetHeader += tempChannel.write(headerBuffer, offsetHeader);
+ }
+ DirectBufferReleaser.clean(headerBuffer);
+
+ int offsetPointer = this.headerSize();
+ for (Sector sector : this.sectors) {
+ if (!sector.hasData()) {
+ continue;
+ }
+
+ final ByteBuffer sectorData = sector.read(this.channel);
+ final int length = sectorData.remaining();
+
+ final Sector newRecalculated = new Sector(sector.index, offsetPointer, length);
+ offsetPointer += length;
+ this.sectors[sector.index] = newRecalculated; // update sector infos
+
+ newRecalculated.hasData = true;
+
+ long offset = newRecalculated.offset;
+ while (sectorData.hasRemaining()) {
+ offset += tempChannel.write(sectorData, offset);
+ }
+
+ DirectBufferReleaser.clean(sectorData);
+ }
+
+ tempChannel.force(true);
+ this.currentAcquiredIndex = tempChannel.size();
+ }
+
+ Files.move(
+ new File(this.filePath.toString() + ".tmp").toPath(),
+ this.filePath,
+ java.nio.file.StandardCopyOption.REPLACE_EXISTING
+ );
+
+ this.reopenChannel();
+ this.writeHeaders();
+ }
+
+ private void reopenChannel() throws IOException {
+ if (this.channel.isOpen()) {
+ this.channel.close();
+ }
+
+ this.channel = FileChannel.open(
+ filePath,
+ StandardOpenOption.CREATE,
+ StandardOpenOption.WRITE,
+ StandardOpenOption.READ
+ );
+ }
+
+ private void writeChunkDataRaw(int chunkOrdinal, ByteBuffer chunkData) throws IOException {
+ final Sector sector = this.sectors[chunkOrdinal];
+
+ sector.store(chunkData, this.channel);
+ }
+
+ private @Nullable ByteBuffer readChunkDataRaw(int chunkOrdinal) throws IOException {
+ final Sector sector = this.sectors[chunkOrdinal];
+
+ if (!sector.hasData()) {
+ return null;
+ }
+
+ return sector.read(this.channel);
+ }
+
+ private void clearChunkData(int chunkOrdinal) throws IOException {
+ final Sector sector = this.sectors[chunkOrdinal];
+
+ sector.clear();
+
+ this.writeHeaders();
+ }
+
+ private static int getChunkIndex(int x, int z) {
+ return (x & 31) + ((z & 31) << 5);
+ }
+
+ private boolean hasData(int chunkOrdinal) {
+ return this.sectors[chunkOrdinal].hasData();
+ }
+
+ private void writeChunk(int x, int z, @NotNull ByteBuffer data) throws IOException {
+ final int chunkIndex = getChunkIndex(x, z);
+
+ final int oldPositionOfData = data.position();
+ final int xxHash32OfData = this.xxHash32.hash(data, this.xxHash32Seed);
+ data.position(oldPositionOfData);
+
+ final ByteBuffer compressedData = this.compress(this.ensureDirectBuffer(data));
+ // uncompressed length + timestamp + xxhash32
+ final ByteBuffer chunkSectionBuilder = ByteBuffer.allocateDirect(compressedData.remaining() + 4 + 8 + 4);
+
+ chunkSectionBuilder.putInt(data.remaining()); // Uncompressed length
+ chunkSectionBuilder.putLong(System.nanoTime()); // Timestamp
+ chunkSectionBuilder.putInt(xxHash32OfData); // xxHash32 of the original data
+ chunkSectionBuilder.put(compressedData); // Compressed data
+ chunkSectionBuilder.flip();
+
+ this.writeChunkDataRaw(chunkIndex, chunkSectionBuilder);
+ DirectBufferReleaser.clean(chunkSectionBuilder);
+ }
+
+ private @Nullable ByteBuffer readChunk(int x, int z) throws IOException {
+ final ByteBuffer compressed = this.readChunkDataRaw(getChunkIndex(x, z));
+
+ if (compressed == null) {
+ return null;
+ }
+
+ final int uncompressedLength = compressed.getInt();
+ final long timestamp = compressed.getLong();
+ final int dataXXHash32 = compressed.getInt();
+
+ final ByteBuffer decompressed = this.decompress(this.ensureDirectBuffer(compressed), uncompressedLength);
+
+ DirectBufferReleaser.clean(compressed);
+
+ final IOException xxHash32CheckFailedEx = this.checkXXHash32(dataXXHash32, decompressed);
+ if (xxHash32CheckFailedEx != null) {
+ throw xxHash32CheckFailedEx;
+ }
+
+ return decompressed;
+ }
+
+ private @NotNull ByteBuffer ensureDirectBuffer(@NotNull ByteBuffer buffer) {
+ if (buffer.isDirect()) {
+ return buffer;
+ }
+
+ ByteBuffer direct = ByteBuffer.allocateDirect(buffer.remaining());
+ int originalPosition = buffer.position();
+ direct.put(buffer);
+ direct.flip();
+ buffer.position(originalPosition);
+
+ return direct;
+ }
+
+ private @NotNull ByteBuffer compress(@NotNull ByteBuffer input) throws IOException {
+ final int originalPosition = input.position();
+ final int originalLimit = input.limit();
+
+ try {
+ byte[] inputArray;
+ int inputLength = input.remaining();
+ if (input.hasArray()) {
+ inputArray = input.array();
+ int arrayOffset = input.arrayOffset() + input.position();
+ if (arrayOffset != 0 || inputLength != inputArray.length) {
+ byte[] temp = new byte[inputLength];
+ System.arraycopy(inputArray, arrayOffset, temp, 0, inputLength);
+ inputArray = temp;
+ }
+ } else {
+ inputArray = new byte[inputLength];
+ input.get(inputArray);
+ input.position(originalPosition);
+ }
+
+ byte[] compressed = com.github.luben.zstd.Zstd.compress(inputArray, this.compressionLevel);
+
+ ByteBuffer result = ByteBuffer.allocateDirect(compressed.length);
+ result.put(compressed);
+ result.flip();
+
+ return result;
+
+ } catch (Exception e) {
+ throw new IOException("Compression failed for input size: " + input.remaining(), e);
+ } finally {
+ input.position(originalPosition);
+ input.limit(originalLimit);
+ }
+ }
+
+ private @NotNull ByteBuffer decompress(@NotNull ByteBuffer input, int originalSize) throws IOException {
+ final int originalPosition = input.position();
+ final int originalLimit = input.limit();
+
+ try {
+ byte[] inputArray;
+ int inputLength = input.remaining();
+
+ if (input.hasArray()) {
+ inputArray = input.array();
+ int arrayOffset = input.arrayOffset() + input.position();
+ if (arrayOffset != 0 || inputLength != inputArray.length) {
+ byte[] temp = new byte[inputLength];
+ System.arraycopy(inputArray, arrayOffset, temp, 0, inputLength);
+ inputArray = temp;
+ }
+ } else {
+ inputArray = new byte[inputLength];
+ input.get(inputArray);
+ input.position(originalPosition);
+ }
+
+ byte[] decompressed = com.github.luben.zstd.Zstd.decompress(inputArray, originalSize);
+
+ if (decompressed.length != originalSize) {
+ throw new IOException("Decompression size mismatch: expected " +
+ originalSize + ", got " + decompressed.length);
+ }
+
+ ByteBuffer result = ByteBuffer.allocateDirect(originalSize);
+ result.put(decompressed);
+ result.flip();
+
+ return result;
+
+ } catch (Exception e) {
+ throw new IOException("Decompression failed", e);
+ } finally {
+ input.position(originalPosition);
+ input.limit(originalLimit);
+ }
+ }
+
+ private @Nullable IOException checkXXHash32(long originalXXHash32, @NotNull ByteBuffer input) {
+ final int oldPositionOfInput = input.position();
+ final int currentXXHash32 = this.xxHash32.hash(input, this.xxHash32Seed);
+ input.position(oldPositionOfInput);
+
+ if (originalXXHash32 != currentXXHash32) {
+ return new IOException("XXHash32 check failed! Expected: " + originalXXHash32 + ", but got: " + currentXXHash32);
+ }
+
+ return null;
+ }
+
+ @Override
+ public Path getPath() {
+ return this.filePath;
+ }
+
+ @Override
+ public DataInputStream getChunkDataInputStream(@NotNull ChunkPos pos) throws IOException {
+ this.fileAccessLock.readLock().lock();
+ try {
+ final ByteBuffer data = this.readChunk(pos.x, pos.z);
+
+ if (data == null) {
+ return null;
+ }
+
+ final byte[] dataBytes = new byte[data.remaining()];
+ data.get(dataBytes);
+
+ DirectBufferReleaser.clean(data);
+
+ return new DataInputStream(new ByteArrayInputStream(dataBytes));
+ }finally {
+ this.fileAccessLock.readLock().unlock();
+ }
+ }
+
+ @Override
+ public boolean doesChunkExist(@NotNull ChunkPos pos) {
+ this.fileAccessLock.readLock().lock();
+ try {
+ return this.hasData(getChunkIndex(pos.x, pos.z));
+ }finally {
+ this.fileAccessLock.readLock().unlock();
+ }
+ }
+
+ @Override
+ public DataOutputStream getChunkDataOutputStream(ChunkPos pos) {
+ return new DataOutputStream(new ChunkBufferHelper(pos));
+ }
+
+ @Override
+ public void clear(@NotNull ChunkPos pos) throws IOException {
+ this.fileAccessLock.writeLock().lock();
+ try {
+ this.clearChunkData(getChunkIndex(pos.x, pos.z));
+ }finally {
+ this.fileAccessLock.writeLock().unlock();
+ }
+ }
+
+ @Override
+ public boolean hasChunk(@NotNull ChunkPos pos) {
+ this.fileAccessLock.readLock().lock();
+ try {
+ return this.hasData(getChunkIndex(pos.x, pos.z));
+ }finally {
+ this.fileAccessLock.readLock().unlock();
+ }
+ }
+
+ @Override
+ public void write(@NotNull ChunkPos pos, ByteBuffer buf) throws IOException {
+ this.fileAccessLock.writeLock().lock();
+ try {
+ this.writeChunk(pos.x, pos.z, buf);
+ }finally {
+ this.fileAccessLock.writeLock().unlock();
+ }
+ }
+
+ // From MCC; the Linear format has no use for these either
+ @Override
+ public CompoundTag getOversizedData(int x, int z) {
+ return null;
+ }
+
+ @Override
+ public boolean isOversized(int x, int z) {
+ return false;
+ }
+
+ @Override
+ public boolean recalculateHeader() {
+ return false;
+ }
+
+ @Override
+ public void setOversized(int x, int z, boolean oversized) {
+
+ }
+ // MCC end
+
+ @Override
+ public MoonriseRegionFileIO.RegionDataController.WriteData moonrise$startWrite(CompoundTag data, ChunkPos pos) {
+ final DataOutputStream out = this.getChunkDataOutputStream(pos);
+
+ return new ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO.RegionDataController.WriteData(
+ data, ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO.RegionDataController.WriteData.WriteResult.WRITE,
+ out, regionFile -> out.close()
+ );
+ }
+
+ @Override
+ public void flush() throws IOException {
+ this.fileAccessLock.writeLock().lock();
+ try {
+ this.flushInternal();
+ }finally {
+ this.fileAccessLock.writeLock().unlock();
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ this.fileAccessLock.writeLock().lock();
+ try {
+ this.closeInternal();
+ }finally {
+ this.fileAccessLock.writeLock().unlock();
+ }
+ }
+
+ private class Sector{
+ private final int index;
+ private long offset;
+ private long length;
+ private boolean hasData = false;
+
+ private Sector(int index, long offset, long length) {
+ this.index = index;
+ this.offset = offset;
+ this.length = length;
+ }
+
+ public @NotNull ByteBuffer read(@NotNull FileChannel channel) throws IOException {
+ final ByteBuffer result = ByteBuffer.allocateDirect((int) this.length);
+
+ channel.read(result, this.offset);
+ result.flip();
+
+ return result;
+ }
+
+ public void store(@NotNull ByteBuffer newData, @NotNull FileChannel channel) throws IOException {
+ this.hasData = true;
+ this.length = newData.remaining();
+ this.offset = currentAcquiredIndex;
+
+ BufferedLinearRegionFile.this.currentAcquiredIndex += this.length;
+
+ long offset = this.offset;
+ while (newData.hasRemaining()) {
+ offset += channel.write(newData, offset);
+ }
+ }
+
+ private @NotNull ByteBuffer getEncoded() {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(sizeOfSingle());
+
+ buffer.putLong(this.offset);
+ buffer.putLong(this.length);
+ buffer.put((byte) (this.hasData ? 1 : 0));
+ buffer.flip();
+
+ return buffer;
+ }
+
+ public void restoreFrom(@NotNull ByteBuffer buffer) {
+ this.offset = buffer.getLong();
+ this.length = buffer.getLong();
+ this.hasData = buffer.get() == 1;
+
+ if (this.length < 0 || this.offset < 0) {
+ throw new IllegalStateException("Invalid sector data: " + this);
+ }
+ }
+
+ public void clear() {
+ this.hasData = false;
+ }
+
+ public boolean hasData() {
+ return this.hasData;
+ }
+
+ static int sizeOfSingle() {
+ // offset + length + hasData
+ return Long.BYTES * 2 + 1;
+ }
+ }
+
+ private class ChunkBufferHelper extends ByteArrayOutputStream {
+ private final ChunkPos pos;
+
+ private ChunkBufferHelper(ChunkPos pos) {
+ this.pos = pos;
+ }
+
+ @Override
+ public void close() throws IOException {
+ BufferedLinearRegionFile.this.fileAccessLock.writeLock().lock();
+ try {
+ ByteBuffer bytebuffer = ByteBuffer.wrap(this.buf, 0, this.count);
+
+ BufferedLinearRegionFile.this.writeChunk(this.pos.x, this.pos.z, bytebuffer);
+ }finally {
+ BufferedLinearRegionFile.this.fileAccessLock.writeLock().unlock();
+ }
+ }
+ }
+}
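
For orientation only (this sketch is not part of the commit; the class name and sample values are made up), the arithmetic behind headerSize() and the per-chunk record framing written by writeChunk() can be reproduced standalone:

import java.nio.ByteBuffer;

public class BLinearLayoutSketch {
    static final int SECTOR_COUNT = 1024;               // 32 x 32 chunks per region, see getChunkIndex()
    static final int SECTOR_ENTRY = Long.BYTES * 2 + 1; // offset + length + hasData flag, see Sector.sizeOfSingle()
    static final int HEADER_FIXED = Long.BYTES          // SUPER_BLOCK magic
            + Byte.BYTES                                // VERSION
            + Byte.BYTES                                // compression level
            + Integer.BYTES                             // xxHash32 seed
            + Long.BYTES;                               // acquired index

    public static void main(String[] args) {
        // headerSize(): 22 fixed bytes + 1024 * 17 sector entries = 17430 bytes
        System.out.println("header bytes: " + (HEADER_FIXED + SECTOR_COUNT * SECTOR_ENTRY));

        // A chunk record as framed by writeChunk(): length + timestamp + hash, then the zstd payload.
        byte[] zstdPayload = {1, 2, 3};                 // stand-in for real compressed data
        ByteBuffer framed = ByteBuffer.allocate(Integer.BYTES + Long.BYTES + Integer.BYTES + zstdPayload.length);
        framed.putInt(4096)                             // uncompressed length
              .putLong(System.nanoTime())               // timestamp
              .putInt(0xCAFEBABE)                       // xxHash32 of the uncompressed data
              .put(zstdPayload)
              .flip();
        System.out.println("record bytes: " + framed.remaining());
    }
}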

View File

@@ -0,0 +1,37 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/utils/DirectBufferReleaser.java
@@ -1,0 +_,34 @@
+package me.earthme.luminol.utils;
+
+import org.jetbrains.annotations.NotNull;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.nio.ByteBuffer;
+
+public class DirectBufferReleaser {
+ private static final Method CLEANER_METHOD;
+ private static final Object UNSAFE;
+
+ static {
+ try {
+ Class<?> unsafeClass = Class.forName("sun.misc.Unsafe");
+ Field theUnsafe = unsafeClass.getDeclaredField("theUnsafe");
+ theUnsafe.setAccessible(true);
+ UNSAFE = theUnsafe.get(null);
+ CLEANER_METHOD = unsafeClass.getMethod("invokeCleaner", ByteBuffer.class);
+ } catch (Exception ex) {
+ throw new RuntimeException("Unsafe init failed", ex);
+ }
+ }
+
+ public static boolean clean(@NotNull ByteBuffer buffer) {
+ if (!buffer.isDirect()) return false;
+ try {
+ CLEANER_METHOD.invoke(UNSAFE, buffer);
+ return true;
+ } catch (Exception e) {
+ return false;
+ }
+ }
+}
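
A hedged usage note (illustrative class name, not from the commit): direct buffers normally release their native memory only when the garbage collector runs their Cleaner, so a format that allocates large scratch buffers per read/write benefits from freeing them eagerly, which is what the hot paths above do after each operation:

import java.nio.ByteBuffer;
import me.earthme.luminol.utils.DirectBufferReleaser;

public class CleanSketch {
    public static void main(String[] args) {
        ByteBuffer scratch = ByteBuffer.allocateDirect(4096);
        try {
            scratch.putInt(42); // ... use the buffer ...
        } finally {
            // Frees the off-heap allocation immediately; touching the buffer
            // afterwards is undefined. Returns false (no-op) for heap buffers.
            DirectBufferReleaser.clean(scratch);
        }
    }
}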

View File

@@ -1,16 +1,18 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/utils/EnumRegionFormat.java
-@@ -1,0 +_,40 @@
+@@ -1,0 +_,42 @@
+package me.earthme.luminol.utils;
+
+import abomination.LinearRegionFile;
+import me.earthme.luminol.config.modules.misc.RegionFormatConfig;
++import me.earthme.luminol.data.BufferedLinearRegionFile;
+import net.minecraft.world.level.chunk.storage.RegionFile;
+import org.jetbrains.annotations.Nullable;
+
+public enum EnumRegionFormat {
+ MCA("mca", "mca" , (info) -> new RegionFile(info.info(), info.filePath(), info.folder(), info.sync())),
-+ LINEAR_V2("linear_v2", "linear" ,(info) -> new LinearRegionFile(info.info(), info.filePath(), info.folder(), info.sync(), RegionFormatConfig.linearCompressionLevel));
++ LINEAR_V2("linear_v2", "linear" ,(info) -> new LinearRegionFile(info.info(), info.filePath(), info.folder(), info.sync(), RegionFormatConfig.linearCompressionLevel)),
++ B_LINEAR("b_linear", "b_linear", (info) -> new BufferedLinearRegionFile(info.filePath(), RegionFormatConfig.linearCompressionLevel));
+
+ private final String name;
+ private final String argument;