Compare commits

..

1 Commits

Author SHA1 Message Date
Helvetica Volubi
68e8267c24 refactor: use standard file structure 2025-06-18 11:56:26 +08:00
171 changed files with 6414 additions and 6732 deletions

View File

@@ -4,177 +4,6 @@ Date: Sun, 12 Jan 2025 13:27:38 +0800
Subject: [PATCH] Pufferfish Sentry
diff --git a/src/main/java/gg/pufferfish/pufferfish/sentry/SentryContext.java b/src/main/java/gg/pufferfish/pufferfish/sentry/SentryContext.java
new file mode 100644
index 0000000000000000000000000000000000000000..c7772aac00f6db664f7a5673bc2585fa025e6aad
--- /dev/null
+++ b/src/main/java/gg/pufferfish/pufferfish/sentry/SentryContext.java
@@ -0,0 +1,165 @@
+package gg.pufferfish.pufferfish.sentry;
+
+import com.google.gson.Gson;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.logging.log4j.ThreadContext;
+import org.bukkit.command.Command;
+import org.bukkit.command.CommandSender;
+import org.bukkit.entity.Player;
+import org.bukkit.event.Event;
+import org.bukkit.event.player.PlayerEvent;
+import org.bukkit.plugin.Plugin;
+import org.bukkit.plugin.RegisteredListener;
+import org.jetbrains.annotations.Nullable;
+
+public class SentryContext {
+
+ private static final Gson GSON = new Gson();
+
+ public static void setPluginContext(@Nullable Plugin plugin) {
+ if (plugin != null) {
+ ThreadContext.put("pufferfishsentry_pluginname", plugin.getName());
+ ThreadContext.put("pufferfishsentry_pluginversion", plugin.getPluginMeta().getVersion());
+ }
+ }
+
+ public static void removePluginContext() {
+ ThreadContext.remove("pufferfishsentry_pluginname");
+ ThreadContext.remove("pufferfishsentry_pluginversion");
+ }
+
+ public static void setSenderContext(@Nullable CommandSender sender) {
+ if (sender != null) {
+ ThreadContext.put("pufferfishsentry_playername", sender.getName());
+ if (sender instanceof Player player) {
+ ThreadContext.put("pufferfishsentry_playerid", player.getUniqueId().toString());
+ }
+ }
+ }
+
+ public static void removeSenderContext() {
+ ThreadContext.remove("pufferfishsentry_playername");
+ ThreadContext.remove("pufferfishsentry_playerid");
+ }
+
+ public static void setEventContext(Event event, RegisteredListener registration) {
+ setPluginContext(registration.getPlugin());
+
+ try {
+ // Find the player that was involved with this event
+ Player player = null;
+ if (event instanceof PlayerEvent) {
+ player = ((PlayerEvent) event).getPlayer();
+ } else {
+ Class<? extends Event> eventClass = event.getClass();
+
+ Field playerField = null;
+
+ for (Field field : eventClass.getDeclaredFields()) {
+ if (field.getType().equals(Player.class)) {
+ playerField = field;
+ break;
+ }
+ }
+
+ if (playerField != null) {
+ playerField.setAccessible(true);
+ player = (Player) playerField.get(event);
+ }
+ }
+
+ if (player != null) {
+ setSenderContext(player);
+ }
+ } catch (Exception ignored) {
+ } // We can't really safely log exceptions.
+
+ ThreadContext.put("pufferfishsentry_eventdata", GSON.toJson(serializeFields(event)));
+ }
+
+ public static void removeEventContext() {
+ removePluginContext();
+ removeSenderContext();
+ ThreadContext.remove("pufferfishsentry_eventdata");
+ }
+
+ private static Map<String, String> serializeFields(Object object) {
+ Map<String, String> fields = new TreeMap<>();
+ fields.put("_class", object.getClass().getName());
+ for (Field declaredField : object.getClass().getDeclaredFields()) {
+ try {
+ if (Modifier.isStatic(declaredField.getModifiers())) {
+ continue;
+ }
+
+ String fieldName = declaredField.getName();
+ if (fieldName.equals("handlers")) {
+ continue;
+ }
+ declaredField.setAccessible(true);
+ Object value = declaredField.get(object);
+ if (value != null) {
+ fields.put(fieldName, value.toString());
+ } else {
+ fields.put(fieldName, "<null>");
+ }
+ } catch (Exception ignored) {
+ } // We can't really safely log exceptions.
+ }
+ return fields;
+ }
+
+ public static class State {
+
+ private Plugin plugin;
+ private Command command;
+ private String commandLine;
+ private Event event;
+ private RegisteredListener registeredListener;
+
+ public Plugin getPlugin() {
+ return plugin;
+ }
+
+ public void setPlugin(Plugin plugin) {
+ this.plugin = plugin;
+ }
+
+ public Command getCommand() {
+ return command;
+ }
+
+ public void setCommand(Command command) {
+ this.command = command;
+ }
+
+ public String getCommandLine() {
+ return commandLine;
+ }
+
+ public void setCommandLine(String commandLine) {
+ this.commandLine = commandLine;
+ }
+
+ public Event getEvent() {
+ return event;
+ }
+
+ public void setEvent(Event event) {
+ this.event = event;
+ }
+
+ public RegisteredListener getRegisteredListener() {
+ return registeredListener;
+ }
+
+ public void setRegisteredListener(RegisteredListener registeredListener) {
+ this.registeredListener = registeredListener;
+ }
+ }
+}
diff --git a/src/main/java/org/bukkit/plugin/SimplePluginManager.java b/src/main/java/org/bukkit/plugin/SimplePluginManager.java
index ab36e3aaff57e2f27b5aed06b4bdfe277f86a35e..96da9f1082ab134d197b3a6069f2fcdf38585efe 100644
--- a/src/main/java/org/bukkit/plugin/SimplePluginManager.java

View File

@@ -4,177 +4,6 @@ Date: Sun, 12 Jan 2025 14:00:28 +0800
Subject: [PATCH] Pufferfish SIMD Utilities
diff --git a/src/main/java/gg/pufferfish/pufferfish/simd/SIMDChecker.java b/src/main/java/gg/pufferfish/pufferfish/simd/SIMDChecker.java
new file mode 100644
index 0000000000000000000000000000000000000000..856de1331b15542c00e01990f471fa5152722c11
--- /dev/null
+++ b/src/main/java/gg/pufferfish/pufferfish/simd/SIMDChecker.java
@@ -0,0 +1,35 @@
+package gg.pufferfish.pufferfish.simd;
+
+import jdk.incubator.vector.FloatVector;
+import jdk.incubator.vector.IntVector;
+import jdk.incubator.vector.VectorSpecies;
+import org.slf4j.Logger;
+
+/**
+ * Basically, java is annoying and we have to push this out to its own class.
+ */
+@Deprecated
+public class SIMDChecker {
+
+ @Deprecated
+ public static boolean canEnable(Logger logger) {
+ try {
+ SIMDDetection.testRun = true;
+
+ VectorSpecies<Integer> ISPEC = IntVector.SPECIES_PREFERRED;
+ VectorSpecies<Float> FSPEC = FloatVector.SPECIES_PREFERRED;
+
+ logger.info("Max SIMD vector size on this system is {} bits (int)", ISPEC.vectorBitSize());
+ logger.info("Max SIMD vector size on this system is " + FSPEC.vectorBitSize() + " bits (float)");
+
+ if (ISPEC.elementSize() < 2 || FSPEC.elementSize() < 2) {
+ logger.warn("SIMD is not properly supported on this system!");
+ return false;
+ }
+
+ return true;
+ } catch (NoClassDefFoundError | Exception ignored) {} // Basically, we don't do anything. This lets us detect if it's not functional and disable it.
+ return false;
+ }
+
+}
diff --git a/src/main/java/gg/pufferfish/pufferfish/simd/SIMDDetection.java b/src/main/java/gg/pufferfish/pufferfish/simd/SIMDDetection.java
new file mode 100644
index 0000000000000000000000000000000000000000..0a64cd0e88083ac4af6674ad0fb07b771109c737
--- /dev/null
+++ b/src/main/java/gg/pufferfish/pufferfish/simd/SIMDDetection.java
@@ -0,0 +1,34 @@
+package gg.pufferfish.pufferfish.simd;
+
+import org.slf4j.Logger;
+
+@Deprecated
+public class SIMDDetection {
+
+ public static boolean isEnabled = false;
+ public static boolean testRun = false;
+
+ @Deprecated
+ public static boolean canEnable(Logger logger) {
+ try {
+ return SIMDChecker.canEnable(logger);
+ } catch (NoClassDefFoundError | Exception ignored) {
+ return false;
+ }
+ }
+
+ @Deprecated
+ public static int getJavaVersion() {
+ // https://stackoverflow.com/a/2591122
+ String version = System.getProperty("java.version");
+ if(version.startsWith("1.")) {
+ version = version.substring(2, 3);
+ } else {
+ int dot = version.indexOf(".");
+ if(dot != -1) { version = version.substring(0, dot); }
+ }
+ version = version.split("-")[0]; // Azul is stupid
+ return Integer.parseInt(version);
+ }
+
+}
diff --git a/src/main/java/gg/pufferfish/pufferfish/simd/VectorMapPalette.java b/src/main/java/gg/pufferfish/pufferfish/simd/VectorMapPalette.java
new file mode 100644
index 0000000000000000000000000000000000000000..c26dcaaa2e85882730c854099df80d69eec70f33
--- /dev/null
+++ b/src/main/java/gg/pufferfish/pufferfish/simd/VectorMapPalette.java
@@ -0,0 +1,84 @@
+package gg.pufferfish.pufferfish.simd;
+
+import jdk.incubator.vector.FloatVector;
+import jdk.incubator.vector.IntVector;
+import jdk.incubator.vector.VectorMask;
+import jdk.incubator.vector.VectorSpecies;
+import org.bukkit.map.MapPalette;
+
+import java.awt.*;
+
+@Deprecated
+public class VectorMapPalette {
+
+ private static final VectorSpecies<Integer> I_SPEC = IntVector.SPECIES_PREFERRED;
+ private static final VectorSpecies<Float> F_SPEC = FloatVector.SPECIES_PREFERRED;
+
+ @Deprecated
+ public static void matchColorVectorized(int[] in, byte[] out) {
+ int speciesLength = I_SPEC.length();
+ int i;
+ for (i = 0; i < in.length - speciesLength; i += speciesLength) {
+ float[] redsArr = new float[speciesLength];
+ float[] bluesArr = new float[speciesLength];
+ float[] greensArr = new float[speciesLength];
+ int[] alphasArr = new int[speciesLength];
+
+ for (int j = 0; j < speciesLength; j++) {
+ alphasArr[j] = (in[i + j] >> 24) & 0xFF;
+ redsArr[j] = (in[i + j] >> 16) & 0xFF;
+ greensArr[j] = (in[i + j] >> 8) & 0xFF;
+ bluesArr[j] = (in[i + j] >> 0) & 0xFF;
+ }
+
+ IntVector alphas = IntVector.fromArray(I_SPEC, alphasArr, 0);
+ FloatVector reds = FloatVector.fromArray(F_SPEC, redsArr, 0);
+ FloatVector greens = FloatVector.fromArray(F_SPEC, greensArr, 0);
+ FloatVector blues = FloatVector.fromArray(F_SPEC, bluesArr, 0);
+ IntVector resultIndex = IntVector.zero(I_SPEC);
+ VectorMask<Integer> modificationMask = VectorMask.fromLong(I_SPEC, 0xffffffff);
+
+ modificationMask = modificationMask.and(alphas.lt(128).not());
+ FloatVector bestDistances = FloatVector.broadcast(F_SPEC, Float.MAX_VALUE);
+
+ for (int c = 4; c < MapPalette.colors.length; c++) {
+ // We're using 32-bit floats here because it's 2x faster and nobody will know the difference.
+ // For correctness, the original algorithm uses 64-bit floats instead. Completely unnecessary.
+ FloatVector compReds = FloatVector.broadcast(F_SPEC, MapPalette.colors[c].getRed());
+ FloatVector compGreens = FloatVector.broadcast(F_SPEC, MapPalette.colors[c].getGreen());
+ FloatVector compBlues = FloatVector.broadcast(F_SPEC, MapPalette.colors[c].getBlue());
+
+ FloatVector rMean = reds.add(compReds).div(2.0f);
+ FloatVector rDiff = reds.sub(compReds);
+ FloatVector gDiff = greens.sub(compGreens);
+ FloatVector bDiff = blues.sub(compBlues);
+
+ FloatVector weightR = rMean.div(256.0f).add(2);
+ FloatVector weightG = FloatVector.broadcast(F_SPEC, 4.0f);
+ FloatVector weightB = FloatVector.broadcast(F_SPEC, 255.0f).sub(rMean).div(256.0f).add(2.0f);
+
+ FloatVector distance = weightR.mul(rDiff).mul(rDiff).add(weightG.mul(gDiff).mul(gDiff)).add(weightB.mul(bDiff).mul(bDiff));
+
+ // Now we compare to the best distance we've found.
+ // This mask contains a "1" if better, and a "0" otherwise.
+ VectorMask<Float> bestDistanceMask = distance.lt(bestDistances);
+ bestDistances = bestDistances.blend(distance, bestDistanceMask); // Update the best distances
+
+ // Update the result array
+ // We also AND with the modification mask because we don't want to interfere if the alpha value isn't large enough.
+ resultIndex = resultIndex.blend(c, bestDistanceMask.cast(I_SPEC).and(modificationMask)); // Update the results
+ }
+
+ for (int j = 0; j < speciesLength; j++) {
+ int index = resultIndex.lane(j);
+ out[i + j] = (byte) (index < 128 ? index : -129 + (index - 127));
+ }
+ }
+
+ // For the final ones, fall back to the regular method
+ for (; i < in.length; i++) {
+ out[i] = MapPalette.matchColor(new Color(in[i], true));
+ }
+ }
+
+}
diff --git a/src/main/java/org/bukkit/map/MapPalette.java b/src/main/java/org/bukkit/map/MapPalette.java
index fc9728342de7605da69813fb44b008c1343124c0..d322e6c47d751b41e4b2f2fc45bb8d7498bff21d 100644
--- a/src/main/java/org/bukkit/map/MapPalette.java

View File

@@ -4,193 +4,6 @@ Date: Mon, 27 Jan 2025 13:01:59 +0800
Subject: [PATCH] Tick regions api
diff --git a/src/main/java/me/earthme/luminol/api/RegionStats.java b/src/main/java/me/earthme/luminol/api/RegionStats.java
new file mode 100644
index 0000000000000000000000000000000000000000..96147cace1550d14c682258dab0397587dcf76a4
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/RegionStats.java
@@ -0,0 +1,25 @@
+package me.earthme.luminol.api;
+
+/**
+ * A simple package of folia's tick region state. It is linked to the RegionStats of the NMS part, so<br>
+ * you can call these methods to get the status of this tick region.<br>
+ */
+public interface RegionStats {
+ /**
+ * Get the entity count in this tick region
+ * @return the entity count
+ */
+ int getEntityCount();
+
+ /**
+ * Get the player count in this tick region
+ * @return the player count
+ */
+ int getPlayerCount();
+
+ /**
+ * Get the chunk count in this tick region
+ * @return the chunk count
+ */
+ int getChunkCount();
+}
diff --git a/src/main/java/me/earthme/luminol/api/ThreadedRegion.java b/src/main/java/me/earthme/luminol/api/ThreadedRegion.java
new file mode 100644
index 0000000000000000000000000000000000000000..01dac0602b5f66f80c0adfbb779666fe0325a24f
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/ThreadedRegion.java
@@ -0,0 +1,56 @@
+package me.earthme.luminol.api;
+
+import org.bukkit.Location;
+import org.bukkit.World;
+
+import javax.annotation.Nullable;
+
+/**
+ * A mirror of folia's ThreadedRegion<br>
+ * Includes some handy methods to get information about the tick region<br>
+ * Note: You should call these methods inside this tick region's thread context
+ */
+public interface ThreadedRegion {
+ /**
+ * Get the center chunk pos of this tick region</br>
+ * Note:</br>
+ * 1. The global region will return a null value (but the global region is not implemented yet)<br>
+ * 2.You should call these methods inside this tick region's thread context
+ * @return The center chunk pos
+ */
+ @Nullable
+ Location getCenterChunkPos();
+
+ /**
+ * Get the dead section percent of this tick region
+ * Note: </br>
+ * 1. The dead section percent is the fraction of unloaded chunks in this tick region, and is also used to determine
+ * whether the tick region should be checked for splitting<br>
+ * 2.You should call these methods inside this tick region's thread context
+ * @return The dead section percent
+ */
+ double getDeadSectionPercent();
+
+ /**
+ * Get the tick region data of this tick region</br>
+ * Note:</br>
+ * You should call this method inside this tick region's thread context
+ * @return The tick region data
+ */
+ TickRegionData getTickRegionData();
+
+ /**
+ * Get the world of this tick region</br>
+ * Note: Global region will return a null value too
+ * @return The world of this tick region
+ */
+ @Nullable
+ World getWorld();
+
+ /**
+ * Get the id of the tick region</br>
+ * @return The id of the tick region
+ */
+ long getId();
+}
diff --git a/src/main/java/me/earthme/luminol/api/ThreadedRegionizer.java b/src/main/java/me/earthme/luminol/api/ThreadedRegionizer.java
new file mode 100644
index 0000000000000000000000000000000000000000..ff31a68a019fd9e5e687e6818f8729f4950bc060
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/ThreadedRegionizer.java
@@ -0,0 +1,56 @@
+package me.earthme.luminol.api;
+
+import org.bukkit.Location;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+import java.util.Collection;
+
+/**
+ * A mirror of folia's ThreadedRegionizer
+ */
+public interface ThreadedRegionizer {
+ /**
+ * Get all the tick regions
+ * @return Temporary copied collection of all tick regions
+ */
+ Collection<ThreadedRegion> getAllRegions();
+
+ /**
+ * Get the tick region at the given chunk coordinates
+ * @param chunkX Chunk X
+ * @param chunkZ Chunk Z
+ * @return The tick region at the given chunk coordinates
+ */
+ @Nullable
+ ThreadedRegion getAtSynchronized(int chunkX, int chunkZ);
+
+ /**
+ * Get the tick region at the given chunk coordinates
+ * @param chunkX Chunk X
+ * @param chunkZ Chunk Z
+ * @return The tick region at the given chunk coordinates
+ */
+ @Nullable
+ ThreadedRegion getAtUnSynchronized(int chunkX, int chunkZ);
+
+ /**
+ * Get the tick region at the given location
+ * @param pos The location
+ * @return The tick region at the given location
+ */
+ @Nullable
+ default ThreadedRegion getAtSynchronized(@NotNull Location pos) {
+ return this.getAtSynchronized(pos.getBlockX() >> 4, pos.getBlockZ() >> 4);
+ }
+
+ /**
+ * Get the tick region at the given location
+ * @param pos The location
+ * @return The tick region at the given location
+ */
+ @Nullable
+ default ThreadedRegion getAtUnSynchronized(@NotNull Location pos) {
+ return this.getAtUnSynchronized(pos.getBlockX() >> 4, pos.getBlockZ() >> 4);
+ }
+}
diff --git a/src/main/java/me/earthme/luminol/api/TickRegionData.java b/src/main/java/me/earthme/luminol/api/TickRegionData.java
new file mode 100644
index 0000000000000000000000000000000000000000..ecde4462b08d701b8bff9f26902f17754cf791dd
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/TickRegionData.java
@@ -0,0 +1,26 @@
+package me.earthme.luminol.api;
+
+import org.bukkit.World;
+
+/**
+ * A mirror of folia's tick region data
+ */
+public interface TickRegionData {
+ /**
+ * Get the world it's currently holding
+ * @return the world
+ */
+ World getWorld();
+
+ /**
+ * Get the current tick count
+ * @return the current tick count
+ */
+ long getCurrentTickCount();
+
+ /**
+ * Get the region stats
+ * @return the region stats
+ */
+ RegionStats getRegionStats();
+}
diff --git a/src/main/java/org/bukkit/World.java b/src/main/java/org/bukkit/World.java
index a8b64f78bf3c453094074b4b4d3c8fd07b9eb273..7927012c1afe5289d22879353a88a4574da91e01 100644
--- a/src/main/java/org/bukkit/World.java

View File

@@ -1,357 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: MrHua269 <wangxyper@163.com>
Date: Fri, 31 Jan 2025 20:28:47 +0800
Subject: [PATCH] Add missing teleportation apis for folia
diff --git a/src/main/java/me/earthme/luminol/api/entity/EntityTeleportAsyncEvent.java b/src/main/java/me/earthme/luminol/api/entity/EntityTeleportAsyncEvent.java
new file mode 100644
index 0000000000000000000000000000000000000000..a31c803831dad3d31386924cbe27deff59855fc9
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/entity/EntityTeleportAsyncEvent.java
@@ -0,0 +1,68 @@
+package me.earthme.luminol.api.entity;
+
+import org.apache.commons.lang3.Validate;
+import org.bukkit.Location;
+import org.bukkit.entity.Entity;
+import org.bukkit.event.Cancellable;
+import org.bukkit.event.Event;
+import org.bukkit.event.HandlerList;
+import org.bukkit.event.player.PlayerTeleportEvent;
+import org.jetbrains.annotations.NotNull;
+
+/**
+ * A simple event fired when a teleportAsync was called
+ * @see org.bukkit.entity.Entity#teleportAsync(org.bukkit.Location, org.bukkit.event.player.PlayerTeleportEvent.TeleportCause)
+ * @see org.bukkit.entity.Entity#teleportAsync(org.bukkit.Location)
+ * (Also fired when teleportAsync called from nms)
+ */
+public class EntityTeleportAsyncEvent extends Event {
+ private static final HandlerList HANDLERS = new HandlerList();
+
+ private final Entity entity;
+ private final PlayerTeleportEvent.TeleportCause teleportCause;
+ private final Location destination;
+
+ public EntityTeleportAsyncEvent(Entity entity, PlayerTeleportEvent.TeleportCause teleportCause, Location destination) {
+ Validate.notNull(entity, "entity cannot be a null value!");
+ Validate.notNull(teleportCause, "teleportCause cannot be a null value!");
+ Validate.notNull(destination, "destination cannot be a null value!");
+
+ this.entity = entity;
+ this.teleportCause = teleportCause;
+ this.destination = destination;
+ }
+
+ /**
+ * Get the entity that is about to be teleported
+ * @return that entity
+ */
+ public @NotNull Entity getEntity() {
+ return this.entity;
+ }
+
+ /**
+ * Get the cause of the teleport
+ * @return the cause
+ */
+ public @NotNull PlayerTeleportEvent.TeleportCause getTeleportCause() {
+ return this.teleportCause;
+ }
+
+ /**
+ * Get the destination of the teleport
+ * @return the destination
+ */
+ public @NotNull Location getDestination() {
+ return this.destination;
+ }
+
+ @Override
+ public @NotNull HandlerList getHandlers() {
+ return HANDLERS;
+ }
+
+ @NotNull
+ public static HandlerList getHandlerList() {
+ return HANDLERS;
+ }
+}
diff --git a/src/main/java/me/earthme/luminol/api/entity/PostEntityPortalEvent.java b/src/main/java/me/earthme/luminol/api/entity/PostEntityPortalEvent.java
new file mode 100644
index 0000000000000000000000000000000000000000..dd3087b407ccf4e96448701e6fbf75705498f982
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/entity/PostEntityPortalEvent.java
@@ -0,0 +1,41 @@
+package me.earthme.luminol.api.entity;
+
+import org.apache.commons.lang3.Validate;
+import org.bukkit.entity.Entity;
+import org.bukkit.event.Event;
+import org.bukkit.event.HandlerList;
+import org.jetbrains.annotations.NotNull;
+
+/**
+ * A simple event created for missing teleport events api of folia
+ * This event is fired when the entity portal process has been done
+ */
+public class PostEntityPortalEvent extends Event {
+ private static final HandlerList HANDLER_LIST = new HandlerList();
+
+ private final Entity teleportedEntity;
+
+ public PostEntityPortalEvent(Entity teleportedEntity) {
+ Validate.notNull(teleportedEntity, "teleportedEntity cannot be null!");
+
+ this.teleportedEntity = teleportedEntity;
+ }
+
+ /**
+ * Get the entity which was teleported
+ * @return the entity which was teleported
+ */
+ public Entity getTeleportedEntity() {
+ return this.teleportedEntity;
+ }
+
+ @Override
+ public @NotNull HandlerList getHandlers() {
+ return HANDLER_LIST;
+ }
+
+ @NotNull
+ public static HandlerList getHandlerList() {
+ return HANDLER_LIST;
+ }
+}
diff --git a/src/main/java/me/earthme/luminol/api/entity/PreEntityPortalEvent.java b/src/main/java/me/earthme/luminol/api/entity/PreEntityPortalEvent.java
new file mode 100644
index 0000000000000000000000000000000000000000..fc844429e3ecfe2529c0a49b8a5d958eeb188ad9
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/entity/PreEntityPortalEvent.java
@@ -0,0 +1,78 @@
+package me.earthme.luminol.api.entity;
+
+import org.apache.commons.lang3.Validate;
+import org.bukkit.Location;
+import org.bukkit.World;
+import org.bukkit.entity.Entity;
+import org.bukkit.event.Cancellable;
+import org.bukkit.event.Event;
+import org.bukkit.event.HandlerList;
+import org.jetbrains.annotations.NotNull;
+
+/**
+ * A simple event created for missing teleport events api of folia
+ * This event will be fired when a portal teleportation is about to happen
+ */
+public class PreEntityPortalEvent extends Event implements Cancellable {
+ private static final HandlerList HANDLERS = new HandlerList();
+
+ private final Entity entity;
+ private final Location portalPos;
+ private final World destination;
+
+ private boolean cancelled = false;
+
+ public PreEntityPortalEvent(Entity entity, Location portalPos, World destination) {
+ Validate.notNull(entity, "entity cannot be null!");
+ Validate.notNull(portalPos, "portalPos cannot be null!");
+ Validate.notNull(destination, "destination cannot be null!");
+
+ this.entity = entity;
+ this.portalPos = portalPos;
+ this.destination = destination;
+ }
+
+ /**
+ * Get the entity that is about to teleport
+ * @return the entity
+ */
+ public @NotNull Entity getEntity() {
+ return this.entity;
+ }
+
+ /**
+ * Get the location of the portal
+ * @return the portal location
+ */
+ public @NotNull Location getPortalPos() {
+ return this.portalPos;
+ }
+
+ /**
+ * Get the destination world
+ * @return the destination world
+ */
+ public @NotNull World getDestination() {
+ return this.destination;
+ }
+
+ @Override
+ public boolean isCancelled() {
+ return this.cancelled;
+ }
+
+ @Override
+ public void setCancelled(boolean cancel) {
+ this.cancelled = cancel;
+ }
+
+ @Override
+ public @NotNull HandlerList getHandlers() {
+ return HANDLERS;
+ }
+
+ @NotNull
+ public static HandlerList getHandlerList() {
+ return HANDLERS;
+ }
+}
diff --git a/src/main/java/me/earthme/luminol/api/entity/player/PostPlayerRespawnEvent.java b/src/main/java/me/earthme/luminol/api/entity/player/PostPlayerRespawnEvent.java
new file mode 100644
index 0000000000000000000000000000000000000000..9a561455560dfeee1d8762297ebf15a7c11de4d1
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/entity/player/PostPlayerRespawnEvent.java
@@ -0,0 +1,40 @@
+package me.earthme.luminol.api.entity.player;
+
+import org.apache.commons.lang3.Validate;
+import org.bukkit.entity.Player;
+import org.bukkit.event.Event;
+import org.bukkit.event.HandlerList;
+import org.jetbrains.annotations.NotNull;
+
+/**
+ * A simple event fired when the respawn process of player is done
+ */
+public class PostPlayerRespawnEvent extends Event {
+ private static final HandlerList HANDLERS = new HandlerList();
+
+ private final Player player;
+
+ public PostPlayerRespawnEvent(Player player) {
+ Validate.notNull(player, "Player cannot be a null value!");
+
+ this.player = player;
+ }
+
+ /**
+ * Get the respawned player
+ * @return the player
+ */
+ public @NotNull Player getPlayer() {
+ return this.player;
+ }
+
+ @Override
+ public @NotNull HandlerList getHandlers() {
+ return HANDLERS;
+ }
+
+ @NotNull
+ public static HandlerList getHandlerList() {
+ return HANDLERS;
+ }
+}
diff --git a/src/main/java/me/earthme/luminol/api/portal/EndPlatformCreateEvent.java b/src/main/java/me/earthme/luminol/api/portal/EndPlatformCreateEvent.java
new file mode 100644
index 0000000000000000000000000000000000000000..cf87a7cce5d1ebec9709b762595609344807150b
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/portal/EndPlatformCreateEvent.java
@@ -0,0 +1,35 @@
+package me.earthme.luminol.api.portal;
+
+import org.bukkit.event.Cancellable;
+import org.bukkit.event.Event;
+import org.bukkit.event.HandlerList;
+import org.jetbrains.annotations.NotNull;
+
+/**
+ * An event fired when an end platform is created.
+ */
+public class EndPlatformCreateEvent extends Event implements Cancellable {
+ private static final HandlerList HANDLERS = new HandlerList();
+
+ private boolean cancelled = false;
+
+ @Override
+ public boolean isCancelled() {
+ return this.cancelled;
+ }
+
+ @Override
+ public void setCancelled(boolean cancel) {
+ this.cancelled = cancel;
+ }
+
+ @Override
+ public @NotNull HandlerList getHandlers() {
+ return HANDLERS;
+ }
+
+ @NotNull
+ public static HandlerList getHandlerList() {
+ return HANDLERS;
+ }
+}
diff --git a/src/main/java/me/earthme/luminol/api/portal/PortalLocateEvent.java b/src/main/java/me/earthme/luminol/api/portal/PortalLocateEvent.java
new file mode 100644
index 0000000000000000000000000000000000000000..e09ffb99aad6f6acca3d6a411877715b90413eb0
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/portal/PortalLocateEvent.java
@@ -0,0 +1,53 @@
+package me.earthme.luminol.api.portal;
+
+import org.apache.commons.lang3.Validate;
+import org.bukkit.Location;
+import org.bukkit.event.Event;
+import org.bukkit.event.HandlerList;
+import org.jetbrains.annotations.NotNull;
+
+/**
+ * An event fired when the portal process starts locating the destination position.
+ * Notice: If you change the destination to another position during an End teleportation, the end platform
+ * will not be created under the entity; it is also not created if the position is outside the current tick region.
+ */
+public class PortalLocateEvent extends Event {
+ private static final HandlerList HANDLERS = new HandlerList();
+
+ private final Location original;
+ private final Location destination;
+
+ public PortalLocateEvent(Location original, Location destination) {
+ Validate.notNull(original, "original couldn't be null!");
+ Validate.notNull(destination, "destination couldn't be null!");
+
+ this.original = original;
+ this.destination = destination;
+ }
+
+ /**
+ * Get the destination position of this teleportation
+ * @return the destination position
+ */
+ public Location getDestination() {
+ return this.destination;
+ }
+
+ /**
+ * Get the original portal position of this teleportation
+ * @return the original portal position
+ */
+ public Location getOriginal() {
+ return this.original;
+ }
+
+ @Override
+ public @NotNull HandlerList getHandlers() {
+ return HANDLERS;
+ }
+
+ @NotNull
+ public static HandlerList getHandlerList() {
+ return HANDLERS;
+ }
+}

View File

@@ -0,0 +1,165 @@
package gg.pufferfish.pufferfish.sentry;
import com.google.gson.Gson;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Map;
import java.util.TreeMap;
import org.apache.logging.log4j.ThreadContext;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.event.Event;
import org.bukkit.event.player.PlayerEvent;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.RegisteredListener;
import org.jetbrains.annotations.Nullable;
/**
 * Pushes contextual metadata (active plugin, command sender, event data) into the
 * log4j {@link ThreadContext} so that Sentry error reports carry debugging info.
 */
public class SentryContext {

    private static final Gson GSON = new Gson();

    /** Records the plugin's name and version in the current thread's logging context. */
    public static void setPluginContext(@Nullable Plugin plugin) {
        if (plugin == null) {
            return;
        }
        ThreadContext.put("pufferfishsentry_pluginname", plugin.getName());
        ThreadContext.put("pufferfishsentry_pluginversion", plugin.getPluginMeta().getVersion());
    }

    /** Clears the keys written by {@link #setPluginContext(Plugin)}. */
    public static void removePluginContext() {
        ThreadContext.remove("pufferfishsentry_pluginname");
        ThreadContext.remove("pufferfishsentry_pluginversion");
    }

    /** Records the sender's name and, for players, their UUID. */
    public static void setSenderContext(@Nullable CommandSender sender) {
        if (sender == null) {
            return;
        }
        ThreadContext.put("pufferfishsentry_playername", sender.getName());
        if (sender instanceof Player player) {
            ThreadContext.put("pufferfishsentry_playerid", player.getUniqueId().toString());
        }
    }

    /** Clears the keys written by {@link #setSenderContext(CommandSender)}. */
    public static void removeSenderContext() {
        ThreadContext.remove("pufferfishsentry_playername");
        ThreadContext.remove("pufferfishsentry_playerid");
    }

    /**
     * Records plugin, involved player (best effort) and serialized event fields for
     * the event about to be dispatched to {@code registration}.
     */
    public static void setEventContext(Event event, RegisteredListener registration) {
        setPluginContext(registration.getPlugin());

        try {
            Player involved = findInvolvedPlayer(event);
            if (involved != null) {
                setSenderContext(involved);
            }
        } catch (Exception ignored) {
            // Deliberately swallowed: we can't safely log from this code path.
        }

        ThreadContext.put("pufferfishsentry_eventdata", GSON.toJson(serializeFields(event)));
    }

    // Best-effort lookup of the player tied to this event: use the PlayerEvent API
    // when available, otherwise reflect over the event's declared Player fields.
    private static @Nullable Player findInvolvedPlayer(Event event) throws IllegalAccessException {
        if (event instanceof PlayerEvent playerEvent) {
            return playerEvent.getPlayer();
        }
        for (Field field : event.getClass().getDeclaredFields()) {
            if (field.getType().equals(Player.class)) {
                field.setAccessible(true);
                return (Player) field.get(event);
            }
        }
        return null;
    }

    /** Clears everything written by {@link #setEventContext(Event, RegisteredListener)}. */
    public static void removeEventContext() {
        removePluginContext();
        removeSenderContext();
        ThreadContext.remove("pufferfishsentry_eventdata");
    }

    /**
     * Flattens every readable, non-static instance field of {@code object} into a
     * sorted name-to-string map, plus a synthetic {@code "_class"} entry. The
     * {@code "handlers"} field is skipped; unreadable fields are silently ignored.
     */
    private static Map<String, String> serializeFields(Object object) {
        Map<String, String> snapshot = new TreeMap<>();
        snapshot.put("_class", object.getClass().getName());
        for (Field field : object.getClass().getDeclaredFields()) {
            try {
                if (Modifier.isStatic(field.getModifiers())) {
                    continue;
                }
                String name = field.getName();
                if (name.equals("handlers")) {
                    continue;
                }
                field.setAccessible(true);
                Object value = field.get(object);
                snapshot.put(name, value == null ? "<null>" : value.toString());
            } catch (Exception ignored) {
                // Deliberately swallowed: we can't safely log from this code path.
            }
        }
        return snapshot;
    }

    /** Mutable holder for the in-flight plugin/command/event, used when building a report. */
    public static class State {

        private Plugin plugin;
        private Command command;
        private String commandLine;
        private Event event;
        private RegisteredListener registeredListener;

        public Plugin getPlugin() {
            return plugin;
        }

        public void setPlugin(Plugin plugin) {
            this.plugin = plugin;
        }

        public Command getCommand() {
            return command;
        }

        public void setCommand(Command command) {
            this.command = command;
        }

        public String getCommandLine() {
            return commandLine;
        }

        public void setCommandLine(String commandLine) {
            this.commandLine = commandLine;
        }

        public Event getEvent() {
            return event;
        }

        public void setEvent(Event event) {
            this.event = event;
        }

        public RegisteredListener getRegisteredListener() {
            return registeredListener;
        }

        public void setRegisteredListener(RegisteredListener registeredListener) {
            this.registeredListener = registeredListener;
        }
    }
}

View File

@@ -0,0 +1,35 @@
package gg.pufferfish.pufferfish.simd;
import jdk.incubator.vector.FloatVector;
import jdk.incubator.vector.IntVector;
import jdk.incubator.vector.VectorSpecies;
import org.slf4j.Logger;
/**
 * Probes the incubating Vector API in isolation.
 * <p>
 * Kept in its own class so the {@code jdk.incubator.vector} classes are only loaded
 * when this method is invoked; on JVMs without that module, the resulting
 * {@link NoClassDefFoundError} is caught here instead of breaking class
 * initialization elsewhere.
 */
@Deprecated
public class SIMDChecker {

    /**
     * Checks whether usable SIMD vector species are available on this system.
     *
     * @param logger logger used to report the detected vector sizes
     * @return {@code true} if SIMD can be enabled, {@code false} otherwise
     */
    @Deprecated
    public static boolean canEnable(Logger logger) {
        try {
            SIMDDetection.testRun = true;
            VectorSpecies<Integer> intSpecies = IntVector.SPECIES_PREFERRED;
            VectorSpecies<Float> floatSpecies = FloatVector.SPECIES_PREFERRED;
            // Use parameterized logging consistently; the float line previously
            // built the message via string concatenation.
            logger.info("Max SIMD vector size on this system is {} bits (int)", intSpecies.vectorBitSize());
            logger.info("Max SIMD vector size on this system is {} bits (float)", floatSpecies.vectorBitSize());
            // NOTE(review): elementSize() is the per-lane bit width (32 for int/float),
            // so this condition can never be true as written; the lane count
            // (length()) may have been intended — confirm before changing.
            if (intSpecies.elementSize() < 2 || floatSpecies.elementSize() < 2) {
                logger.warn("SIMD is not properly supported on this system!");
                return false;
            }
            return true;
        } catch (NoClassDefFoundError | Exception ignored) {
            // Vector API missing or unusable — fall through and report SIMD as unavailable.
        }
        return false;
    }
}

View File

@@ -0,0 +1,34 @@
package gg.pufferfish.pufferfish.simd;
import org.slf4j.Logger;
/**
 * Entry point for SIMD capability detection. Delegates the actual Vector API
 * probe to {@link SIMDChecker} so that missing incubator classes surface as a
 * catchable {@link NoClassDefFoundError} here.
 */
@Deprecated
public class SIMDDetection {

    // Whether SIMD support has been enabled.
    public static boolean isEnabled = false;
    // Whether a detection run has been attempted.
    public static boolean testRun = false;

    /**
     * Safely probes for SIMD support.
     *
     * @param logger logger passed through to the checker
     * @return {@code true} if SIMD can be enabled, {@code false} on any failure
     */
    @Deprecated
    public static boolean canEnable(Logger logger) {
        try {
            return SIMDChecker.canEnable(logger);
        } catch (NoClassDefFoundError | Exception ignored) {
            return false;
        }
    }

    /**
     * Parses the major Java version from the {@code java.version} system property.
     *
     * @return the major Java version (e.g. 8, 17, 21)
     */
    @Deprecated
    public static int getJavaVersion() {
        // https://stackoverflow.com/a/2591122
        String version = System.getProperty("java.version");
        if (version.startsWith("1.")) {
            // Legacy scheme "1.x.y" — the major version is the second component.
            version = version.substring(2, 3);
        } else {
            int firstDot = version.indexOf(".");
            if (firstDot != -1) {
                version = version.substring(0, firstDot);
            }
        }
        // Azul builds append a suffix such as "-ea"; keep only the leading number.
        return Integer.parseInt(version.split("-")[0]);
    }
}

View File

@@ -0,0 +1,84 @@
package gg.pufferfish.pufferfish.simd;
import jdk.incubator.vector.FloatVector;
import jdk.incubator.vector.IntVector;
import jdk.incubator.vector.VectorMask;
import jdk.incubator.vector.VectorSpecies;
import org.bukkit.map.MapPalette;
import java.awt.*;
@Deprecated
public class VectorMapPalette {
    // Preferred (widest hardware-supported) vector shapes for this CPU.
    private static final VectorSpecies<Integer> I_SPEC = IntVector.SPECIES_PREFERRED;
    private static final VectorSpecies<Float> F_SPEC = FloatVector.SPECIES_PREFERRED;
    /**
     * Vectorized counterpart of {@link MapPalette#matchColor(Color)} over an array of
     * packed ARGB pixels: for every input pixel, writes the index of the closest
     * palette color (encoded as a signed map-palette byte) into {@code out}.
     * Pixels with alpha below 128 keep the default index 0.
     *
     * @param in  packed ARGB pixels (alpha in bits 24-31, red 16-23, green 8-15, blue 0-7)
     * @param out receives one palette byte per input pixel; assumed to be at least
     *            {@code in.length} long — TODO confirm against callers
     */
    @Deprecated
    public static void matchColorVectorized(int[] in, byte[] out) {
        int speciesLength = I_SPEC.length();
        int i;
        // NOTE(review): with `<` instead of `<=`, an exact-multiple input processes its
        // last full vector via the scalar fallback below. Harmless, just slightly slower.
        for (i = 0; i < in.length - speciesLength; i += speciesLength) {
            // Unpack one vector's worth of ARGB channels into lane arrays.
            float[] redsArr = new float[speciesLength];
            float[] bluesArr = new float[speciesLength];
            float[] greensArr = new float[speciesLength];
            int[] alphasArr = new int[speciesLength];
            for (int j = 0; j < speciesLength; j++) {
                alphasArr[j] = (in[i + j] >> 24) & 0xFF;
                redsArr[j] = (in[i + j] >> 16) & 0xFF;
                greensArr[j] = (in[i + j] >> 8) & 0xFF;
                bluesArr[j] = (in[i + j] >> 0) & 0xFF;
            }
            IntVector alphas = IntVector.fromArray(I_SPEC, alphasArr, 0);
            FloatVector reds = FloatVector.fromArray(F_SPEC, redsArr, 0);
            FloatVector greens = FloatVector.fromArray(F_SPEC, greensArr, 0);
            FloatVector blues = FloatVector.fromArray(F_SPEC, bluesArr, 0);
            IntVector resultIndex = IntVector.zero(I_SPEC);
            // Only lanes with alpha >= 128 are eligible for a palette match;
            // the rest keep index 0 (transparent).
            VectorMask<Integer> modificationMask = VectorMask.fromLong(I_SPEC, 0xffffffff);
            modificationMask = modificationMask.and(alphas.lt(128).not());
            FloatVector bestDistances = FloatVector.broadcast(F_SPEC, Float.MAX_VALUE);
            // c starts at 4, skipping the leading palette entries — presumably the
            // transparent colors, matching MapPalette.matchColor; confirm.
            for (int c = 4; c < MapPalette.colors.length; c++) {
                // We're using 32-bit floats here because it's 2x faster and nobody will know the difference.
                // For correctness, the original algorithm uses 64-bit floats instead. Completely unnecessary.
                FloatVector compReds = FloatVector.broadcast(F_SPEC, MapPalette.colors[c].getRed());
                FloatVector compGreens = FloatVector.broadcast(F_SPEC, MapPalette.colors[c].getGreen());
                FloatVector compBlues = FloatVector.broadcast(F_SPEC, MapPalette.colors[c].getBlue());
                // Red-mean weighted Euclidean distance in RGB space.
                FloatVector rMean = reds.add(compReds).div(2.0f);
                FloatVector rDiff = reds.sub(compReds);
                FloatVector gDiff = greens.sub(compGreens);
                FloatVector bDiff = blues.sub(compBlues);
                FloatVector weightR = rMean.div(256.0f).add(2);
                FloatVector weightG = FloatVector.broadcast(F_SPEC, 4.0f);
                FloatVector weightB = FloatVector.broadcast(F_SPEC, 255.0f).sub(rMean).div(256.0f).add(2.0f);
                FloatVector distance = weightR.mul(rDiff).mul(rDiff).add(weightG.mul(gDiff).mul(gDiff)).add(weightB.mul(bDiff).mul(bDiff));
                // Now we compare to the best distance we've found.
                // This mask contains a "1" if better, and a "0" otherwise.
                VectorMask<Float> bestDistanceMask = distance.lt(bestDistances);
                bestDistances = bestDistances.blend(distance, bestDistanceMask); // Update the best distances
                // Update the result array
                // We also AND with the modification mask because we don't want to interfere if the alpha value isn't large enough.
                resultIndex = resultIndex.blend(c, bestDistanceMask.cast(I_SPEC).and(modificationMask)); // Update the results
            }
            // Re-encode each winning palette index into the signed byte range used by maps.
            for (int j = 0; j < speciesLength; j++) {
                int index = resultIndex.lane(j);
                out[i + j] = (byte) (index < 128 ? index : -129 + (index - 127));
            }
        }
        // For the final ones, fall back to the regular method
        for (; i < in.length; i++) {
            out[i] = MapPalette.matchColor(new Color(in[i], true));
        }
    }
}

View File

@@ -0,0 +1,25 @@
package me.earthme.luminol.api;
/**
 * Read-only view of a Folia tick region's statistics.
 * <p>
 * Backed by the {@code RegionStats} of the NMS layer, so these methods report
 * the live status of the owning tick region.
 */
public interface RegionStats {
    /**
     * Gets the number of entities in this tick region.
     *
     * @return the entity count
     */
    int getEntityCount();
    /**
     * Gets the number of players in this tick region.
     *
     * @return the player count
     */
    int getPlayerCount();
    /**
     * Gets the number of chunks in this tick region.
     *
     * @return the chunk count
     */
    int getChunkCount();
}

View File

@@ -0,0 +1,56 @@
package me.earthme.luminol.api;
import org.bukkit.Location;
import org.bukkit.World;
import javax.annotation.Nullable;
/**
 * A mirror of Folia's {@code ThreadedRegion}, exposing information about a
 * single tick region.
 * <p>
 * Note: these methods should be called from within this tick region's own
 * thread context.
 */
public interface ThreadedRegion {
    /**
     * Gets the center chunk position of this tick region.
     * <p>
     * Notes:
     * <ol>
     * <li>The global region returns {@code null} (global-region support is not
     * finished yet).</li>
     * <li>Call this from within this tick region's thread context.</li>
     * </ol>
     *
     * @return the center chunk position, or {@code null} for the global region
     */
    @Nullable
    Location getCenterChunkPos();
    /**
     * Gets the dead-section percentage of this tick region.
     * <p>
     * Notes:
     * <ol>
     * <li>The dead percentage is the fraction of unloaded chunks in this tick
     * region, which is also used to decide whether the region should be checked
     * for splitting.</li>
     * <li>Call this from within this tick region's thread context.</li>
     * </ol>
     *
     * @return the dead section percentage
     */
    double getDeadSectionPercent();
    /**
     * Gets the tick region data of this tick region.
     * <p>
     * Note: call this from within this tick region's thread context.
     *
     * @return the tick region data
     */
    TickRegionData getTickRegionData();
    /**
     * Gets the world this tick region belongs to.
     * <p>
     * Note: the global region returns {@code null} here as well.
     *
     * @return the world of this tick region, or {@code null} for the global region
     */
    @Nullable
    World getWorld();
    /**
     * Gets the id of this tick region.
     *
     * @return the id of the tick region
     */
    long getId();
}

View File

@@ -0,0 +1,56 @@
package me.earthme.luminol.api;
import org.bukkit.Location;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
/**
 * A mirror of Folia's {@code ThreadedRegionizer}: looks up tick regions by
 * chunk coordinates or location.
 */
public interface ThreadedRegionizer {
    /**
     * Gets all tick regions.
     *
     * @return a temporary copied collection of all tick regions
     */
    Collection<ThreadedRegion> getAllRegions();
    /**
     * Gets the tick region at the given chunk coordinates.
     * <p>
     * NOTE(review): presumably performs the lookup under the regionizer's lock,
     * unlike {@link #getAtUnSynchronized(int, int)} — confirm against the NMS
     * implementation.
     *
     * @param chunkX chunk X
     * @param chunkZ chunk Z
     * @return the tick region at the given chunk coordinates, or {@code null}
     */
    @Nullable
    ThreadedRegion getAtSynchronized(int chunkX, int chunkZ);
    /**
     * Gets the tick region at the given chunk coordinates without synchronization.
     *
     * @param chunkX chunk X
     * @param chunkZ chunk Z
     * @return the tick region at the given chunk coordinates, or {@code null}
     */
    @Nullable
    ThreadedRegion getAtUnSynchronized(int chunkX, int chunkZ);
    /**
     * Gets the tick region containing the given location (block coordinates are
     * converted to chunk coordinates).
     *
     * @param pos the location
     * @return the tick region at the given location, or {@code null}
     */
    @Nullable
    default ThreadedRegion getAtSynchronized(@NotNull Location pos) {
    return this.getAtSynchronized(pos.getBlockX() >> 4, pos.getBlockZ() >> 4);
    }
    /**
     * Gets the tick region containing the given location without synchronization.
     *
     * @param pos the location
     * @return the tick region at the given location, or {@code null}
     */
    @Nullable
    default ThreadedRegion getAtUnSynchronized(@NotNull Location pos) {
    return this.getAtUnSynchronized(pos.getBlockX() >> 4, pos.getBlockZ() >> 4);
    }
}

View File

@@ -0,0 +1,26 @@
package me.earthme.luminol.api;
import org.bukkit.World;
/**
 * A mirror of Folia's tick region data.
 */
public interface TickRegionData {
    /**
     * Gets the world this region data currently belongs to.
     *
     * @return the world
     */
    World getWorld();
    /**
     * Gets the current tick count of this region.
     *
     * @return the current tick count
     */
    long getCurrentTickCount();
    /**
     * Gets the statistics of this region.
     *
     * @return the region stats
     */
    RegionStats getRegionStats();
}

View File

@@ -0,0 +1,68 @@
package me.earthme.luminol.api.entity;
import org.apache.commons.lang3.Validate;
import org.bukkit.Location;
import org.bukkit.entity.Entity;
import org.bukkit.event.Cancellable;
import org.bukkit.event.Event;
import org.bukkit.event.HandlerList;
import org.bukkit.event.player.PlayerTeleportEvent;
import org.jetbrains.annotations.NotNull;
/**
 * Fired whenever an asynchronous teleport is requested for an entity, including
 * requests originating from internal server code.
 *
 * @see org.bukkit.entity.Entity#teleportAsync(org.bukkit.Location, org.bukkit.event.player.PlayerTeleportEvent.TeleportCause)
 * @see org.bukkit.entity.Entity#teleportAsync(org.bukkit.Location)
 */
public class EntityTeleportAsyncEvent extends Event {

    private static final HandlerList HANDLERS = new HandlerList();

    private final Entity teleportingEntity;
    private final PlayerTeleportEvent.TeleportCause cause;
    private final Location target;

    public EntityTeleportAsyncEvent(Entity entity, PlayerTeleportEvent.TeleportCause teleportCause, Location destination) {
        Validate.notNull(entity, "entity cannot be a null value!");
        Validate.notNull(teleportCause, "teleportCause cannot be a null value!");
        Validate.notNull(destination, "destination cannot be a null value!");
        this.teleportingEntity = entity;
        this.cause = teleportCause;
        this.target = destination;
    }

    /**
     * Gets the entity that is about to be teleported.
     *
     * @return the teleporting entity
     */
    public @NotNull Entity getEntity() {
        return this.teleportingEntity;
    }

    /**
     * Gets the cause of this teleport.
     *
     * @return the teleport cause
     */
    public @NotNull PlayerTeleportEvent.TeleportCause getTeleportCause() {
        return this.cause;
    }

    /**
     * Gets the location the entity is being teleported to.
     *
     * @return the destination location
     */
    public @NotNull Location getDestination() {
        return this.target;
    }

    @Override
    public @NotNull HandlerList getHandlers() {
        return HANDLERS;
    }

    public static @NotNull HandlerList getHandlerList() {
        return HANDLERS;
    }
}

View File

@@ -0,0 +1,41 @@
package me.earthme.luminol.api.entity;
import org.apache.commons.lang3.Validate;
import org.bukkit.entity.Entity;
import org.bukkit.event.Event;
import org.bukkit.event.HandlerList;
import org.jetbrains.annotations.NotNull;
/**
 * Fills a gap in Folia's teleport event API: fired once an entity's portal
 * teleportation has completed.
 */
public class PostEntityPortalEvent extends Event {

    private static final HandlerList HANDLER_LIST = new HandlerList();

    private final Entity entity;

    public PostEntityPortalEvent(Entity teleportedEntity) {
        Validate.notNull(teleportedEntity, "teleportedEntity cannot be null!");
        this.entity = teleportedEntity;
    }

    /**
     * Gets the entity that was teleported through the portal.
     *
     * @return the teleported entity
     */
    public Entity getTeleportedEntity() {
        return this.entity;
    }

    @Override
    public @NotNull HandlerList getHandlers() {
        return HANDLER_LIST;
    }

    public static @NotNull HandlerList getHandlerList() {
        return HANDLER_LIST;
    }
}

View File

@@ -0,0 +1,78 @@
package me.earthme.luminol.api.entity;
import org.apache.commons.lang3.Validate;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.entity.Entity;
import org.bukkit.event.Cancellable;
import org.bukkit.event.Event;
import org.bukkit.event.HandlerList;
import org.jetbrains.annotations.NotNull;
/**
 * Fills a gap in Folia's teleport event API: fired when a portal teleportation
 * is about to happen. Cancelling it aborts the teleport.
 */
public class PreEntityPortalEvent extends Event implements Cancellable {

    private static final HandlerList HANDLERS = new HandlerList();

    private final Entity portalUser;
    private final Location portalLocation;
    private final World targetWorld;
    private boolean cancelled;

    public PreEntityPortalEvent(Entity entity, Location portalPos, World destination) {
        Validate.notNull(entity, "entity cannot be null!");
        Validate.notNull(portalPos, "portalPos cannot be null!");
        Validate.notNull(destination, "destination cannot be null!");
        this.portalUser = entity;
        this.portalLocation = portalPos;
        this.targetWorld = destination;
    }

    /**
     * Gets the entity that is about to be teleported.
     *
     * @return the entity
     */
    public @NotNull Entity getEntity() {
        return this.portalUser;
    }

    /**
     * Gets the location of the portal being used.
     *
     * @return the portal location
     */
    public @NotNull Location getPortalPos() {
        return this.portalLocation;
    }

    /**
     * Gets the world the entity would be teleported to.
     *
     * @return the destination world
     */
    public @NotNull World getDestination() {
        return this.targetWorld;
    }

    @Override
    public boolean isCancelled() {
        return this.cancelled;
    }

    @Override
    public void setCancelled(boolean cancel) {
        this.cancelled = cancel;
    }

    @Override
    public @NotNull HandlerList getHandlers() {
        return HANDLERS;
    }

    public static @NotNull HandlerList getHandlerList() {
        return HANDLERS;
    }
}

View File

@@ -0,0 +1,40 @@
package me.earthme.luminol.api.entity.player;
import org.apache.commons.lang3.Validate;
import org.bukkit.entity.Player;
import org.bukkit.event.Event;
import org.bukkit.event.HandlerList;
import org.jetbrains.annotations.NotNull;
/**
 * Fired once a player's respawn process has completed.
 */
public class PostPlayerRespawnEvent extends Event {

    private static final HandlerList HANDLERS = new HandlerList();

    private final Player respawnedPlayer;

    public PostPlayerRespawnEvent(Player player) {
        Validate.notNull(player, "Player cannot be a null value!");
        this.respawnedPlayer = player;
    }

    /**
     * Gets the player who respawned.
     *
     * @return the respawned player
     */
    public @NotNull Player getPlayer() {
        return this.respawnedPlayer;
    }

    @Override
    public @NotNull HandlerList getHandlers() {
        return HANDLERS;
    }

    public static @NotNull HandlerList getHandlerList() {
        return HANDLERS;
    }
}

View File

@@ -0,0 +1,35 @@
package me.earthme.luminol.api.portal;
import org.bukkit.event.Cancellable;
import org.bukkit.event.Event;
import org.bukkit.event.HandlerList;
import org.jetbrains.annotations.NotNull;
/**
 * Fired when an end platform is about to be created; cancelling it prevents the
 * platform from being placed.
 */
public class EndPlatformCreateEvent extends Event implements Cancellable {

    private static final HandlerList HANDLERS = new HandlerList();

    private boolean cancelled;

    @Override
    public boolean isCancelled() {
        return this.cancelled;
    }

    @Override
    public void setCancelled(boolean cancel) {
        this.cancelled = cancel;
    }

    @Override
    public @NotNull HandlerList getHandlers() {
        return HANDLERS;
    }

    public static @NotNull HandlerList getHandlerList() {
        return HANDLERS;
    }
}

View File

@@ -0,0 +1,53 @@
package me.earthme.luminol.api.portal;
import org.apache.commons.lang3.Validate;
import org.bukkit.Location;
import org.bukkit.event.Event;
import org.bukkit.event.HandlerList;
import org.jetbrains.annotations.NotNull;
/**
 * Fired when the portal process starts locating the destination position.
 * <p>
 * Note: presumably mutating the destination influences where the teleport lands;
 * in end teleportation, a moved destination means the end platform will not be
 * created under the entity, nor created at all if the position falls outside the
 * current tick region — confirm against the caller.
 */
public class PortalLocateEvent extends Event {

    private static final HandlerList HANDLERS = new HandlerList();

    private final Location sourcePortal;
    private final Location target;

    public PortalLocateEvent(Location original, Location destination) {
        Validate.notNull(original, "original couldn't be null!");
        Validate.notNull(destination, "destination couldn't be null!");
        this.sourcePortal = original;
        this.target = destination;
    }

    /**
     * Gets the destination position of this teleportation.
     *
     * @return the destination position
     */
    public Location getDestination() {
        return this.target;
    }

    /**
     * Gets the original portal position of this teleportation.
     *
     * @return the original portal position
     */
    public Location getOriginal() {
        return this.sourcePortal;
    }

    @Override
    public @NotNull HandlerList getHandlers() {
        return HANDLERS;
    }

    public static @NotNull HandlerList getHandlerList() {
        return HANDLERS;
    }
}

View File

@@ -1,44 +0,0 @@
--- /dev/null
+++ b/src/main/java/abomination/IRegionFile.java
@@ -1,0 +_,41 @@
+package abomination;
+
+import ca.spottedleaf.moonrise.patches.chunk_system.storage.ChunkSystemRegionFile;
+import net.minecraft.nbt.CompoundTag;
+import net.minecraft.world.level.ChunkPos;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.file.Path;
+
+public interface IRegionFile extends ChunkSystemRegionFile, AutoCloseable {
+ Path getPath();
+
+ DataInputStream getChunkDataInputStream(ChunkPos pos) throws IOException;
+
+ boolean doesChunkExist(ChunkPos pos) throws Exception;
+
+ DataOutputStream getChunkDataOutputStream(ChunkPos pos) throws IOException;
+
+ void flush() throws IOException;
+
+ void clear(ChunkPos pos) throws IOException;
+
+ boolean hasChunk(ChunkPos pos);
+
+ void close() throws IOException;
+
+ void write(ChunkPos pos, ByteBuffer buf) throws IOException;
+
+ CompoundTag getOversizedData(int x, int z) throws IOException;
+
+ boolean isOversized(int x, int z);
+
+ boolean recalculateHeader() throws IOException;
+
+ void setOversized(int x, int z, boolean oversized) throws IOException;
+
+ default int getRecalculateCount() {return 0;} // Luminol - Configurable region file format
+}

View File

@@ -1,625 +0,0 @@
--- /dev/null
+++ b/src/main/java/abomination/LinearRegionFile.java
@@ -1,0 +_,622 @@
+package abomination;
+
+import ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO;
+import com.github.luben.zstd.ZstdInputStream;
+import com.github.luben.zstd.ZstdOutputStream;
+import com.mojang.logging.LogUtils;
+import net.jpountz.lz4.LZ4Compressor;
+import net.jpountz.lz4.LZ4Factory;
+import net.jpountz.lz4.LZ4FastDecompressor;
+import net.openhft.hashing.LongHashFunction;
+import net.minecraft.nbt.CompoundTag;
+import net.minecraft.world.level.chunk.storage.RegionStorageInfo;
+import net.minecraft.world.level.chunk.storage.RegionFileVersion;
+import net.minecraft.world.level.ChunkPos;
+import org.slf4j.Logger;
+
+import javax.annotation.Nullable;
+import java.io.*;
+import java.nio.ByteBuffer;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.LockSupport;
+import java.util.concurrent.locks.ReentrantLock;
+
+// LinearRegionFile_implementation_version_0_5byXymb
+// Just gonna use this string to inform other forks about updates ;-)
+public class LinearRegionFile implements IRegionFile{
+ private static final long SUPERBLOCK = 0xc3ff13183cca9d9aL;
+ private static final byte VERSION = 3;
+ private static final int HEADER_SIZE = 27;
+ private static final int FOOTER_SIZE = 8;
+ private static final Logger LOGGER = LogUtils.getLogger();
+
+ private byte[][] bucketBuffers;
+ private final byte[][] buffer = new byte[1024][];
+ private final int[] bufferUncompressedSize = new int[1024];
+
+ private final long[] chunkTimestamps = new long[1024];
+ private final Object markedToSaveLock = new Object();
+
+ private final LZ4Compressor compressor;
+ private final LZ4FastDecompressor decompressor;
+
+ private boolean markedToSave = false;
+ private boolean close = false;
+
+ public final ReentrantLock fileLock = new ReentrantLock(true);
+ public Path regionFile;
+
+ private final int compressionLevel;
+ private int gridSize = 8;
+ private int bucketSize = 4;
+ private final Thread bindThread;
+
+ public Path getRegionFile() {
+ return this.regionFile;
+ }
+
+ public ReentrantLock getFileLock() {
+ return this.fileLock;
+ }
+
+ private int chunkToBucketIdx(int chunkX, int chunkZ) {
+ int bx = chunkX / bucketSize, bz = chunkZ / bucketSize;
+ return bx * gridSize + bz;
+ }
+
+ private void openBucket(int chunkX, int chunkZ) {
+ chunkX = Math.floorMod(chunkX, 32);
+ chunkZ = Math.floorMod(chunkZ, 32);
+ int idx = chunkToBucketIdx(chunkX, chunkZ);
+
+ if (bucketBuffers == null) return;
+ if (bucketBuffers[idx] != null) {
+ try {
+ ByteArrayInputStream bucketByteStream = new ByteArrayInputStream(bucketBuffers[idx]);
+ ZstdInputStream zstdStream = new ZstdInputStream(bucketByteStream);
+ ByteBuffer bucketBuffer = ByteBuffer.wrap(zstdStream.readAllBytes());
+
+ int bx = chunkX / bucketSize, bz = chunkZ / bucketSize;
+
+ for (int cx = 0; cx < 32 / gridSize; cx++) {
+ for (int cz = 0; cz < 32 / gridSize; cz++) {
+ int chunkIndex = (bx * (32 / gridSize) + cx) + (bz * (32 / gridSize) + cz) * 32;
+
+ int chunkSize = bucketBuffer.getInt();
+ long timestamp = bucketBuffer.getLong();
+ this.chunkTimestamps[chunkIndex] = timestamp;
+
+ if (chunkSize > 0) {
+ byte[] chunkData = new byte[chunkSize - 8];
+ bucketBuffer.get(chunkData);
+
+ int maxCompressedLength = this.compressor.maxCompressedLength(chunkData.length);
+ byte[] compressed = new byte[maxCompressedLength];
+ int compressedLength = this.compressor.compress(chunkData, 0, chunkData.length, compressed, 0, maxCompressedLength);
+ byte[] finalCompressed = new byte[compressedLength];
+ System.arraycopy(compressed, 0, finalCompressed, 0, compressedLength);
+
+ // TODO: Optimization - return the requested chunk immediately to save on one LZ4 decompression
+ this.buffer[chunkIndex] = finalCompressed;
+ this.bufferUncompressedSize[chunkIndex] = chunkData.length;
+ }
+ }
+ }
+ } catch (IOException ex) {
+ throw new RuntimeException("Region file corrupted: " + regionFile + " bucket: " + idx);
+ // TODO: Make sure the server crashes instead of corrupting the world
+ }
+ bucketBuffers[idx] = null;
+ }
+ }
+
+ public boolean regionFileOpen = false;
+
+ private synchronized void openRegionFile() {
+ if (regionFileOpen) return;
+ regionFileOpen = true;
+
+ File regionFile = new File(this.regionFile.toString());
+
+ if(!regionFile.canRead()) {
+ this.bindThread.start();
+ return;
+ }
+
+ try {
+ byte[] fileContent = Files.readAllBytes(this.regionFile);
+ ByteBuffer buffer = ByteBuffer.wrap(fileContent);
+
+ long superBlock = buffer.getLong();
+ if (superBlock != SUPERBLOCK)
+ throw new RuntimeException("Invalid superblock: " + superBlock + " file " + this.regionFile);
+
+ byte version = buffer.get();
+ if (version == 1 || version == 2) {
+ parseLinearV1(buffer);
+ } else if (version == 3) {
+ parseLinearV2(buffer);
+ } else {
+ throw new RuntimeException("Invalid version: " + version + " file " + this.regionFile);
+ }
+
+ this.bindThread.start();
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to open region file " + this.regionFile, e);
+ }
+ }
+
+ private void parseLinearV1(ByteBuffer buffer) throws IOException {
+ final int HEADER_SIZE = 32;
+ final int FOOTER_SIZE = 8;
+
+ // Skip newestTimestamp (Long) + Compression level (Byte) + Chunk count (Short): Unused.
+ buffer.position(buffer.position() + 11);
+
+ int dataCount = buffer.getInt();
+ long fileLength = this.regionFile.toFile().length();
+ if (fileLength != HEADER_SIZE + dataCount + FOOTER_SIZE) {
+ throw new IOException("Invalid file length: " + this.regionFile + " " + fileLength + " " + (HEADER_SIZE + dataCount + FOOTER_SIZE));
+ }
+
+ buffer.position(buffer.position() + 8); // Skip data hash (Long): Unused.
+
+ byte[] rawCompressed = new byte[dataCount];
+ buffer.get(rawCompressed);
+
+ ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(rawCompressed);
+ ZstdInputStream zstdInputStream = new ZstdInputStream(byteArrayInputStream);
+ ByteBuffer decompressedBuffer = ByteBuffer.wrap(zstdInputStream.readAllBytes());
+
+ int[] starts = new int[1024];
+ for (int i = 0; i < 1024; i++) {
+ starts[i] = decompressedBuffer.getInt();
+ decompressedBuffer.getInt(); // Skip timestamps (Int): Unused.
+ }
+
+ for (int i = 0; i < 1024; i++) {
+ if (starts[i] > 0) {
+ int size = starts[i];
+ byte[] chunkData = new byte[size];
+ decompressedBuffer.get(chunkData);
+
+ int maxCompressedLength = this.compressor.maxCompressedLength(size);
+ byte[] compressed = new byte[maxCompressedLength];
+ int compressedLength = this.compressor.compress(chunkData, 0, size, compressed, 0, maxCompressedLength);
+ byte[] finalCompressed = new byte[compressedLength];
+ System.arraycopy(compressed, 0, finalCompressed, 0, compressedLength);
+
+ this.buffer[i] = finalCompressed;
+ this.bufferUncompressedSize[i] = size;
+ this.chunkTimestamps[i] = getTimestamp(); // Use current timestamp as we don't have the original
+ }
+ }
+ }
+
+ private void parseLinearV2(ByteBuffer buffer) throws IOException {
+ buffer.getLong(); // Skip newestTimestamp (Long)
+ gridSize = buffer.get();
+ if (gridSize != 1 && gridSize != 2 && gridSize != 4 && gridSize != 8 && gridSize != 16 && gridSize != 32)
+ throw new RuntimeException("Invalid grid size: " + gridSize + " file " + this.regionFile);
+ bucketSize = 32 / gridSize;
+
+ buffer.getInt(); // Skip region_x (Int)
+ buffer.getInt(); // Skip region_z (Int)
+
+ boolean[] chunkExistenceBitmap = deserializeExistenceBitmap(buffer);
+
+ while (true) {
+ byte featureNameLength = buffer.get();
+ if (featureNameLength == 0) break;
+ byte[] featureNameBytes = new byte[featureNameLength];
+ buffer.get(featureNameBytes);
+ String featureName = new String(featureNameBytes);
+ int featureValue = buffer.getInt();
+ // System.out.println("NBT Feature: " + featureName + " = " + featureValue);
+ }
+
+ int[] bucketSizes = new int[gridSize * gridSize];
+ byte[] bucketCompressionLevels = new byte[gridSize * gridSize];
+ long[] bucketHashes = new long[gridSize * gridSize];
+ for (int i = 0; i < gridSize * gridSize; i++) {
+ bucketSizes[i] = buffer.getInt();
+ bucketCompressionLevels[i] = buffer.get();
+ bucketHashes[i] = buffer.getLong();
+ }
+
+ bucketBuffers = new byte[gridSize * gridSize][];
+ for (int i = 0; i < gridSize * gridSize; i++) {
+ if (bucketSizes[i] > 0) {
+ bucketBuffers[i] = new byte[bucketSizes[i]];
+ buffer.get(bucketBuffers[i]);
+ long rawHash = LongHashFunction.xx().hashBytes(bucketBuffers[i]);
+ if (rawHash != bucketHashes[i]) throw new IOException("Region file hash incorrect " + this.regionFile);
+ }
+ }
+
+ long footerSuperBlock = buffer.getLong();
+ if (footerSuperBlock != SUPERBLOCK)
+ throw new IOException("Footer superblock invalid " + this.regionFile);
+ }
+
+ public LinearRegionFile(RegionStorageInfo storageKey, Path directory, Path path, boolean dsync, int compressionLevel) throws IOException {
+ this(storageKey, directory, path, RegionFileVersion.getCompressionFormat(), dsync, compressionLevel);
+ }
+
+ public LinearRegionFile(RegionStorageInfo storageKey, Path path, Path directory, RegionFileVersion compressionFormat, boolean dsync, int compressionLevel) throws IOException {
+ Runnable flushCheck = () -> {
+ while (!close) {
+ synchronized (saveLock) {
+ if (markedToSave && activeSaveThreads < SAVE_THREAD_MAX_COUNT) {
+ activeSaveThreads++;
+ Runnable flushOperation = () -> {
+ try {
+ flush();
+ } catch (IOException ex) {
+ LOGGER.error("Region file {} flush failed", this.regionFile.toAbsolutePath(), ex);
+ } finally {
+ synchronized (saveLock) {
+ activeSaveThreads--;
+ }
+ }
+ };
+
+ Thread saveThread = USE_VIRTUAL_THREAD ?
+ Thread.ofVirtual().name("Linear IO - " + LinearRegionFile.this.hashCode()).unstarted(flushOperation) :
+ Thread.ofPlatform().name("Linear IO - " + LinearRegionFile.this.hashCode()).unstarted(flushOperation);
+ saveThread.setPriority(Thread.NORM_PRIORITY - 3);
+ saveThread.start();
+ }
+ }
+ LockSupport.parkNanos(TimeUnit.MILLISECONDS.toNanos(SAVE_DELAY_MS));
+ }
+ };
+ this.bindThread = USE_VIRTUAL_THREAD ? Thread.ofVirtual().unstarted(flushCheck) : Thread.ofPlatform().unstarted(flushCheck);
+ this.bindThread.setName("Linear IO Schedule - " + this.hashCode());
+ this.regionFile = path;
+ this.compressionLevel = compressionLevel;
+
+ this.compressor = LZ4Factory.fastestInstance().fastCompressor();
+ this.decompressor = LZ4Factory.fastestInstance().fastDecompressor();
+ }
+
+ private synchronized void markToSave() {
+ synchronized(markedToSaveLock) {
+ markedToSave = true;
+ }
+ }
+
+ private synchronized boolean isMarkedToSave() {
+ synchronized(markedToSaveLock) {
+ if(markedToSave) {
+ markedToSave = false;
+ return true;
+ }
+ return false;
+ }
+ }
+
+ public static int SAVE_THREAD_MAX_COUNT = 6;
+ public static int SAVE_DELAY_MS = 100;
+ public static boolean USE_VIRTUAL_THREAD = true;
+ private static final Object saveLock = new Object();
+ private static int activeSaveThreads = 0;
+
+ /*public void run() {
+ while (!close) {
+ synchronized (saveLock) {
+ if (markedToSave && activeSaveThreads < SAVE_THREAD_MAX_COUNT) {
+ activeSaveThreads++;
+ Thread saveThread = new Thread(() -> {
+ try {
+ flush();
+ } catch (IOException ex) {
+ LOGGER.error("Region file " + this.regionFile.toAbsolutePath() + " flush failed", ex);
+ } finally {
+ synchronized (saveLock) {
+ activeSaveThreads--;
+ }
+ }
+ }, "RegionFileFlush");
+ saveThread.setPriority(Thread.NORM_PRIORITY - 3);
+ saveThread.start();
+ }
+ }
+ LockSupport.parkNanos(TimeUnit.MILLISECONDS.toNanos(SAVE_DELAY_MS));
+ }
+ }*/
+
+ public synchronized boolean doesChunkExist(ChunkPos pos) throws Exception {
+ openRegionFile();
+ throw new Exception("doesChunkExist is a stub");
+ }
+
+ public synchronized void flush() throws IOException {
+ if(!isMarkedToSave()) return;
+
+ openRegionFile();
+
+ long timestamp = getTimestamp();
+
+long writeStart = System.nanoTime();
+ File tempFile = new File(regionFile.toString() + ".tmp");
+ FileOutputStream fileStream = new FileOutputStream(tempFile);
+ DataOutputStream dataStream = new DataOutputStream(fileStream);
+
+ dataStream.writeLong(SUPERBLOCK);
+ dataStream.writeByte(VERSION);
+ dataStream.writeLong(timestamp);
+ dataStream.writeByte(gridSize);
+
+ String fileName = regionFile.getFileName().toString();
+ String[] parts = fileName.split("\\.");
+ int regionX = 0;
+ int regionZ = 0;
+ try {
+ if (parts.length >= 4) {
+ regionX = Integer.parseInt(parts[1]);
+ regionZ = Integer.parseInt(parts[2]);
+ } else {
+ LOGGER.warn("Unexpected file name format: " + fileName);
+ }
+ } catch (NumberFormatException e) {
+ LOGGER.error("Failed to parse region coordinates from file name: " + fileName, e);
+ }
+
+ dataStream.writeInt(regionX);
+ dataStream.writeInt(regionZ);
+
+ boolean[] chunkExistenceBitmap = new boolean[1024];
+ for (int i = 0; i < 1024; i++) {
+ chunkExistenceBitmap[i] = (this.bufferUncompressedSize[i] > 0);
+ }
+ writeSerializedExistenceBitmap(dataStream, chunkExistenceBitmap);
+
+ writeNBTFeatures(dataStream);
+
+ int bucketMisses = 0;
+ byte[][] buckets = new byte[gridSize * gridSize][];
+ for (int bx = 0; bx < gridSize; bx++) {
+ for (int bz = 0; bz < gridSize; bz++) {
+ if (bucketBuffers != null && bucketBuffers[bx * gridSize + bz] != null) {
+ buckets[bx * gridSize + bz] = bucketBuffers[bx * gridSize + bz];
+ continue;
+ }
+ bucketMisses++;
+
+ ByteArrayOutputStream bucketStream = new ByteArrayOutputStream();
+ ZstdOutputStream zstdStream = new ZstdOutputStream(bucketStream, this.compressionLevel);
+ DataOutputStream bucketDataStream = new DataOutputStream(zstdStream);
+
+ boolean hasData = false;
+ for (int cx = 0; cx < 32 / gridSize; cx++) {
+ for (int cz = 0; cz < 32 / gridSize; cz++) {
+ int chunkIndex = (bx * 32 / gridSize + cx) + (bz * 32 / gridSize + cz) * 32;
+ if (this.bufferUncompressedSize[chunkIndex] > 0) {
+ hasData = true;
+ byte[] chunkData = new byte[this.bufferUncompressedSize[chunkIndex]];
+ this.decompressor.decompress(this.buffer[chunkIndex], 0, chunkData, 0, this.bufferUncompressedSize[chunkIndex]);
+ bucketDataStream.writeInt(chunkData.length + 8);
+ bucketDataStream.writeLong(this.chunkTimestamps[chunkIndex]);
+ bucketDataStream.write(chunkData);
+ } else {
+ bucketDataStream.writeInt(0);
+ bucketDataStream.writeLong(this.chunkTimestamps[chunkIndex]);
+ }
+ }
+ }
+ bucketDataStream.close();
+
+ if (hasData) {
+ buckets[bx * gridSize + bz] = bucketStream.toByteArray();
+ }
+ }
+ }
+
+ for (int i = 0; i < gridSize * gridSize; i++) {
+ dataStream.writeInt(buckets[i] != null ? buckets[i].length : 0);
+ dataStream.writeByte(this.compressionLevel);
+ long rawHash = 0;
+ if (buckets[i] != null) {
+ rawHash = LongHashFunction.xx().hashBytes(buckets[i]);
+ }
+ dataStream.writeLong(rawHash);
+ }
+
+ for (int i = 0; i < gridSize * gridSize; i++) {
+ if (buckets[i] != null) {
+ dataStream.write(buckets[i]);
+ }
+ }
+
+ dataStream.writeLong(SUPERBLOCK);
+
+ dataStream.flush();
+ fileStream.getFD().sync();
+ fileStream.getChannel().force(true); // Ensure atomicity on Btrfs
+ dataStream.close();
+
+ fileStream.close();
+ Files.move(tempFile.toPath(), this.regionFile, StandardCopyOption.REPLACE_EXISTING);
+//System.out.println("writeStart REGION FILE FLUSH " + (System.nanoTime() - writeStart) + " misses: " + bucketMisses);
+ }
+
    /**
     * Writes the (currently empty) NBT feature table; a single 0 byte marks the
     * end of the list.
     */
    private void writeNBTFeatures(DataOutputStream dataStream) throws IOException {
        // writeNBTFeature(dataStream, "example", 1);
        dataStream.writeByte(0); // End of NBT features
    }
+
+ private void writeNBTFeature(DataOutputStream dataStream, String featureName, int featureValue) throws IOException {
+ byte[] featureNameBytes = featureName.getBytes();
+ dataStream.writeByte(featureNameBytes.length);
+ dataStream.write(featureNameBytes);
+ dataStream.writeInt(featureValue);
+ }
+
    // Hard cap (500 MiB) on a chunk's serialized size; write() rejects larger
    // chunks, defeating oversized-chunk dupe exploits.
    public static final int MAX_CHUNK_SIZE = 500 * 1024 * 1024; // Abomination - prevent chunk dupe
+
+ public synchronized void write(ChunkPos pos, ByteBuffer buffer) {
+ openRegionFile();
+ openBucket(pos.x, pos.z);
+ try {
+ byte[] b = toByteArray(new ByteArrayInputStream(buffer.array()));
+ int uncompressedSize = b.length;
+
+ if (uncompressedSize > MAX_CHUNK_SIZE) {
+ LOGGER.error("Chunk dupe attempt " + this.regionFile);
+ clear(pos);
+ } else {
+ int maxCompressedLength = this.compressor.maxCompressedLength(b.length);
+ byte[] compressed = new byte[maxCompressedLength];
+ int compressedLength = this.compressor.compress(b, 0, b.length, compressed, 0, maxCompressedLength);
+ b = new byte[compressedLength];
+ System.arraycopy(compressed, 0, b, 0, compressedLength);
+
+ int index = getChunkIndex(pos.x, pos.z);
+ this.buffer[index] = b;
+ this.chunkTimestamps[index] = getTimestamp();
+ this.bufferUncompressedSize[getChunkIndex(pos.x, pos.z)] = uncompressedSize;
+ }
+ } catch (IOException e) {
+ LOGGER.error("Chunk write IOException " + e + " " + this.regionFile);
+ }
+ markToSave();
+ }
+
    /**
     * Opens a stream callers serialize chunk data into; closing the stream
     * hands the buffered bytes to {@link #write(ChunkPos, ByteBuffer)} via
     * {@link ChunkBuffer}.
     */
    public DataOutputStream getChunkDataOutputStream(ChunkPos pos) {
        openRegionFile();
        openBucket(pos.x, pos.z);
        return new DataOutputStream(new BufferedOutputStream(new LinearRegionFile.ChunkBuffer(pos)));
    }
+
    /**
     * Moonrise chunk-system hook: returns a WriteData wrapping
     * {@link #getChunkDataOutputStream(ChunkPos)} whose completion callback
     * closes the stream, which performs the actual in-memory write.
     */
    @Override
    public MoonriseRegionFileIO.RegionDataController.WriteData moonrise$startWrite(CompoundTag data, ChunkPos pos) throws IOException {
        final DataOutputStream out = this.getChunkDataOutputStream(pos);

        return new ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO.RegionDataController.WriteData(
            data, ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO.RegionDataController.WriteData.WriteResult.WRITE,
            out, regionFile -> out.close()
        );
    }
+
+ private class ChunkBuffer extends ByteArrayOutputStream {
+
+ private final ChunkPos pos;
+
+ public ChunkBuffer(ChunkPos chunkcoordintpair) {
+ super();
+ this.pos = chunkcoordintpair;
+ }
+
+ public void close() throws IOException {
+ ByteBuffer bytebuffer = ByteBuffer.wrap(this.buf, 0, this.count);
+ LinearRegionFile.this.write(this.pos, bytebuffer);
+ }
+ }
+
+ private byte[] toByteArray(InputStream in) throws IOException {
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
+ byte[] tempBuffer = new byte[4096];
+
+ int length;
+ while ((length = in.read(tempBuffer)) >= 0) {
+ out.write(tempBuffer, 0, length);
+ }
+
+ return out.toByteArray();
+ }
+
+ @Nullable
+ public synchronized DataInputStream getChunkDataInputStream(ChunkPos pos) {
+ openRegionFile();
+ openBucket(pos.x, pos.z);
+
+ if(this.bufferUncompressedSize[getChunkIndex(pos.x, pos.z)] != 0) {
+ byte[] content = new byte[bufferUncompressedSize[getChunkIndex(pos.x, pos.z)]];
+ this.decompressor.decompress(this.buffer[getChunkIndex(pos.x, pos.z)], 0, content, 0, bufferUncompressedSize[getChunkIndex(pos.x, pos.z)]);
+ return new DataInputStream(new ByteArrayInputStream(content));
+ }
+ return null;
+ }
+
+ public synchronized void clear(ChunkPos pos) {
+ openRegionFile();
+ openBucket(pos.x, pos.z);
+ int i = getChunkIndex(pos.x, pos.z);
+ this.buffer[i] = null;
+ this.bufferUncompressedSize[i] = 0;
+ this.chunkTimestamps[i] = 0;
+ markToSave();
+ }
+
+ public synchronized boolean hasChunk(ChunkPos pos) {
+ openRegionFile();
+ openBucket(pos.x, pos.z);
+ return this.bufferUncompressedSize[getChunkIndex(pos.x, pos.z)] > 0;
+ }
+
+ public synchronized void close() throws IOException {
+ openRegionFile();
+ close = true;
+ try {
+ flush();
+ } catch(IOException e) {
+ throw new IOException("Region flush IOException " + e + " " + this.regionFile);
+ }
+ }
+
+ private static int getChunkIndex(int x, int z) {
+ return (x & 31) + ((z & 31) << 5);
+ }
+
    /**
     * Current wall-clock time in whole seconds.
     * NOTE(review): truncated to int (overflows in 2038); appears to match the
     * on-disk header field width, so widening would be a format change — confirm.
     */
    private static int getTimestamp() {
        return (int) (System.currentTimeMillis() / 1000L);
    }
+
    /** No-op: the linear format has no MCA-style sector header to recalculate. */
    public boolean recalculateHeader() {
        return false;
    }
+
    /** No-op: oversized tracking is unnecessary — write() enforces MAX_CHUNK_SIZE. */
    public void setOversized(int x, int z, boolean something) {}
+
    /**
     * Stub: linear files never store separate oversized data.
     *
     * @throws IOException always
     */
    public CompoundTag getOversizedData(int x, int z) throws IOException {
        throw new IOException("getOversizedData is a stub " + this.regionFile);
    }
+
    /** Linear storage never marks chunks as oversized. */
    public boolean isOversized(int x, int z) {
        return false;
    }
+
    /** @return path of the backing region file on disk */
    public Path getPath() {
        return this.regionFile;
    }
+
+ private boolean[] deserializeExistenceBitmap(ByteBuffer buffer) {
+ boolean[] result = new boolean[1024];
+ for (int i = 0; i < 128; i++) {
+ byte b = buffer.get();
+ for (int j = 0; j < 8; j++) {
+ result[i * 8 + j] = ((b >> (7 - j)) & 1) == 1;
+ }
+ }
+ return result;
+ }
+
+ private void writeSerializedExistenceBitmap(DataOutputStream out, boolean[] bitmap) throws IOException {
+ for (int i = 0; i < 128; i++) {
+ byte b = 0;
+ for (int j = 0; j < 8; j++) {
+ if (bitmap[i * 8 + j]) {
+ b |= (1 << (7 - j));
+ }
+ }
+ out.writeByte(b);
+ }
+ }
+}

View File

@@ -1,93 +0,0 @@
--- /dev/null
+++ b/src/main/java/com/kiocg/ChunkHot.java
@@ -1,0 +_,90 @@
+package com.kiocg;
+
+import java.util.Arrays;
+
public class ChunkHot {
    // Number of sampling windows kept in the rolling history.
    private static final int TIMES_LENGTH = 10;
    // Index of the window currently being filled (-1 until the first nextTick()).
    private int index = -1;

    // Rolling history of per-window heat (accumulated nanoseconds).
    private final long[] times = new long[TIMES_LENGTH];
    // Previous value of the window slot being overwritten, so the running total
    // can be corrected for the in-progress window.
    private long temp;
    // Sum of heat across all completed windows.
    private long total;

    // Start timestamp of the measurement currently in flight.
    private long nanos;
    // Whether a window is currently being sampled.
    private volatile boolean started = false;

    /** Advances to the next window slot, wrapping around the history. */
    public void nextTick() {
        this.index = (this.index + 1) % TIMES_LENGTH;
    }

    /** Begins sampling a new window, remembering the value it replaces. */
    public void start() {
        started = true;
        temp = times[this.index];
        times[this.index] = 0L;
    }

    public boolean isStarted() {
        return this.started;
    }

    /** Finishes the current window and folds its heat into the running total. */
    public void stop() {
        started = false;
        total += times[this.index] - temp;
    }

    /** Starts one individual measurement (no-op unless a window is active). */
    public void startTicking() {
        if (!started) return;
        nanos = System.nanoTime();
    }

    /**
     * Ends one individual measurement and adds it to the active window.
     * Each sample is capped at 1,000,000 ns because occasional spikes
     * (e.g. saving data to disk) would otherwise dominate the heat value.
     */
    public void stopTickingAndCount() {
        if (!started) return;
        times[this.index] += Math.min(System.nanoTime() - nanos, 1000000L);
    }

    /** Resets all statistics (called when the chunk unloads). */
    public void clear() {
        started = false;
        Arrays.fill(times, 0L);
        temp = 0L;
        total = 0L;
        nanos = 0L;
    }

    /** @return average chunk heat over the history (20 ticks per window). */
    public long getAverage() {
        return total / ((long) TIMES_LENGTH * 20L);
    }
}

View File

@@ -1,37 +0,0 @@
--- /dev/null
+++ b/src/main/java/com/logisticscraft/occlusionculling/DataProvider.java
@@ -1,0 +_,34 @@
+package com.logisticscraft.occlusionculling;
+
+import com.logisticscraft.occlusionculling.util.Vec3d;
+
public interface DataProvider {

    /**
     * Prepares the requested chunk. Returns true if the chunk is ready, false
     * when not loaded. Should not reload the chunk when chunkX and chunkZ are
     * the same as the last request!
     *
     * @param chunkX chunk X coordinate (block X >> 4)
     * @param chunkZ chunk Z coordinate (block Z >> 4)
     * @return true when the chunk's data is available for queries
     */
    boolean prepareChunk(int chunkX, int chunkZ);

    /**
     * Whether the block at the given position is a full opaque cube (and thus
     * blocks rays). The location is inside the most recently prepared chunk.
     *
     * @param x block X coordinate
     * @param y block Y coordinate
     * @param z block Z coordinate
     * @return true if the block fully occludes
     */
    boolean isOpaqueFullCube(int x, int y, int z);

    /** Optional hook invoked after each ray batch; release chunk references here. */
    default void cleanup() {
    }

    /** Optional debug hook observing the sample points about to be ray-checked. */
    default void checkingPosition(Vec3d[] targetPoints, int size, Vec3d viewerPosition) {
    }

}

View File

@@ -1,518 +0,0 @@
--- /dev/null
+++ b/src/main/java/com/logisticscraft/occlusionculling/OcclusionCullingInstance.java
@@ -1,0 +_,515 @@
+package com.logisticscraft.occlusionculling;
+
+import java.util.Arrays;
+import java.util.BitSet;
+
+import com.logisticscraft.occlusionculling.cache.ArrayOcclusionCache;
+import com.logisticscraft.occlusionculling.cache.OcclusionCache;
+import com.logisticscraft.occlusionculling.util.MathUtilities;
+import com.logisticscraft.occlusionculling.util.Vec3d;
+
/**
 * Ray-cast based AABB occlusion culling. For each candidate AABB, sample
 * points are chosen on its viewer-facing faces and a voxel grid walk is run
 * from the viewer toward each point; the AABB is visible if any ray escapes
 * the occluding blocks. Results are memoized per viewer position in an
 * {@link OcclusionCache}. Instances are not thread-safe: scratch buffers
 * (skipList, targetPoints, dotselectors, ...) are reused across calls.
 */
public class OcclusionCullingInstance {

    // Bit flags marking which faces of the AABB a voxel lies on / is visible from.
    private static final int ON_MIN_X = 0x01;
    private static final int ON_MAX_X = 0x02;
    private static final int ON_MIN_Y = 0x04;
    private static final int ON_MAX_Y = 0x08;
    private static final int ON_MIN_Z = 0x10;
    private static final int ON_MAX_Z = 0x20;

    // Maximum culling distance in blocks; also sizes the cache.
    private final int reach;
    // Amount the target AABB is inflated by before sampling.
    private final double aabbExpansion;
    private final DataProvider provider;
    private final OcclusionCache cache;

    // Reused allocated data structures
    private final BitSet skipList = new BitSet(); // Grows bigger in case some mod introduces giant hitboxes
    private final Vec3d[] targetPoints = new Vec3d[15];
    private final Vec3d targetPos = new Vec3d(0, 0, 0);
    private final int[] cameraPos = new int[3];
    private final boolean[] dotselectors = new boolean[14];
    // Becomes true after a ray hit a wall; enables the lastHitBlock fast path.
    private boolean allowRayChecks = false;
    private final int[] lastHitBlock = new int[3];
    // While true the ray may pass through occluders (used when it starts inside a wall).
    private boolean allowWallClipping = false;

    public OcclusionCullingInstance(int maxDistance, DataProvider provider) {
        this(maxDistance, provider, new ArrayOcclusionCache(maxDistance), 0.5);
    }

    public OcclusionCullingInstance(int maxDistance, DataProvider provider, OcclusionCache cache, double aabbExpansion) {
        this.reach = maxDistance;
        this.provider = provider;
        this.cache = cache;
        this.aabbExpansion = aabbExpansion;
        for (int i = 0; i < targetPoints.length; i++) {
            targetPoints[i] = new Vec3d(0, 0, 0);
        }
    }

    /**
     * Whether any part of the (expanded) AABB is visible from the viewer.
     * Conservative: returns true on any internal error.
     */
    public boolean isAABBVisible(Vec3d aabbMin, Vec3d aabbMax, Vec3d viewerPosition) {
        try {
            int maxX = MathUtilities.floor(aabbMax.x + aabbExpansion);
            int maxY = MathUtilities.floor(aabbMax.y + aabbExpansion);
            int maxZ = MathUtilities.floor(aabbMax.z + aabbExpansion);
            int minX = MathUtilities.floor(aabbMin.x - aabbExpansion);
            int minY = MathUtilities.floor(aabbMin.y - aabbExpansion);
            int minZ = MathUtilities.floor(aabbMin.z - aabbExpansion);

            cameraPos[0] = MathUtilities.floor(viewerPosition.x);
            cameraPos[1] = MathUtilities.floor(viewerPosition.y);
            cameraPos[2] = MathUtilities.floor(viewerPosition.z);

            Relative relX = Relative.from(minX, maxX, cameraPos[0]);
            Relative relY = Relative.from(minY, maxY, cameraPos[1]);
            Relative relZ = Relative.from(minZ, maxZ, cameraPos[2]);

            if (relX == Relative.INSIDE && relY == Relative.INSIDE && relZ == Relative.INSIDE) {
                return true; // We are inside of the AABB, don't cull
            }

            skipList.clear();

            // Just check the cache first
            int id = 0;
            for (int x = minX; x <= maxX; x++) {
                for (int y = minY; y <= maxY; y++) {
                    for (int z = minZ; z <= maxZ; z++) {
                        int cachedValue = getCacheValue(x, y, z);

                        if (cachedValue == 1) {
                            // non-occluding
                            return true;
                        }

                        if (cachedValue != 0) {
                            // was checked and it wasn't visible
                            skipList.set(id);
                        }
                        id++;
                    }
                }
            }

            // only after the first hit wall the cache becomes valid.
            allowRayChecks = false;

            // since the cache wasn't helpful, ray-walk the remaining voxels
            id = 0;
            for (int x = minX; x <= maxX; x++) {
                byte visibleOnFaceX = 0;
                byte faceEdgeDataX = 0;
                faceEdgeDataX |= (x == minX) ? ON_MIN_X : 0;
                faceEdgeDataX |= (x == maxX) ? ON_MAX_X : 0;
                visibleOnFaceX |= (x == minX && relX == Relative.POSITIVE) ? ON_MIN_X : 0;
                visibleOnFaceX |= (x == maxX && relX == Relative.NEGATIVE) ? ON_MAX_X : 0;
                for (int y = minY; y <= maxY; y++) {
                    byte faceEdgeDataY = faceEdgeDataX;
                    byte visibleOnFaceY = visibleOnFaceX;
                    faceEdgeDataY |= (y == minY) ? ON_MIN_Y : 0;
                    faceEdgeDataY |= (y == maxY) ? ON_MAX_Y : 0;
                    visibleOnFaceY |= (y == minY && relY == Relative.POSITIVE) ? ON_MIN_Y : 0;
                    visibleOnFaceY |= (y == maxY && relY == Relative.NEGATIVE) ? ON_MAX_Y : 0;
                    for (int z = minZ; z <= maxZ; z++) {
                        byte faceEdgeData = faceEdgeDataY;
                        byte visibleOnFace = visibleOnFaceY;
                        faceEdgeData |= (z == minZ) ? ON_MIN_Z : 0;
                        faceEdgeData |= (z == maxZ) ? ON_MAX_Z : 0;
                        visibleOnFace |= (z == minZ && relZ == Relative.POSITIVE) ? ON_MIN_Z : 0;
                        visibleOnFace |= (z == maxZ && relZ == Relative.NEGATIVE) ? ON_MAX_Z : 0;
                        if (skipList.get(id)) { // was checked and it wasn't visible
                            id++;
                            continue;
                        }

                        if (visibleOnFace != 0) {
                            targetPos.set(x, y, z);
                            if (isVoxelVisible(viewerPosition, targetPos, faceEdgeData, visibleOnFace)) {
                                return true;
                            }
                        }
                        id++;
                    }
                }
            }

            return false;
        } catch (Throwable t) {
            // Failsafe
            t.printStackTrace();
        }
        return true;
    }

    /**
     * Picks sample points on the voxel's viewer-facing faces and ray-checks them.
     *
     * @param viewerPosition ray origin
     * @param position       voxel being tested
     * @param faceData       whether this block is on the AABB's outside for a given face
     * @param visibleOnFace  whether a face should be considered at all
     * @return true when any sample point is reachable from the viewer
     */
    private boolean isVoxelVisible(Vec3d viewerPosition, Vec3d position, byte faceData, byte visibleOnFace) {
        int targetSize = 0;
        Arrays.fill(dotselectors, false);
        if ((visibleOnFace & ON_MIN_X) == ON_MIN_X) {
            dotselectors[0] = true;
            if ((faceData & ~ON_MIN_X) != 0) {
                dotselectors[1] = true;
                dotselectors[4] = true;
                dotselectors[5] = true;
            }
            dotselectors[8] = true;
        }
        if ((visibleOnFace & ON_MIN_Y) == ON_MIN_Y) {
            dotselectors[0] = true;
            if ((faceData & ~ON_MIN_Y) != 0) {
                dotselectors[3] = true;
                dotselectors[4] = true;
                dotselectors[7] = true;
            }
            dotselectors[9] = true;
        }
        if ((visibleOnFace & ON_MIN_Z) == ON_MIN_Z) {
            dotselectors[0] = true;
            if ((faceData & ~ON_MIN_Z) != 0) {
                dotselectors[1] = true;
                dotselectors[4] = true;
                dotselectors[5] = true;
            }
            dotselectors[10] = true;
        }
        if ((visibleOnFace & ON_MAX_X) == ON_MAX_X) {
            dotselectors[4] = true;
            if ((faceData & ~ON_MAX_X) != 0) {
                dotselectors[5] = true;
                dotselectors[6] = true;
                dotselectors[7] = true;
            }
            dotselectors[11] = true;
        }
        if ((visibleOnFace & ON_MAX_Y) == ON_MAX_Y) {
            dotselectors[1] = true;
            if ((faceData & ~ON_MAX_Y) != 0) {
                dotselectors[2] = true;
                dotselectors[5] = true;
                dotselectors[6] = true;
            }
            dotselectors[12] = true;
        }
        if ((visibleOnFace & ON_MAX_Z) == ON_MAX_Z) {
            dotselectors[2] = true;
            if ((faceData & ~ON_MAX_Z) != 0) {
                dotselectors[3] = true;
                dotselectors[6] = true;
                dotselectors[7] = true;
            }
            dotselectors[13] = true;
        }

        // Corner points (slightly inset so they stay inside the voxel)
        if (dotselectors[0]) targetPoints[targetSize++].setAdd(position, 0.05, 0.05, 0.05);
        if (dotselectors[1]) targetPoints[targetSize++].setAdd(position, 0.05, 0.95, 0.05);
        if (dotselectors[2]) targetPoints[targetSize++].setAdd(position, 0.05, 0.95, 0.95);
        if (dotselectors[3]) targetPoints[targetSize++].setAdd(position, 0.05, 0.05, 0.95);
        if (dotselectors[4]) targetPoints[targetSize++].setAdd(position, 0.95, 0.05, 0.05);
        if (dotselectors[5]) targetPoints[targetSize++].setAdd(position, 0.95, 0.95, 0.05);
        if (dotselectors[6]) targetPoints[targetSize++].setAdd(position, 0.95, 0.95, 0.95);
        if (dotselectors[7]) targetPoints[targetSize++].setAdd(position, 0.95, 0.05, 0.95);
        // middle points
        if (dotselectors[8]) targetPoints[targetSize++].setAdd(position, 0.05, 0.5, 0.5);
        if (dotselectors[9]) targetPoints[targetSize++].setAdd(position, 0.5, 0.05, 0.5);
        if (dotselectors[10]) targetPoints[targetSize++].setAdd(position, 0.5, 0.5, 0.05);
        if (dotselectors[11]) targetPoints[targetSize++].setAdd(position, 0.95, 0.5, 0.5);
        if (dotselectors[12]) targetPoints[targetSize++].setAdd(position, 0.5, 0.95, 0.5);
        if (dotselectors[13]) targetPoints[targetSize++].setAdd(position, 0.5, 0.5, 0.95);

        return isVisible(viewerPosition, targetPoints, targetSize);
    }

    /**
     * Slab test between a unit block and a ray. NOTE(review): the direction
     * passed in points from the target BACK toward the viewer (see the caller),
     * so a block lying between them intersects at negative t — hence the
     * {@code tmax > 0} early-out, which looks inverted relative to the textbook
     * slab test. Verify against upstream before "fixing".
     */
    private boolean rayIntersection(int[] b, Vec3d rayOrigin, Vec3d rayDir) {
        Vec3d rInv = new Vec3d(1, 1, 1).div(rayDir);

        double t1 = (b[0] - rayOrigin.x) * rInv.x;
        double t2 = (b[0] + 1 - rayOrigin.x) * rInv.x;
        double t3 = (b[1] - rayOrigin.y) * rInv.y;
        double t4 = (b[1] + 1 - rayOrigin.y) * rInv.y;
        double t5 = (b[2] - rayOrigin.z) * rInv.z;
        double t6 = (b[2] + 1 - rayOrigin.z) * rInv.z;

        double tmin = Math.max(Math.max(Math.min(t1, t2), Math.min(t3, t4)), Math.min(t5, t6));
        double tmax = Math.min(Math.min(Math.max(t1, t2), Math.max(t3, t4)), Math.max(t5, t6));

        // if tmax > 0, ray (line) is intersecting AABB, but the whole AABB is behind us
        if (tmax > 0) {
            return false;
        }

        // if tmin > tmax, ray doesn't intersect AABB
        if (tmin > tmax) {
            return false;
        }

        return true;
    }

    /**
     * returns the grid cells that intersect with this Vec3d<br>
     * <a href=
     * "http://playtechs.blogspot.de/2007/03/raytracing-on-grid.html">http://playtechs.blogspot.de/2007/03/raytracing-on-grid.html</a>
     * <p>
     * Caching assumes that all Vec3d's are inside the same block
     */
    private boolean isVisible(Vec3d start, Vec3d[] targets, int size) {
        // start cell coordinate
        int x = cameraPos[0];
        int y = cameraPos[1];
        int z = cameraPos[2];

        for (int v = 0; v < size; v++) {
            // ray-casting target
            Vec3d target = targets[v];

            double relativeX = start.x - target.getX();
            double relativeY = start.y - target.getY();
            double relativeZ = start.z - target.getZ();

            // Fast path: if the last wall we hit already blocks this target, skip it.
            if (allowRayChecks && rayIntersection(lastHitBlock, start, new Vec3d(relativeX, relativeY, relativeZ).normalize())) {
                continue;
            }

            // horizontal and vertical cell amount spanned
            double dimensionX = Math.abs(relativeX);
            double dimensionY = Math.abs(relativeY);
            double dimensionZ = Math.abs(relativeZ);

            // distance between intersection points with cell borders as a
            // fraction of the total Vec3d length
            double dimFracX = 1f / dimensionX;
            double dimFracY = 1f / dimensionY;
            double dimFracZ = 1f / dimensionZ;

            // total amount of intersected cells
            int intersectCount = 1;

            // 1, 0 or -1: direction of the next cell on each axis
            int x_inc, y_inc, z_inc;

            // distance to the next intersection point with a cell border,
            // as a fraction of the total Vec3d length
            double t_next_y, t_next_x, t_next_z;

            if (dimensionX == 0f) {
                x_inc = 0;
                t_next_x = dimFracX; // don't step on X: the ray is axis-parallel
            } else if (target.x > start.x) {
                x_inc = 1; // step +X
                intersectCount += MathUtilities.floor(target.x) - x; // cells crossed on X
                t_next_x = (float) ((x + 1 - start.x) * dimFracX); // first X border from inside the start cell
            } else {
                x_inc = -1; // step -X
                intersectCount += x - MathUtilities.floor(target.x); // cells crossed on X
                t_next_x = (float) ((start.x - x) * dimFracX); // first X border from inside the start cell
            }

            if (dimensionY == 0f) {
                y_inc = 0;
                t_next_y = dimFracY; // don't step on Y: the ray is axis-parallel
            } else if (target.y > start.y) {
                y_inc = 1; // step +Y
                intersectCount += MathUtilities.floor(target.y) - y; // cells crossed on Y
                t_next_y = (float) ((y + 1 - start.y) * dimFracY); // first Y border from inside the start cell
            } else {
                y_inc = -1; // step -Y
                intersectCount += y - MathUtilities.floor(target.y); // cells crossed on Y
                t_next_y = (float) ((start.y - y) * dimFracY); // first Y border from inside the start cell
            }

            if (dimensionZ == 0f) {
                z_inc = 0;
                t_next_z = dimFracZ; // don't step on Z: the ray is axis-parallel
            } else if (target.z > start.z) {
                z_inc = 1; // step +Z
                intersectCount += MathUtilities.floor(target.z) - z; // cells crossed on Z
                t_next_z = (float) ((z + 1 - start.z) * dimFracZ); // first Z border from inside the start cell
            } else {
                z_inc = -1; // step -Z
                intersectCount += z - MathUtilities.floor(target.z); // cells crossed on Z
                t_next_z = (float) ((start.z - z) * dimFracZ); // first Z border from inside the start cell
            }

            boolean finished = stepRay(start, x, y, z,
                dimFracX, dimFracY, dimFracZ, intersectCount, x_inc, y_inc,
                z_inc, t_next_y, t_next_x, t_next_z);
            provider.cleanup();
            if (finished) {
                cacheResult(targets[0], true);
                return true;
            } else {
                allowRayChecks = true;
            }
        }
        cacheResult(targets[0], false);
        return false;
    }

    /**
     * Amanatides & Woo style voxel walk from the viewer cell toward the target;
     * returns true when the walk completes without hitting an occluder.
     * The {@code start} parameter is currently unused.
     */
    private boolean stepRay(Vec3d start, int currentX, int currentY,
                            int currentZ, double distInX, double distInY,
                            double distInZ, int n, int x_inc, int y_inc,
                            int z_inc, double t_next_y, double t_next_x,
                            double t_next_z) {
        allowWallClipping = true; // initially allow rays to go through walls till they are on the outside
        // iterate through all intersecting cells (n times)
        for (; n > 1; n--) { // n-1 times because we don't want to check the last block

            // get cached value, 0 means uncached (default)
            int cVal = getCacheValue(currentX, currentY, currentZ);

            if (cVal == 2 && !allowWallClipping) {
                // block cached as occluding, stop ray
                lastHitBlock[0] = currentX;
                lastHitBlock[1] = currentY;
                lastHitBlock[2] = currentZ;
                return false;
            }

            if (cVal == 0) {
                // save current cell
                int chunkX = currentX >> 4;
                int chunkZ = currentZ >> 4;

                if (!provider.prepareChunk(chunkX, chunkZ)) { // Chunk not ready
                    return false;
                }

                if (provider.isOpaqueFullCube(currentX, currentY, currentZ)) {
                    if (!allowWallClipping) {
                        cache.setLastHidden();
                        lastHitBlock[0] = currentX;
                        lastHitBlock[1] = currentY;
                        lastHitBlock[2] = currentZ;
                        return false;
                    }
                } else {
                    // outside of wall, now clipping is not allowed
                    allowWallClipping = false;
                    cache.setLastVisible();
                }
            }

            if (cVal == 1) {
                // outside of wall, now clipping is not allowed
                allowWallClipping = false;
            }

            // Step along the axis whose next cell border is closest.
            if (t_next_y < t_next_x && t_next_y < t_next_z) {
                currentY += y_inc; // move up/down
                t_next_y += distInY; // update next vertical intersection point
            } else if (t_next_x < t_next_y && t_next_x < t_next_z) {
                currentX += x_inc; // move right/left
                t_next_x += distInX; // update next horizontal intersection point
            } else {
                currentZ += z_inc; // move along Z (also the tie-break case)
                t_next_z += distInZ; // update next intersection point
            }

        }
        return true;
    }

    // -1 = invalid location, 0 = not checked yet, 1 = visible, 2 = occluding
    private int getCacheValue(int x, int y, int z) {
        x -= cameraPos[0];
        y -= cameraPos[1];
        z -= cameraPos[2];
        // Outside the cached cube around the viewer.
        if (Math.abs(x) > reach - 2 || Math.abs(y) > reach - 2
            || Math.abs(z) > reach - 2) {
            return -1;
        }

        // check if target is already known
        return cache.getState(x + reach, y + reach, z + reach);
    }

    // NOTE(review): unused overload — callers use the Vec3d variant below.
    private void cacheResult(int x, int y, int z, boolean result) {
        int cx = x - cameraPos[0] + reach;
        int cy = y - cameraPos[1] + reach;
        int cz = z - cameraPos[2] + reach;
        if (result) {
            cache.setVisible(cx, cy, cz);
        } else {
            cache.setHidden(cx, cy, cz);
        }
    }

    /** Stores the ray outcome for the block containing the given sample point. */
    private void cacheResult(Vec3d vector, boolean result) {
        int cx = MathUtilities.floor(vector.x) - cameraPos[0] + reach;
        int cy = MathUtilities.floor(vector.y) - cameraPos[1] + reach;
        int cz = MathUtilities.floor(vector.z) - cameraPos[2] + reach;
        if (result) {
            cache.setVisible(cx, cy, cz);
        } else {
            cache.setHidden(cx, cy, cz);
        }
    }

    /** Invalidates all cached visibility state (call when the viewer moves). */
    public void resetCache() {
        this.cache.resetCache();
    }

    /** Position of the viewer cell relative to an interval on one axis. */
    private enum Relative {
        INSIDE, POSITIVE, NEGATIVE;

        public static Relative from(int min, int max, int pos) {
            if (max > pos && min > pos) {
                return POSITIVE;
            } else if (min < pos && max < pos) {
                return NEGATIVE;
            }
            return INSIDE;
        }
    }

}

View File

@@ -1,60 +0,0 @@
--- /dev/null
+++ b/src/main/java/com/logisticscraft/occlusionculling/cache/ArrayOcclusionCache.java
@@ -1,0 +_,57 @@
+package com.logisticscraft.occlusionculling.cache;
+
+import java.util.Arrays;
+
+public class ArrayOcclusionCache implements OcclusionCache {
+
+ private final int reachX2;
+ private final byte[] cache;
+ private int positionKey;
+ private int entry;
+ private int offset;
+
+ public ArrayOcclusionCache(int reach) {
+ this.reachX2 = reach * 2;
+ this.cache = new byte[(reachX2 * reachX2 * reachX2) / 4];
+ }
+
+ @Override
+ public void resetCache() {
+ Arrays.fill(cache, (byte) 0);
+ }
+
+ @Override
+ public void setVisible(int x, int y, int z) {
+ positionKey = x + y * reachX2 + z * reachX2 * reachX2;
+ entry = positionKey / 4;
+ offset = (positionKey % 4) * 2;
+ cache[entry] |= 1 << offset;
+ }
+
+ @Override
+ public void setHidden(int x, int y, int z) {
+ positionKey = x + y * reachX2 + z * reachX2 * reachX2;
+ entry = positionKey / 4;
+ offset = (positionKey % 4) * 2;
+ cache[entry] |= 1 << offset + 1;
+ }
+
+ @Override
+ public int getState(int x, int y, int z) {
+ positionKey = x + y * reachX2 + z * reachX2 * reachX2;
+ entry = positionKey / 4;
+ offset = (positionKey % 4) * 2;
+ return cache[entry] >> offset & 3;
+ }
+
+ @Override
+ public void setLastVisible() {
+ cache[entry] |= 1 << offset;
+ }
+
+ @Override
+ public void setLastHidden() {
+ cache[entry] |= 1 << offset + 1;
+ }
+
+}

View File

@@ -1,20 +0,0 @@
--- /dev/null
+++ b/src/main/java/com/logisticscraft/occlusionculling/cache/OcclusionCache.java
@@ -1,0 +_,17 @@
+package com.logisticscraft.occlusionculling.cache;
+
/**
 * Cache of per-block visibility results, indexed by block offsets relative to
 * the viewer (0 .. 2*reach per axis). States: 0 = unknown, 1 = visible
 * (non-occluding), 2 = hidden (occluding).
 */
public interface OcclusionCache {

    /** Resets every cached state back to unknown (0). */
    void resetCache();

    /** Marks the block at the given cache coordinates as non-occluding. */
    void setVisible(int x, int y, int z);

    /** Marks the block at the given cache coordinates as occluding. */
    void setHidden(int x, int y, int z);

    /** @return cached state: 0 unknown, 1 visible, 2 hidden. */
    int getState(int x, int y, int z);

    /** Marks the most recently accessed position as occluding. */
    void setLastHidden();

    /** Marks the most recently accessed position as non-occluding. */
    void setLastVisible();

}

View File

@@ -1,28 +0,0 @@
--- /dev/null
+++ b/src/main/java/com/logisticscraft/occlusionculling/util/MathUtilities.java
@@ -1,0 +_,25 @@
+package com.logisticscraft.occlusionculling.util;
+
/**
 * Floor/ceil helpers mirroring Minecraft's MathHelper behaviour.
 */
public final class MathUtilities {

    private MathUtilities() {
        // static-only utility
    }

    /** Largest int {@code <= d}; correct for negatives, unlike a plain cast. */
    public static int floor(double d) {
        int truncated = (int) d;
        return d < (double) truncated ? truncated - 1 : truncated;
    }

    /** Branch-free floor via an offset trick; only valid for |d| &lt; 1024. */
    public static int fastFloor(double d) {
        return (int) (d + 1024.0) - 1024;
    }

    /** Smallest int {@code >= d}. */
    public static int ceil(double d) {
        int truncated = (int) d;
        return d > (double) truncated ? truncated + 1 : truncated;
    }

}

View File

@@ -1,90 +0,0 @@
--- /dev/null
+++ b/src/main/java/com/logisticscraft/occlusionculling/util/Vec3d.java
@@ -1,0 +_,87 @@
+package com.logisticscraft.occlusionculling.util;
+
+public class Vec3d {
+
+ public double x;
+ public double y;
+ public double z;
+
+ public Vec3d(double x, double y, double z) {
+ this.x = x;
+ this.y = y;
+ this.z = z;
+ }
+
+ public double getX() {
+ return x;
+ }
+
+ public double getY() {
+ return y;
+ }
+
+ public double getZ() {
+ return z;
+ }
+
+ public void set(double x, double y, double z) {
+ this.x = x;
+ this.y = y;
+ this.z = z;
+ }
+
+ public void setAdd(Vec3d vec, double x, double y, double z) {
+ this.x = vec.x + x;
+ this.y = vec.y + y;
+ this.z = vec.z + z;
+ }
+
+ public Vec3d div(Vec3d rayDir) {
+ this.x /= rayDir.x;
+ this.z /= rayDir.z;
+ this.y /= rayDir.y;
+ return this;
+ }
+
+ public Vec3d normalize() {
+ double mag = Math.sqrt(x*x+y*y+z*z);
+ this.x /= mag;
+ this.y /= mag;
+ this.z /= mag;
+ return this;
+ }
+
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+ if (!(other instanceof Vec3d)) {
+ return false;
+ }
+ Vec3d vec3d = (Vec3d) other;
+ if (Double.compare(vec3d.x, x) != 0) {
+ return false;
+ }
+ if (Double.compare(vec3d.y, y) != 0) {
+ return false;
+ }
+ return Double.compare(vec3d.z, z) == 0;
+ }
+
+ @Override
+ public int hashCode() {
+ long l = Double.doubleToLongBits(x);
+ int i = (int) (l ^ l >>> 32);
+ l = Double.doubleToLongBits(y);
+ i = 31 * i + (int) (l ^ l >>> 32);
+ l = Double.doubleToLongBits(z);
+ i = 31 * i + (int) (l ^ l >>> 32);
+ return i;
+ }
+
+ @Override
+ public String toString() {
+ return "(" + x + ", " + y + ", " + z + ")";
+ }
+
+}

View File

@@ -1,144 +0,0 @@
--- /dev/null
+++ b/src/main/java/dev/kaiijumc/kaiiju/KaiijuEntityLimits.java
@@ -1,0 +_,141 @@
+package dev.kaiijumc.kaiiju;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.logging.Level;
+
+import com.google.common.base.Throwables;
+import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
+import io.github.classgraph.ClassGraph;
+import io.github.classgraph.ClassInfo;
+import io.github.classgraph.ScanResult;
+import org.slf4j.Logger;
+
+import com.mojang.logging.LogUtils;
+import net.minecraft.world.entity.Entity;
+import org.bukkit.Bukkit;
+import org.bukkit.configuration.InvalidConfigurationException;
+import org.bukkit.configuration.file.YamlConfiguration;
+
+// Per-region entity tick limits (Kaiiju port): loads kaiiju_entity_limits.yml
+// and maps concrete entity classes to their tick/removal limits, consumed by
+// KaiijuEntityThrottler. init() must run before getEntityLimit is called.
+@SuppressWarnings("unused")
+public class KaiijuEntityLimits {
+ private static final Logger LOGGER = LogUtils.getLogger();
+ private static final File CONFIG_FOLDER = new File("luminol_config");
+
+ protected static final String HEADER =
+ "Per region entity limits for Kaiiju.\n"
+ + "If there are more of particular entity type in a region than limit, entity ticking will be throttled.\n"
+ + "Example: for Wither limit 100 & 300 Withers in a region -> 100 Withers tick every tick & every Wither ticks every 3 ticks.\n"
+ + "Available entities: GlowSquid, Ambient, Bat, Animal, Bee, Cat, Chicken, Cod, Cow, Dolphin, Fish, FishSchool, Fox, Golem, IronGolem, "
+ + "MushroomCow, Ocelot, Panda, Parrot, Perchable, Pig, PolarBear, PufferFish, Rabbit, Salmon, Sheep, Snowman, Squid, TropicalFish, Turtle, "
+ + "WaterAnimal, Wolf, Allay, Axolotl, Camel, Frog, Tadpole, Goat, Horse, HorseAbstract, HorseChestedAbstract, HorseDonkey, HorseMule, "
+ + "HorseSkeleton, HorseZombie, Llama, LlamaTrader, Sniffer, EnderCrystal, EnderDragon, Wither, ArmorStand, Hanging, ItemFrame, Leash, "
+ + "Painting, GlowItemFrame, FallingBlock, Item, TNTPrimed, Blaze, CaveSpider, Creeper, Drowned, Enderman, Endermite, Evoker, Ghast, "
+ + "GiantZombie, Guardian, GuardianElder, IllagerAbstract, IllagerIllusioner, IllagerWizard, MagmaCube, Monster, MonsterPatrolling, Phantom, "
+ + "PigZombie, Pillager, Ravager, Shulker, Silverfish, Skeleton, SkeletonAbstract, SkeletonStray, SkeletonWither, Slime, Spider, Strider, Vex, "
+ + "Vindicator, Witch, Zoglin, Zombie, ZombieHusk, ZombieVillager, Hoglin, Piglin, PiglinAbstract, PiglinBrute, Warden, Villager, "
+ + "VillagerTrader, Arrow, DragonFireball, Egg, EnderPearl, EnderSignal, EvokerFangs, Fireball, FireballFireball, Fireworks, FishingHook, "
+ + "LargeFireball, LlamaSpit, Potion, Projectile, ProjectileThrowable, ShulkerBullet, SmallFireball, Snowball, SpectralArrow, ThrownExpBottle, "
+ + "ThrownTrident, TippedArrow, WitherSkull, Raider, ChestBoat, Boat, MinecartAbstract, MinecartChest, MinecartCommandBlock, MinecartContainer, "
+ + "MinecartFurnace, MinecartHopper, MinecartMobSpawner, MinecartRideable, MinecartTNT\n";
+ protected static final File ENTITY_LIMITS_FILE = new File(CONFIG_FOLDER, "kaiiju_entity_limits.yml");
+ public static YamlConfiguration entityLimitsConfig;
+ public static boolean enabled = false;
+
+ // Populated by init(); maps a concrete entity class to its limits.
+ protected static Map<Class<? extends Entity>, EntityLimit> entityLimits;
+
+ static final String ENTITY_PREFIX = "Entity";
+
+ public static void init() {
+ init(true);
+ }
+
+ // Loads (and on first run writes) the YAML file, then resolves every
+ // configured key to a net.minecraft entity class via classpath scanning.
+ private static void init(boolean setup) {
+ entityLimitsConfig = new YamlConfiguration();
+
+ if (ENTITY_LIMITS_FILE.exists()) {
+ try {
+ entityLimitsConfig.load(ENTITY_LIMITS_FILE);
+ } catch (InvalidConfigurationException ex) {
+ Bukkit.getLogger().log(Level.SEVERE, "Could not load kaiiju_entity_limits.yml, please correct your syntax errors", ex);
+ // Same wrapping behavior as the deprecated Throwables.propagate.
+ throw new RuntimeException(ex);
+ } catch (IOException ignored) {
+ // Best effort: an unreadable file leaves the defaults in place.
+ }
+ } else {
+ if (setup) {
+ entityLimitsConfig.options().header(HEADER);
+ entityLimitsConfig.options().copyDefaults(true);
+ entityLimitsConfig.set("enabled", enabled);
+ entityLimitsConfig.set("Axolotl.limit", 1000);
+ entityLimitsConfig.set("Axolotl.removal", 2000);
+ try {
+ entityLimitsConfig.save(ENTITY_LIMITS_FILE);
+ } catch (IOException ex) {
+ Bukkit.getLogger().log(Level.SEVERE, "Could not save " + ENTITY_LIMITS_FILE, ex);
+ }
+ }
+ }
+
+ enabled = entityLimitsConfig.getBoolean("enabled");
+
+ entityLimits = new Object2ObjectOpenHashMap<>();
+ try (ScanResult scanResult = new ClassGraph().enableAllInfo().acceptPackages("net.minecraft.world.entity").scan()) {
+ // Resolve every entity class once up front; asSubclass gives a typed
+ // Class<? extends Entity> without an unchecked cast and avoids a
+ // second Class.forName per configured key below.
+ Map<String, Class<? extends Entity>> entityClasses = new HashMap<>();
+ for (ClassInfo classInfo : scanResult.getAllClasses()) {
+ Class<?> entityClass = Class.forName(classInfo.getName());
+ if (Entity.class.isAssignableFrom(entityClass)) {
+ String entityName = extractEntityName(entityClass.getSimpleName());
+ entityClasses.put(entityName, entityClass.asSubclass(Entity.class));
+ }
+ }
+
+ for (String key : entityLimitsConfig.getKeys(false)) {
+ if (key.equals("enabled")) {
+ continue;
+ }
+
+ if (!entityClasses.containsKey(key)) {
+ // Fix: message previously named "kaiiju-entity-limits.yml",
+ // which is not the file actually read.
+ LOGGER.error("Unknown entity '" + key + "' in kaiiju_entity_limits.yml, skipping");
+ continue;
+ }
+ int limit = entityLimitsConfig.getInt(key + ".limit");
+ int removal = entityLimitsConfig.getInt(key + ".removal");
+
+ if (limit < 1) {
+ LOGGER.error(key + " has a limit less than the minimum of 1, ignoring");
+ continue;
+ }
+ // removal == -1 disables removal; otherwise it must exceed limit.
+ if (removal <= limit && removal != -1) {
+ LOGGER.error(key + " has a removal limit that is less than or equal to its limit, setting removal to limit * 10");
+ removal = limit * 10;
+ }
+
+ entityLimits.put(entityClasses.get(key), new EntityLimit(limit, removal));
+ }
+ } catch (ClassNotFoundException e) {
+ // Log through the server logger instead of printStackTrace.
+ LOGGER.error("Failed to resolve an entity class while loading entity limits", e);
+ }
+ }
+
+ // Returns the configured limit for this entity's exact class, or null
+ // when no limit was configured for that type.
+ public static EntityLimit getEntityLimit(Entity entity) {
+ return entityLimits.get(entity.getClass());
+ }
+
+ // Strips the "Entity" class-name prefix (e.g. "EntityZombie" -> "Zombie");
+ // names without the prefix pass through unchanged.
+ private static String extractEntityName(String input) {
+ int prefixLength = ENTITY_PREFIX.length();
+
+ if (input.length() <= prefixLength || !input.startsWith(ENTITY_PREFIX)) {
+ return input;
+ } else {
+ return input.substring(prefixLength);
+ }
+ }
+
+ // limit: max entities of a type ticked per region tick;
+ // removal: count above which surplus entities are removed (-1 = never).
+ public record EntityLimit(int limit, int removal) {
+ @Override
+ public String toString() {
+ return "EntityLimit{limit=" + limit + ", removal=" + removal + "}";
+ }
+ }
+}

View File

@@ -1,87 +0,0 @@
--- /dev/null
+++ b/src/main/java/dev/kaiijumc/kaiiju/KaiijuEntityThrottler.java
@@ -1,0 +_,84 @@
+package dev.kaiijumc.kaiiju;
+
+import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
+import net.minecraft.world.entity.Entity;
+import io.papermc.paper.threadedregions.RegionizedWorldData;
+
+// Round-robin throttler enforcing KaiijuEntityLimits per region: when a type
+// exceeds its limit, only a rotating window of `limit` entities ticks each
+// region tick, and entities above the removal threshold are flagged for
+// removal. One instance per region; not safe for concurrent use.
+public class KaiijuEntityThrottler {
+ private static class TickInfo {
+ int currentTick; // entities of this type seen so far this region tick
+ int continueFrom; // position where this tick's ticking window starts
+ int toTick; // size of the window allowed to tick this region tick
+ int toRemove; // number of leading entities to remove this region tick
+ }
+
+ public static class EntityThrottlerReturn {
+ public boolean skip; // true -> do not tick this entity
+ public boolean remove; // true -> remove this entity outright
+ }
+
+ private final Object2ObjectOpenHashMap<KaiijuEntityLimits.EntityLimit, TickInfo> entityLimitTickInfoMap = new Object2ObjectOpenHashMap<>();
+
+ // Called at the start of a region tick: resets the per-type counters.
+ public void tickLimiterStart() {
+ for (TickInfo tickInfo : entityLimitTickInfoMap.values()) {
+ tickInfo.currentTick = 0;
+ }
+ }
+
+ // Decides for one entity whether it ticks, is skipped, or is removed.
+ // Entities already removed, or of a type with no configured limit,
+ // always tick.
+ public EntityThrottlerReturn tickLimiterShouldSkip(Entity entity) {
+ EntityThrottlerReturn retVal = new EntityThrottlerReturn();
+ if (entity.isRemoved()) return retVal;
+ KaiijuEntityLimits.EntityLimit entityLimit = KaiijuEntityLimits.getEntityLimit(entity);
+
+ if (entityLimit != null) {
+ TickInfo tickInfo = entityLimitTickInfoMap.computeIfAbsent(entityLimit, el -> {
+ TickInfo newTickInfo = new TickInfo();
+ newTickInfo.toTick = entityLimit.limit();
+ return newTickInfo;
+ });
+
+ tickInfo.currentTick++;
+ // The first `toRemove` entities encountered this tick are culled
+ // (only when removal is enabled, i.e. removal() > 0).
+ if (tickInfo.currentTick <= tickInfo.toRemove && entityLimit.removal() > 0) {
+ retVal.skip = false;
+ retVal.remove = true;
+ return retVal;
+ }
+
+ // Before the window: skip. Inside the window: tick. Past it: skip.
+ if (tickInfo.currentTick < tickInfo.continueFrom) {
+ retVal.skip = true;
+ return retVal;
+ }
+ if (tickInfo.currentTick - tickInfo.continueFrom < tickInfo.toTick) {
+ retVal.skip = false;
+ return retVal;
+ }
+ retVal.skip = true;
+ return retVal;
+ } else {
+ retVal.skip = false;
+ return retVal;
+ }
+ }
+
+ // Called at the end of a region tick: advances each type's window so a
+ // different slice of entities ticks next time, and recomputes the
+ // removal budget.
+ public void tickLimiterFinish(RegionizedWorldData regionizedWorldData) {
+ for (var entry : entityLimitTickInfoMap.entrySet()) {
+ KaiijuEntityLimits.EntityLimit entityLimit = entry.getKey();
+ TickInfo tickInfo = entry.getValue();
+
+ int additionals = 0;
+ int nextContinueFrom = tickInfo.continueFrom + tickInfo.toTick;
+ // Window ran past the population: wrap to the start and carry the
+ // unused budget over so the next window is still `limit` wide.
+ if (nextContinueFrom >= tickInfo.currentTick) {
+ additionals = entityLimit.limit() - (tickInfo.currentTick - tickInfo.continueFrom);
+ nextContinueFrom = 0;
+ }
+ tickInfo.continueFrom = nextContinueFrom;
+ tickInfo.toTick = entityLimit.limit() + additionals;
+
+ // Removal alternates tick-on/tick-off: arm the surplus above the
+ // removal threshold one tick, then clear it the next.
+ if (tickInfo.toRemove == 0 && tickInfo.currentTick > entityLimit.removal()) {
+ tickInfo.toRemove = tickInfo.currentTick - entityLimit.removal();
+ } else if (tickInfo.toRemove != 0) {
+ tickInfo.toRemove = 0;
+ }
+ }
+ }
+}

View File

@@ -1,136 +0,0 @@
--- /dev/null
+++ b/src/main/java/gg/pufferfish/pufferfish/sentry/PufferfishSentryAppender.java
@@ -1,0 +_,133 @@
+package gg.pufferfish.pufferfish.sentry;
+
+import com.google.common.reflect.TypeToken;
+import com.google.gson.Gson;
+import io.sentry.Breadcrumb;
+import io.sentry.Sentry;
+import io.sentry.SentryEvent;
+import io.sentry.SentryLevel;
+import io.sentry.protocol.Message;
+import io.sentry.protocol.User;
+
+import java.util.Map;
+
+import me.earthme.luminol.config.modules.misc.SentryConfig;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Marker;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.Logger;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.filter.AbstractFilter;
+
+// Log4j2 appender that forwards log records to Sentry: records at or above
+// the configured level (optionally only those carrying a Throwable) become
+// Sentry events; everything else is recorded as a breadcrumb for context.
+public class PufferfishSentryAppender extends AbstractAppender {
+
+ private static final org.apache.logging.log4j.Logger logger = LogManager.getLogger(PufferfishSentryAppender.class.getSimpleName());
+ private static final Gson GSON = new Gson();
+ private final Level logLevel; // minimum level reported as a full Sentry event
+
+ public PufferfishSentryAppender(Level logLevel) {
+ super("PufferfishSentryAdapter", new SentryFilter(), null);
+ this.logLevel = logLevel;
+ }
+
+ @Override
+ public void append(LogEvent logEvent) {
+ // SentryConfig.onlyLogThrown restricts event capture to records with
+ // an attached exception; everything else still becomes a breadcrumb.
+ if (logEvent.getLevel().isMoreSpecificThan(logLevel) && (logEvent.getThrown() != null || !SentryConfig.onlyLogThrown)) {
+ try {
+ logException(logEvent);
+ } catch (Exception e) {
+ logger.warn("Failed to log event with sentry", e);
+ }
+ } else {
+ try {
+ logBreadcrumb(logEvent);
+ } catch (Exception e) {
+ logger.warn("Failed to log event with sentry", e);
+ }
+ }
+ }
+
+ // Builds and submits a full Sentry event, attaching the player, plugin
+ // and event data previously stashed in the log4j ThreadContext under
+ // the pufferfishsentry_* keys.
+ private void logException(LogEvent e) {
+ SentryEvent event = new SentryEvent(e.getThrown());
+
+ Message sentryMessage = new Message();
+ sentryMessage.setMessage(e.getMessage().getFormattedMessage());
+
+ event.setThrowable(e.getThrown());
+ event.setLevel(getLevel(e.getLevel()));
+ event.setLogger(e.getLoggerName());
+ event.setTransaction(e.getLoggerName());
+ event.setExtra("thread_name", e.getThreadName());
+
+ boolean hasContext = e.getContextData() != null;
+
+ if (hasContext && e.getContextData().containsKey("pufferfishsentry_playerid")) {
+ User user = new User();
+ user.setId(e.getContextData().getValue("pufferfishsentry_playerid"));
+ user.setUsername(e.getContextData().getValue("pufferfishsentry_playername"));
+ event.setUser(user);
+ }
+
+ if (hasContext && e.getContextData().containsKey("pufferfishsentry_pluginname")) {
+ event.setExtra("plugin.name", e.getContextData().getValue("pufferfishsentry_pluginname"));
+ event.setExtra("plugin.version", e.getContextData().getValue("pufferfishsentry_pluginversion"));
+ // Attribute the event to the plugin rather than the logger name.
+ event.setTransaction(e.getContextData().getValue("pufferfishsentry_pluginname"));
+ }
+
+ if (hasContext && e.getContextData().containsKey("pufferfishsentry_eventdata")) {
+ // Event data is stored as a JSON-encoded map of field name -> value.
+ Map<String, String> eventFields = GSON.fromJson((String) e.getContextData().getValue("pufferfishsentry_eventdata"), new TypeToken<Map<String, String>>() {
+ }.getType());
+ if (eventFields != null) {
+ event.setExtra("event", eventFields);
+ }
+ }
+
+ Sentry.captureEvent(event);
+ }
+
+ // Records a below-threshold log line as a Sentry breadcrumb so it shows
+ // up as context on the next captured event.
+ private void logBreadcrumb(LogEvent e) {
+ Breadcrumb breadcrumb = new Breadcrumb();
+
+ breadcrumb.setLevel(getLevel(e.getLevel()));
+ breadcrumb.setCategory(e.getLoggerName());
+ breadcrumb.setType(e.getLoggerName());
+ breadcrumb.setMessage(e.getMessage().getFormattedMessage());
+
+ Sentry.addBreadcrumb(breadcrumb);
+ }
+
+ // Maps log4j severities onto the closest Sentry severity.
+ private SentryLevel getLevel(Level level) {
+ return switch (level.getStandardLevel()) {
+ case TRACE, DEBUG -> SentryLevel.DEBUG;
+ case WARN -> SentryLevel.WARNING;
+ case ERROR -> SentryLevel.ERROR;
+ case FATAL -> SentryLevel.FATAL;
+ default -> SentryLevel.INFO;
+ };
+ }
+
+ // Drops records produced by the sentry integration itself to avoid
+ // feedback loops when reporting fails.
+ private static class SentryFilter extends AbstractFilter {
+
+ @Override
+ public Result filter(Logger logger, Level level, Marker marker, String msg,
+ Object... params) {
+ return this.filter(logger.getName());
+ }
+
+ @Override
+ public Result filter(Logger logger, Level level, Marker marker, Object msg, Throwable t) {
+ return this.filter(logger.getName());
+ }
+
+ @Override
+ public Result filter(LogEvent event) {
+ return this.filter(event == null ? null : event.getLoggerName());
+ }
+
+ private Result filter(String loggerName) {
+ // NOTE(review): this prefix is the old upstream package name
+ // ("gg.castaway..."), while this class lives in
+ // gg.pufferfish.pufferfish.sentry -- confirm the intended prefix.
+ return loggerName != null && loggerName.startsWith("gg.castaway.pufferfish.sentry") ? Result.DENY
+ : Result.NEUTRAL;
+ }
+ }
+}

View File

@@ -1,47 +0,0 @@
--- /dev/null
+++ b/src/main/java/gg/pufferfish/pufferfish/sentry/SentryManager.java
@@ -1,0 +_,44 @@
+package gg.pufferfish.pufferfish.sentry;
+
+import io.sentry.Sentry;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+// One-shot initializer for the Sentry integration: configures the Sentry SDK
+// from SentryConfig and attaches a PufferfishSentryAppender to the root
+// log4j logger.
+public class SentryManager {
+
+ private static final Logger logger = LogManager.getLogger(SentryManager.class);
+
+ // Static-only utility; not instantiable.
+ private SentryManager() {
+
+ }
+
+ private static boolean initialized = false;
+
+ // Idempotent; `synchronized` guards the initialized flag. On failure the
+ // flag is reset so a later call can retry.
+ public static synchronized void init(Level logLevel) {
+ if (initialized) {
+ return;
+ }
+ if (logLevel == null) {
+ logger.error("Invalid log level, defaulting to WARN.");
+ logLevel = Level.WARN;
+ }
+ try {
+ initialized = true;
+
+ Sentry.init(options -> {
+ options.setDsn(me.earthme.luminol.config.modules.misc.SentryConfig.sentryDsn);
+ options.setMaxBreadcrumbs(100);
+ });
+
+ PufferfishSentryAppender appender = new PufferfishSentryAppender(logLevel);
+ appender.start();
+ ((org.apache.logging.log4j.core.Logger) LogManager.getRootLogger()).addAppender(appender);
+ logger.info("Sentry logging started!");
+ } catch (Exception e) {
+ logger.warn("Failed to initialize sentry!", e);
+ initialized = false;
+ }
+ }
+
+}

View File

@@ -1,30 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/impl/RegionStatsImpl.java
@@ -1,0 +_,27 @@
+package me.earthme.luminol.api.impl;
+
+import io.papermc.paper.threadedregions.TickRegions;
+import me.earthme.luminol.api.RegionStats;
+
+// API adapter exposing the internal TickRegions.RegionStats counters
+// (entities, players, chunks) through the public RegionStats interface by
+// straight delegation.
+public class RegionStatsImpl implements RegionStats {
+ private final TickRegions.RegionStats internal;
+
+ public RegionStatsImpl(TickRegions.RegionStats internal) {
+ this.internal = internal;
+ }
+
+ @Override
+ public int getEntityCount() {
+ return this.internal.getEntityCount();
+ }
+
+ @Override
+ public int getPlayerCount() {
+ return this.internal.getPlayerCount();
+ }
+
+ @Override
+ public int getChunkCount() {
+ return this.internal.getChunkCount();
+ }
+}

View File

@@ -1,55 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/impl/ThreadedRegionImpl.java
@@ -1,0 +_,52 @@
+package me.earthme.luminol.api.impl;
+
+import io.papermc.paper.threadedregions.ThreadedRegionizer;
+import io.papermc.paper.threadedregions.TickRegions;
+import me.earthme.luminol.api.ThreadedRegion;
+import me.earthme.luminol.api.TickRegionData;
+import net.minecraft.world.level.ChunkPos;
+import org.bukkit.Location;
+import org.bukkit.World;
+
+import javax.annotation.Nullable;
+
+// API adapter exposing an internal Folia/Paper ThreadedRegion through the
+// public luminol ThreadedRegion interface; all calls delegate to the wrapped
+// instance.
+public class ThreadedRegionImpl implements ThreadedRegion {
+ private final ThreadedRegionizer.ThreadedRegion<TickRegions.TickRegionData, TickRegions.TickRegionSectionData> internal;
+
+ public ThreadedRegionImpl(ThreadedRegionizer.ThreadedRegion<TickRegions.TickRegionData, TickRegions.TickRegionSectionData> internal) {
+ this.internal = internal;
+ }
+
+ // Middle block of the region's center chunk (at y = 0), or null when the
+ // region currently has no center chunk.
+ @Nullable
+ @Override
+ public Location getCenterChunkPos() {
+ final ChunkPos centerChunkPos = this.internal.getCenterChunk();
+
+ if (centerChunkPos == null) {
+ return null;
+ }
+
+ return new Location(this.internal.regioniser.world.getWorld(), centerChunkPos.getMiddleBlockX(), 0, centerChunkPos.getMiddleBlockZ());
+ }
+
+ @Override
+ public double getDeadSectionPercent() {
+ return this.internal.getDeadSectionPercent();
+ }
+
+ @Override
+ public TickRegionData getTickRegionData() {
+ return this.internal.getData().tickRegionDataAPI;
+ }
+
+ // NOTE(review): declared @Nullable but always delegates to the owning
+ // regioniser's world -- confirm whether null is actually possible here.
+ @Nullable
+ @Override
+ public World getWorld() {
+ return this.internal.regioniser.world.getWorld();
+ }
+
+ @Override
+ public long getId() {
+ return this.internal.id;
+ }
+}

View File

@@ -1,56 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/impl/ThreadedRegionizerImpl.java
@@ -1,0 +_,53 @@
+package me.earthme.luminol.api.impl;
+
+import io.papermc.paper.threadedregions.TickRegions;
+import me.earthme.luminol.api.ThreadedRegion;
+import me.earthme.luminol.api.ThreadedRegionizer;
+import net.minecraft.server.level.ServerLevel;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+// API adapter exposing a level's internal regioniser through the public
+// ThreadedRegionizer interface: enumerates all regions and looks regions up
+// by chunk coordinate.
+public class ThreadedRegionizerImpl implements ThreadedRegionizer {
+ private final ServerLevel internal;
+
+ public ThreadedRegionizerImpl(ServerLevel internal) {
+ this.internal = internal;
+ }
+
+ // Snapshot of all regions at call time, each wrapped for the public API.
+ @Override
+ public Collection<ThreadedRegion> getAllRegions() {
+ final List<ThreadedRegion> ret = new ArrayList<>();
+
+ this.internal.regioniser.computeForAllRegions(region -> {
+ final ThreadedRegion wrapped = new ThreadedRegionImpl(region);
+
+ ret.add(wrapped);
+ });
+
+ return ret;
+ }
+
+ // Synchronised lookup of the region covering a chunk; null when no
+ // region owns that chunk.
+ @Override
+ public ThreadedRegion getAtSynchronized(int chunkX, int chunkZ) {
+ final io.papermc.paper.threadedregions.ThreadedRegionizer.ThreadedRegion<TickRegions.TickRegionData, TickRegions.TickRegionSectionData> got = this.internal.regioniser.getRegionAtSynchronised(chunkX, chunkZ);
+
+ if (got == null) {
+ return null;
+ }
+
+ return got.threadedRegionAPI;
+ }
+
+ // Unsynchronised variant of the lookup above; null when no region owns
+ // that chunk.
+ @Override
+ public ThreadedRegion getAtUnSynchronized(int chunkX, int chunkZ) {
+ final io.papermc.paper.threadedregions.ThreadedRegionizer.ThreadedRegion<TickRegions.TickRegionData, TickRegions.TickRegionSectionData> got = this.internal.regioniser.getRegionAtUnsynchronised(chunkX, chunkZ);
+
+ if (got == null) {
+ return null;
+ }
+
+ return got.threadedRegionAPI;
+ }
+}

View File

@@ -1,33 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/api/impl/TickRegionDataImpl.java
@@ -1,0 +_,30 @@
+package me.earthme.luminol.api.impl;
+
+import io.papermc.paper.threadedregions.TickRegions;
+import me.earthme.luminol.api.RegionStats;
+import me.earthme.luminol.api.TickRegionData;
+import org.bukkit.World;
+
+// API adapter exposing internal TickRegions.TickRegionData (world, tick
+// counter, stats) through the public TickRegionData interface by delegation.
+public class TickRegionDataImpl implements TickRegionData {
+ private final TickRegions.TickRegionData internal;
+
+ public TickRegionDataImpl(TickRegions.TickRegionData internal) {
+ this.internal = internal;
+ }
+
+ @Override
+ public World getWorld() {
+ return this.internal.world.getWorld();
+ }
+
+ @Override
+ public long getCurrentTickCount() {
+ return this.internal.getCurrentTick();
+ }
+
+ @Override
+ public RegionStats getRegionStats() {
+ return this.internal.getRegionStats().regionStatsAPI;
+ }
+
+}

View File

@@ -1,124 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/commands/LuminolConfigCommand.java
@@ -1,0 +_,121 @@
+package me.earthme.luminol.commands;
+
+import me.earthme.luminol.config.LuminolConfig;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.format.TextColor;
+import org.bukkit.Location;
+import org.bukkit.command.Command;
+import org.bukkit.command.CommandSender;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+import java.util.ArrayList;
+import java.util.List;
+
+// /luminolconfig command: query, set, reset individual Luminol config values
+// or reload the whole config file.
+public class LuminolConfigCommand extends Command {
+ public LuminolConfigCommand() {
+ super("luminolconfig");
+ this.setPermission("luminol.commands.luminolconfig");
+ this.setDescription("Manage config file");
+ this.setUsage("/luminolconfig");
+ }
+
+ // Sends a generic usage-error message in red.
+ public void wrongUse(CommandSender sender) {
+ sender.sendMessage(
+ Component
+ .text("Wrong use!")
+ .color(TextColor.color(255, 0, 0))
+ );
+ }
+
+ // First argument completes to the sub-command; the second completes to a
+ // config path for the sub-commands that take one.
+ @Override
+ public @NotNull List<String> tabComplete(@NotNull CommandSender sender, @NotNull String alias, @NotNull String[] args, @Nullable Location location) throws IllegalArgumentException {
+ final List<String> result = new ArrayList<>();
+
+ if (args.length == 1) {
+ result.add("query");
+ result.add("set");
+ result.add("reset");
+ result.add("reload");
+ } else if (args.length == 2 && (args[0].equals("query") || args[0].equals("set") || args[0].equals("reset"))) {
+ result.addAll(LuminolConfig.completeConfigPath(args[1]));
+ }
+ return result;
+ }
+
+ @Override
+ public boolean execute(@NotNull CommandSender sender, @NotNull String commandLabel, @NotNull String[] args) {
+ if (!this.testPermission(sender)) {
+ sender.sendMessage(Component
+ .text("No permission to execute this command!")
+ .color(TextColor.color(255, 0, 0))
+ );
+ // Fix: previously fell through and executed the sub-command even
+ // when the permission check failed.
+ return true;
+ }
+
+ if (args.length < 1) {
+ wrongUse(sender);
+ return true;
+ }
+
+ switch (args[0]) {
+ case "reload" -> {
+ LuminolConfig.reloadAsync().thenAccept(nullValue -> sender.sendMessage(
+ Component
+ .text("Reloaded config file!")
+ .color(TextColor.color(0, 255, 0))
+ ));
+ }
+ case "set" -> {
+ // Requires exactly a path and a value.
+ if (args.length == 2 || args.length > 3) {
+ wrongUse(sender);
+ return true;
+ } else if (LuminolConfig.setConfig(args[1], args[2])) {
+ LuminolConfig.reloadAsync().thenAccept(nullValue -> sender.sendMessage(
+ Component
+ .text("Set Config " + args[1] + " to " + args[2] + " successfully!")
+ .color(TextColor.color(0, 255, 0))
+ ));
+ } else {
+ sender.sendMessage(
+ Component
+ .text("Failed to set config " + args[1] + " to " + args[2] + "!")
+ .color(TextColor.color(255, 0, 0))
+ );
+ }
+ }
+ case "reset" -> {
+ if (args.length != 2) {
+ wrongUse(sender);
+ return true;
+ } else {
+ LuminolConfig.resetConfig(args[1]);
+ LuminolConfig.reloadAsync().thenAccept(nullValue -> sender.sendMessage(
+ Component
+ .text("Reset Config " + args[1] + " to " + LuminolConfig.getConfig(args[1]) + " successfully!")
+ .color(TextColor.color(0, 255, 0))
+ ));
+ }
+ }
+ case "query" -> {
+ if (args.length != 2) {
+ wrongUse(sender);
+ return true;
+ } else {
+ sender.sendMessage(
+ Component
+ .text("Config " + args[1] + " is " + LuminolConfig.getConfig(args[1]) + "!")
+ .color(TextColor.color(0, 255, 0))
+ );
+ }
+ }
+
+ default -> sender.sendMessage(
+ Component
+ .text("Unknown action!")
+ .color(TextColor.color(255, 0, 0))
+ );
+ }
+
+ return true;
+ }
+}

View File

@@ -1,50 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/commands/MembarCommand.java
@@ -1,0 +_,47 @@
+package me.earthme.luminol.commands;
+
+import me.earthme.luminol.config.modules.misc.MembarConfig;
+import me.earthme.luminol.functions.GlobalServerMemoryBar;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.format.TextColor;
+import org.bukkit.command.Command;
+import org.bukkit.command.CommandSender;
+import org.bukkit.entity.Player;
+import org.jetbrains.annotations.NotNull;
+
+// /membar: per-player toggle for the global memory-usage bossbar.
+public class MembarCommand extends Command {
+ public MembarCommand(@NotNull String name) {
+ super(name);
+ this.setPermission("luminol.commands.membar");
+ this.setDescription("Show the memory usage through a bossbar");
+ this.setUsage("/membar");
+ }
+
+ @Override
+ public boolean execute(@NotNull CommandSender sender, @NotNull String commandLabel, @NotNull String[] args) {
+ // testPermission already messages the sender on failure.
+ if (!testPermission(sender)) {
+ return true;
+ }
+
+ if (!MembarConfig.memoryBarEnabled) {
+ sender.sendMessage(Component.text("Membar was already disabled!").color(TextColor.color(255, 0, 0)));
+ return true;
+ }
+
+ if (!(sender instanceof Player player)) {
+ sender.sendMessage(Component.text("Only player can use this command!").color(TextColor.color(255, 0, 0)));
+ return true;
+ }
+
+ // Toggle: visible -> hide, hidden -> show.
+ if (GlobalServerMemoryBar.isPlayerVisible(player)) {
+ player.sendMessage(Component.text("Disabled mem bar").color(TextColor.color(0, 255, 0)));
+ GlobalServerMemoryBar.setVisibilityForPlayer(player, false);
+ return true;
+ }
+
+ player.sendMessage(Component.text("Enabled mem bar").color(TextColor.color(0, 255, 0)));
+ GlobalServerMemoryBar.setVisibilityForPlayer(player, true);
+
+ return true;
+ }
+}

View File

@@ -1,50 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/commands/RegionBarCommand.java
@@ -1,0 +_,47 @@
+package me.earthme.luminol.commands;
+
+import me.earthme.luminol.config.modules.misc.RegionBarConfig;
+import me.earthme.luminol.functions.GlobalServerRegionBar;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.format.TextColor;
+import org.bukkit.command.Command;
+import org.bukkit.command.CommandSender;
+import org.bukkit.entity.Player;
+import org.jetbrains.annotations.NotNull;
+
+// /regionbar: per-player toggle for the bossbar showing the sender's current
+// region info.
+public class RegionBarCommand extends Command {
+ public RegionBarCommand(@NotNull String name) {
+ super(name);
+ this.setPermission("luminol.commands.regionbar");
+ this.setDescription("Show info about your current region through a bossbar");
+ this.setUsage("/regionbar");
+ }
+
+ @Override
+ public boolean execute(@NotNull CommandSender sender, @NotNull String commandLabel, @NotNull String[] args) {
+ // testPermission already messages the sender on failure.
+ if (!testPermission(sender)) {
+ return true;
+ }
+
+ if (!RegionBarConfig.regionbarEnabled) {
+ sender.sendMessage(Component.text("Regionbar was already disabled!").color(TextColor.color(255, 0, 0)));
+ return true;
+ }
+
+ if (!(sender instanceof Player player)) {
+ sender.sendMessage(Component.text("Only player can use this command!").color(TextColor.color(255, 0, 0)));
+ return true;
+ }
+
+ // Toggle: visible -> hide, hidden -> show.
+ if (GlobalServerRegionBar.isPlayerVisible(player)) {
+ player.sendMessage(Component.text("Disabled region bar").color(TextColor.color(0, 255, 0)));
+ GlobalServerRegionBar.setVisibilityForPlayer(player, false);
+ return true;
+ }
+
+ player.sendMessage(Component.text("Enabled region bar").color(TextColor.color(0, 255, 0)));
+ GlobalServerRegionBar.setVisibilityForPlayer(player, true);
+
+ return true;
+ }
+}

View File

@@ -1,50 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/commands/TpsBarCommand.java
@@ -1,0 +_,47 @@
+package me.earthme.luminol.commands;
+
+import me.earthme.luminol.config.modules.misc.TpsBarConfig;
+import me.earthme.luminol.functions.GlobalServerTpsBar;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.format.TextColor;
+import org.bukkit.command.Command;
+import org.bukkit.command.CommandSender;
+import org.bukkit.entity.Player;
+import org.jetbrains.annotations.NotNull;
+
+// /tpsbar: per-player toggle for the bossbar showing server TPS and MSPT.
+public class TpsBarCommand extends Command {
+ public TpsBarCommand(@NotNull String name) {
+ super(name);
+ this.setPermission("luminol.commands.tpsbar");
+ this.setDescription("Show the tps and mspt through a bossbar");
+ this.setUsage("/tpsbar");
+ }
+
+ @Override
+ public boolean execute(@NotNull CommandSender sender, @NotNull String commandLabel, @NotNull String[] args) {
+ // testPermission already messages the sender on failure.
+ if (!testPermission(sender)) {
+ return true;
+ }
+
+ if (!TpsBarConfig.tpsbarEnabled) {
+ sender.sendMessage(Component.text("Tpsbar was already disabled!").color(TextColor.color(255, 0, 0)));
+ return true;
+ }
+
+ if (!(sender instanceof Player player)) {
+ sender.sendMessage(Component.text("Only player can use this command!").color(TextColor.color(255, 0, 0)));
+ return true;
+ }
+
+ // Toggle: visible -> hide, hidden -> show.
+ if (GlobalServerTpsBar.isPlayerVisible(player)) {
+ player.sendMessage(Component.text("Disabled tps bar").color(TextColor.color(0, 255, 0)));
+ GlobalServerTpsBar.setVisibilityForPlayer(player, false);
+ return true;
+ }
+
+ player.sendMessage(Component.text("Enabled tps bar").color(TextColor.color(0, 255, 0)));
+ GlobalServerTpsBar.setVisibilityForPlayer(player, true);
+
+ return true;
+ }
+}

View File

@@ -1,10 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/DefaultTransformLogic.java
@@ -1,0 +_,7 @@
+package me.earthme.luminol.config;
+
+// Default config-value transformation: the identity function.
+// NOTE(review): presumably an extension point for coercing raw config
+// values -- no subclass is visible in this file, confirm against callers.
+public class DefaultTransformLogic {
+ public Object transform(Object obj) {
+ return obj;
+ }
+}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/EnumConfigCategory.java
@@ -1,0 +_,20 @@
+package me.earthme.luminol.config;
+
+// Top-level sections of the Luminol config file; baseKeyName is the key
+// prefix that a category's options live under in the config.
+public enum EnumConfigCategory {
+ OPTIMIZATIONS("optimizations"),
+ FIXES("fixes"),
+ MISC("misc"),
+ GAMEPLAY("gameplay"),
+ EXPERIMENT("experiment"),
+ REMOVED("removed");
+
+ private final String baseKeyName;
+
+ EnumConfigCategory(String baseKeyName) {
+ this.baseKeyName = baseKeyName;
+ }
+
+ public String getBaseKeyName() {
+ return this.baseKeyName;
+ }
+}

View File

@@ -1,26 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/IConfigModule.java
@@ -1,0 +_,23 @@
+package me.earthme.luminol.config;
+
+import com.electronwill.nightconfig.core.file.CommentedFileConfig;
+import org.jetbrains.annotations.NotNull;
+
+// Contract for one config module: declares which category and base key the
+// module's options live under, and provides a default-persisting reader.
+public interface IConfigModule {
+
+ EnumConfigCategory getCategory();
+
+ String getBaseName();
+
+ // Hook invoked after the backing file has been loaded.
+ default void onLoaded(CommentedFileConfig configInstance) {
+ }
+
+ // Reads keyName from the config; when the key is missing, writes
+ // defaultValue back first so new options are persisted into the file.
+ default <T> T get(String keyName, T defaultValue, @NotNull CommentedFileConfig config) {
+ if (!config.contains(keyName)) {
+ config.set(keyName, defaultValue);
+ return defaultValue;
+ }
+
+ return config.get(keyName);
+ }
+}

View File

@@ -1,385 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/LuminolConfig.java
@@ -1,0 +_,382 @@
package me.earthme.luminol.config;

import com.electronwill.nightconfig.core.UnmodifiableConfig;
import com.electronwill.nightconfig.core.file.CommentedFileConfig;
import io.papermc.paper.threadedregions.RegionizedServer;
import me.earthme.luminol.commands.LuminolConfigCommand;
import me.earthme.luminol.config.flags.ConfigInfo;
import me.earthme.luminol.config.flags.DoNotLoad;
import me.earthme.luminol.config.flags.HotReloadUnsupported;
import me.earthme.luminol.config.flags.TransformedConfig;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bukkit.Bukkit;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Modifier;
import java.net.JarURLConnection;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

/**
 * Loader and registry for Luminol's global TOML configuration
 * ({@code luminol_config/luminol_global_config.toml}).
 *
 * <p>Modules implementing {@link IConfigModule} are discovered by scanning the
 * {@code me.earthme.luminol.config.modules} package; their non-final static
 * fields annotated with {@link ConfigInfo} are read from / written to the file
 * reflectively. {@link TransformedConfig} drives migration of renamed keys, and
 * keys under the {@code removed} section are purged on load.
 *
 * <p>Reloads must run on the global region thread (enforced in {@link #reload()}).
 */
public class LuminolConfig {
    public static final Logger logger = LogManager.getLogger();
    private static final File baseConfigFolder = new File("luminol_config");
    private static final File baseConfigFile = new File(baseConfigFolder, "luminol_global_config.toml");
    // All module instances created during the current (re)load cycle.
    private static final Set<IConfigModule> allInstanced = new HashSet<>();
    // Values staged via setConfig()/resetConfig(); consumed by the next reload.
    private static final Map<String, Object> stagedConfigMap = new HashMap<>();
    // Compile-time defaults captured on first load, keyed by full config path.
    private static final Map<String, Object> defaultvalueMap = new HashMap<>();
    // True once setupLatch() ran; gates HotReloadUnsupported fields and default capture.
    public static boolean alreadyInit = false;
    private static CommentedFileConfig configFileInstance;

    /** Registers the {@code /luminolconfig} command and marks init complete. */
    public static void setupLatch() {
        Bukkit.getCommandMap().register("luminolconfig", "luminol", new LuminolConfigCommand());
        alreadyInit = true;
    }

    /**
     * Re-reads the config file and re-populates every module.
     * Must be called from the global tick thread.
     */
    public static void reload() {
        RegionizedServer.ensureGlobalTickThread("Reload luminol config off global region thread!");

        dropAllInstanced();
        try {
            preLoadConfig();
            finalizeLoadConfig();
        } catch (Exception e) {
            logger.error(e);
        }
    }

    /**
     * Schedules {@link #reload()} onto the global region task queue.
     *
     * @return a future completing when the reload task has run
     */
    @Contract(" -> new")
    public static @NotNull CompletableFuture<Void> reloadAsync() {
        return CompletableFuture.runAsync(LuminolConfig::reload, task -> RegionizedServer.getInstance().addTask(() -> {
            try {
                task.run();
            } catch (Exception e) {
                logger.error(e);
            }
        }));
    }

    /** Discards all module instances so the next load starts fresh. */
    public static void dropAllInstanced() {
        allInstanced.clear();
    }

    /** Fires {@link IConfigModule#onLoaded} on every module after fields are set. */
    public static void finalizeLoadConfig() {
        for (IConfigModule module : allInstanced) {
            module.onLoaded(configFileInstance);
        }
    }

    /**
     * Creates/loads the config file, instantiates all modules, populates their
     * fields, then saves the (possibly updated) file back to disk.
     *
     * @throws IOException if the file cannot be created
     */
    public static void preLoadConfig() throws IOException {
        baseConfigFolder.mkdirs();

        if (!baseConfigFile.exists()) {
            baseConfigFile.createNewFile();
        }

        configFileInstance = CommentedFileConfig.of(baseConfigFile);

        configFileInstance.load();

        try {
            instanceAllModule();
            loadAllModules();
        } catch (Exception e) {
            logger.error("Failed to load config modules!", e);
            throw new RuntimeException(e);
        }

        saveConfigs();
    }

    /** Populates the annotated fields of every instantiated module. */
    private static void loadAllModules() throws IllegalAccessException {
        for (IConfigModule instanced : allInstanced) {
            loadForSingle(instanced);
        }
    }

    /** Instantiates every IConfigModule found under the modules package. */
    private static void instanceAllModule() throws NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException {
        for (Class<?> clazz : getClasses("me.earthme.luminol.config.modules")) {
            if (IConfigModule.class.isAssignableFrom(clazz)) {
                allInstanced.add((IConfigModule) clazz.getConstructor().newInstance());
            }
        }
    }

    /**
     * Syncs one module's annotated static fields with the config file:
     * migrates renamed keys ({@link TransformedConfig}), purges the sentinel
     * {@code removed.*} section, writes missing defaults, applies staged
     * overrides, coerces the stored value to the field's type, and finally
     * assigns it to the field.
     */
    private static void loadForSingle(@NotNull IConfigModule singleConfigModule) throws IllegalAccessException {
        final EnumConfigCategory category = singleConfigModule.getCategory();

        Field[] fields = singleConfigModule.getClass().getDeclaredFields();

        for (Field field : fields) {
            int modifiers = field.getModifiers();
            // Only non-final statics are config-backed; finals cannot be written back.
            if (Modifier.isStatic(modifiers) && !Modifier.isFinal(modifiers)) {
                boolean skipLoad = field.getAnnotation(DoNotLoad.class) != null || (alreadyInit && field.getAnnotation(HotReloadUnsupported.class) != null);
                ConfigInfo configInfo = field.getAnnotation(ConfigInfo.class);

                if (skipLoad || configInfo == null) {
                    continue;
                }

                final String fullConfigKeyName = category.getBaseKeyName() + "." + singleConfigModule.getBaseName() + "." + configInfo.baseName();

                field.setAccessible(true);
                final Object currentValue = field.get(null);
                // Remember compile-time defaults once so resetConfig() can restore them.
                if (!alreadyInit) defaultvalueMap.put(fullConfigKeyName, currentValue);
                boolean removed = fullConfigKeyName.equals("removed.removed_config.removed");

                if (!configFileInstance.contains(fullConfigKeyName) || removed) {
                    // Key absent (or sentinel): try migrating from each declared legacy location.
                    for (TransformedConfig transformedConfig : field.getAnnotationsByType(TransformedConfig.class)) {
                        final String oldConfigKeyName = String.join(".", transformedConfig.category()) + "." + transformedConfig.name();
                        Object oldValue = configFileInstance.get(oldConfigKeyName);
                        if (oldValue != null) {
                            boolean success = true;
                            if (transformedConfig.transform() && !removed) {
                                try {
                                    for (Class<? extends DefaultTransformLogic> logic : transformedConfig.transformLogic()) {
                                        oldValue = logic.getDeclaredConstructor().newInstance().transform(oldValue);
                                    }
                                    configFileInstance.add(fullConfigKeyName, oldValue);
                                } catch (Exception e) {
                                    success = false;
                                    logger.error("Failed to transform removed config {}!", transformedConfig.name());
                                }

                                if (transformedConfig.transformComments()) {
                                    configFileInstance.setComment(fullConfigKeyName, configFileInstance.getComment(oldConfigKeyName));
                                }
                            }

                            // Only delete the legacy key once its value was carried over.
                            if (success) removeConfig(oldConfigKeyName, transformedConfig.category());
                            final String comments = configInfo.comments();

                            if (!comments.isBlank()) configFileInstance.setComment(fullConfigKeyName, comments);

                            if (!removed && configFileInstance.get(fullConfigKeyName) != null) break;
                        }
                    }
                    if (removed) {
                        // Sentinel field: drop the whole "removed" section and move on.
                        configFileInstance.remove("removed");
                        continue;
                    }
                    if (configFileInstance.get(fullConfigKeyName) != null) continue;
                    if (currentValue == null) {
                        throw new UnsupportedOperationException("Config " + singleConfigModule.getBaseName() + " tried to add a null default value!");
                    }

                    final String comments = configInfo.comments();

                    if (!comments.isBlank()) {
                        configFileInstance.setComment(fullConfigKeyName, comments);
                    }

                    configFileInstance.add(fullConfigKeyName, currentValue);
                    continue;
                }

                // Staged in-memory overrides win over the file; null stage means "reset to default".
                Object actuallyValue;
                if (stagedConfigMap.containsKey(fullConfigKeyName)) {
                    actuallyValue = stagedConfigMap.get(fullConfigKeyName);
                    if (actuallyValue == null) actuallyValue = defaultvalueMap.get(fullConfigKeyName);
                    stagedConfigMap.remove(fullConfigKeyName);
                } else {
                    actuallyValue = configFileInstance.get(fullConfigKeyName);
                }
                try {
                    // NOTE(review): field.get(null) NPEs here if the static default is null — assumed non-null for present keys; confirm.
                    actuallyValue = tryTransform(field.get(null).getClass(), actuallyValue);
                    configFileInstance.set(fullConfigKeyName, actuallyValue);
                } catch (IllegalFormatConversionException e) {
                    resetConfig(fullConfigKeyName);
                    logger.error("Failed to transform config {}, reset to default!", fullConfigKeyName);
                }
                field.set(null, actuallyValue);
            }
        }
    }

    /**
     * Removes {@code name} from the file, then prunes the parent section
     * addressed by {@code keys} if it became empty.
     */
    public static void removeConfig(String name, String[] keys) {
        configFileInstance.remove(name);
        Object configAtPath = configFileInstance.get(String.join(".", keys));
        if (configAtPath instanceof UnmodifiableConfig && ((UnmodifiableConfig) configAtPath).isEmpty()) {
            removeConfig(keys);
        }
    }

    /** Removes the section at {@code keys}, recursively pruning empty ancestors. */
    public static void removeConfig(String[] keys) {
        configFileInstance.remove(String.join(".", keys));
        Object configAtPath = configFileInstance.get(String.join(".", Arrays.copyOfRange(keys, 1, keys.length)));
        if (configAtPath instanceof UnmodifiableConfig && ((UnmodifiableConfig) configAtPath).isEmpty()) {
            removeConfig(Arrays.copyOfRange(keys, 1, keys.length));
        }
    }

    /** @see #setConfig(String, Object) */
    public static boolean setConfig(String[] keys, Object value) {
        return setConfig(String.join(".", keys), value);
    }

    /**
     * Stages a value for an existing key; it is applied on the next reload.
     *
     * @return true if the key exists and the value was staged
     */
    public static boolean setConfig(String key, Object value) {
        if (configFileInstance.contains(key) && configFileInstance.get(key) != null) {
            stagedConfigMap.put(key, value);
            return true;
        }
        return false;
    }

    /**
     * Coerces {@code value} to {@code targetType} via string parsing when the
     * types do not already match.
     *
     * @throws IllegalFormatConversionException if parsing fails
     */
    private static Object tryTransform(Class<?> targetType, Object value) {
        if (!targetType.isAssignableFrom(value.getClass())) {
            try {
                if (targetType == Integer.class) {
                    value = Integer.parseInt(value.toString());
                } else if (targetType == Double.class) {
                    value = Double.parseDouble(value.toString());
                } else if (targetType == Boolean.class) {
                    value = Boolean.parseBoolean(value.toString());
                } else if (targetType == Long.class) {
                    value = Long.parseLong(value.toString());
                } else if (targetType == Float.class) {
                    value = Float.parseFloat(value.toString());
                } else if (targetType == String.class) {
                    value = value.toString();
                }
            } catch (Exception e) {
                logger.error("Failed to transform value {}!", value);
                throw new IllegalFormatConversionException((char) 0, targetType);
            }
        }
        return value;
    }

    /** Flushes the in-memory config to disk. */
    public static void saveConfigs() {
        configFileInstance.save();
    }

    /** @see #resetConfig(String) */
    public static void resetConfig(String[] keys) {
        resetConfig(String.join(".", keys));
    }

    /** Stages a reset-to-default for {@code key}, applied on the next reload. */
    public static void resetConfig(String key) {
        stagedConfigMap.put(key, null);
    }

    /** @see #getConfig(String) */
    public static String getConfig(String[] keys) {
        return getConfig(String.join(".", keys));
    }

    /** @return the current file value of {@code key} rendered as a string */
    public static String getConfig(String key) {
        return configFileInstance.get(key).toString();
    }

    /**
     * Tab-completion helper: suggests the next path segment for every known
     * config path starting with {@code partialPath}.
     */
    public static List<String> completeConfigPath(String partialPath) {
        List<String> allPaths = getAllConfigPaths(partialPath);
        List<String> result = new ArrayList<>();

        for (String path : allPaths) {
            String remaining = path.substring(partialPath.length());
            if (remaining.isEmpty()) continue;

            int dotIndex = remaining.indexOf('.');
            String suggestion = (dotIndex == -1)
                    ? path
                    : partialPath + remaining.substring(0, dotIndex);

            if (!result.contains(suggestion)) {
                result.add(suggestion);
            }
        }
        return result;
    }

    /** All known config paths (from captured defaults) with the given prefix. */
    private static List<String> getAllConfigPaths(String currentPath) {
        return defaultvalueMap.keySet().stream()
                .filter(k -> k.startsWith(currentPath))
                .toList();
    }

    /**
     * Scans the classpath (both directories and jars) for every class under
     * {@code pack}.
     */
    public static @NotNull Set<Class<?>> getClasses(String pack) {
        Set<Class<?>> classes = new LinkedHashSet<>();
        String packageDirName = pack.replace('.', '/');
        Enumeration<URL> dirs;

        try {
            dirs = Thread.currentThread().getContextClassLoader().getResources(packageDirName);
            while (dirs.hasMoreElements()) {
                URL url = dirs.nextElement();
                String protocol = url.getProtocol();
                if ("file".equals(protocol)) {
                    String filePath = URLDecoder.decode(url.getFile(), StandardCharsets.UTF_8);
                    findClassesInPackageByFile(pack, filePath, classes);
                } else if ("jar".equals(protocol)) {
                    JarFile jar;
                    try {
                        jar = ((JarURLConnection) url.openConnection()).getJarFile();
                        Enumeration<JarEntry> entries = jar.entries();
                        findClassesInPackageByJar(pack, entries, packageDirName, classes);
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                }
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }

        return classes;
    }

    /** Recursively collects .class files under an exploded package directory. */
    private static void findClassesInPackageByFile(String packageName, String packagePath, Set<Class<?>> classes) {
        File dir = new File(packagePath);

        if (!dir.exists() || !dir.isDirectory()) {
            return;
        }

        File[] dirfiles = dir.listFiles((file) -> file.isDirectory() || file.getName().endsWith(".class"));
        if (dirfiles != null) {
            for (File file : dirfiles) {
                if (file.isDirectory()) {
                    findClassesInPackageByFile(packageName + "." + file.getName(), file.getAbsolutePath(), classes);
                } else {
                    // Strip the ".class" suffix (6 chars) to get the simple name.
                    String className = file.getName().substring(0, file.getName().length() - 6);
                    try {
                        classes.add(Class.forName(packageName + '.' + className));
                    } catch (ClassNotFoundException e) {
                        throw new RuntimeException(e);
                    }
                }
            }
        }
    }

    /** Collects classes under the package from a jar's entry listing. */
    private static void findClassesInPackageByJar(String packageName, Enumeration<JarEntry> entries, String packageDirName, Set<Class<?>> classes) {
        while (entries.hasMoreElements()) {
            JarEntry entry = entries.nextElement();
            String name = entry.getName();
            if (name.charAt(0) == '/') {
                name = name.substring(1);
            }
            if (name.startsWith(packageDirName)) {
                int idx = name.lastIndexOf('/');
                if (idx != -1) {
                    // Re-derive the (sub)package from the entry path.
                    packageName = name.substring(0, idx).replace('/', '.');
                }
                if (name.endsWith(".class") && !entry.isDirectory()) {
                    String className = name.substring(packageName.length() + 1, name.length() - 6);
                    try {
                        classes.add(Class.forName(packageName + '.' + className));
                    } catch (ClassNotFoundException e) {
                        throw new RuntimeException(e);
                    }
                }
            }
        }
    }
}

View File

@@ -1,14 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/flags/ConfigInfo.java
@@ -1,0 +_,11 @@
package me.earthme.luminol.config.flags;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

/**
 * Marks a non-final static field of an IConfigModule as config-backed.
 * The full key is {@code <category>.<moduleBaseName>.<baseName>}.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface ConfigInfo {
    /** Last key segment for this field. */
    String baseName();

    /** Optional TOML comment written above the key; blank means none. */
    String comments() default "";
}

View File

@@ -1,11 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/flags/DoNotLoad.java
@@ -1,0 +_,8 @@
package me.earthme.luminol.config.flags;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

/**
 * Excludes a static field from config loading entirely (used for runtime
 * state such as loggers or derived values living next to config fields).
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface DoNotLoad {
}

View File

@@ -1,11 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/flags/HotReloadUnsupported.java
@@ -1,0 +_,8 @@
package me.earthme.luminol.config.flags;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

/**
 * Marks a config field that is only read on first load; reloads after
 * startup (when LuminolConfig.alreadyInit is true) skip it.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface HotReloadUnsupported {
}

View File

@@ -1,29 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/flags/TransformedConfig.java
@@ -1,0 +_,26 @@
package me.earthme.luminol.config.flags;

import me.earthme.luminol.config.DefaultTransformLogic;

import java.lang.annotation.Repeatable;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

/**
 * Declares a legacy config location for a field so its value can be migrated
 * to the new key on load. Repeatable: each occurrence names one old key.
 */
@Retention(RetentionPolicy.RUNTIME)
@Repeatable(TransformedConfig.List.class)
public @interface TransformedConfig {
    /** Last segment of the legacy key. */
    String name();

    /** Path segments of the legacy key's section, outermost first. */
    String[] category();

    /** Whether to copy/convert the old value into the new key. */
    boolean transform() default true;

    /** Whether to carry the old key's TOML comment over as well. */
    boolean transformComments() default true;

    /** Conversion chain applied to the old value, in order. */
    Class<? extends DefaultTransformLogic>[] transformLogic() default {DefaultTransformLogic.class};

    /** Container for the repeatable annotation. */
    @Retention(RetentionPolicy.RUNTIME)
    @interface List {
        TransformedConfig[] value();
    }
}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/experiment/CommandDataConfig.java
@@ -1,0 +_,20 @@
package me.earthme.luminol.config.modules.experiment;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Toggle for forcing the /data command to be enabled.
 * Key: {@code experiment.force_the_data_command_to_be_enabled.enable}.
 */
public class CommandDataConfig implements IConfigModule {
    // NOTE(review): key is "enable" while sibling modules use "enabled";
    // renaming would break existing config files, so it is left as-is.
    @ConfigInfo(baseName = "enable")
    public static boolean enabled = false;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.EXPERIMENT;
    }

    @Override
    public String getBaseName() {
        return "force_the_data_command_to_be_enabled";
    }
}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/experiment/DisableAsyncCatcherConfig.java
@@ -1,0 +_,20 @@
package me.earthme.luminol.config.modules.experiment;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Toggle for disabling the server's async catchers.
 * Key: {@code experiment.disable_async_catchers.enabled}.
 */
public class DisableAsyncCatcherConfig implements IConfigModule {
    @ConfigInfo(baseName = "enabled")
    public static boolean enabled = false;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.EXPERIMENT;
    }

    @Override
    public String getBaseName() {
        return "disable_async_catchers";
    }
}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/experiment/DisableEntityCatchConfig.java
@@ -1,0 +_,20 @@
package me.earthme.luminol.config.modules.experiment;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Toggle for disabling entity-tick exception catchers.
 * Key: {@code experiment.disable_entity_exception_catchers.enabled}.
 */
public class DisableEntityCatchConfig implements IConfigModule {
    @ConfigInfo(baseName = "enabled")
    public static boolean enabled = false;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.EXPERIMENT;
    }

    @Override
    public String getBaseName() {
        return "disable_entity_exception_catchers";
    }
}

View File

@@ -1,31 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/fixes/FoliaEntityMovingFixConfig.java
@@ -1,0 +_,28 @@
package me.earthme.luminol.config.modules.fixes;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Workaround for entities crossing tick-region boundaries with very high
 * velocity on Folia. Keys under {@code fixes.folia.fix_high_velocity_issue}.
 */
public class FoliaEntityMovingFixConfig implements IConfigModule {
    @ConfigInfo(baseName = "enabled", comments =
        """
        A simple fix of an issue on folia\s
        (Sometimes the entity would\s
        have a large moment that cross the\s
        different tick regions, and it would\s
        make the server crashed) but sometimes it might doesn't work""")
    public static boolean enabled = false;
    // Whether to log when the high-velocity condition is detected.
    @ConfigInfo(baseName = "warn_on_detected")
    public static boolean warnOnDetected = true;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.FIXES;
    }

    @Override
    public String getBaseName() {
        return "folia.fix_high_velocity_issue";
    }
}

View File

@@ -1,28 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/fixes/FoliaPOIAccessOffRegionFixConfig.java
@@ -1,0 +_,25 @@
package me.earthme.luminol.config.modules.fixes;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Workaround for POIManager access from off-region threads on Folia.
 * Key: {@code fixes.folia.fix_poi_access_off_region.enabled}.
 */
public class FoliaPOIAccessOffRegionFixConfig implements IConfigModule {
    @ConfigInfo(baseName = "enabled", comments =
        """
        The POIManager of folia has something which has not been patched\s
        for regionized ticking and these would trigger the async catcher\s
        and make the server crash.If you would like to prevent it and didn't\s
        mind the side effect(currently unknown), you can enable this""")
    public static boolean enabled = false;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.FIXES;
    }

    @Override
    public String getBaseName() {
        return "folia.fix_poi_access_off_region";
    }
}

View File

@@ -1,25 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/fixes/ForceCleanupEntityBrainMemoryConfig.java
@@ -1,0 +_,22 @@
package me.earthme.luminol.config.modules.fixes;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Drops brain memory modules referencing entities/positions outside the
 * current tick region.
 * NOTE(review): lives in the {@code fixes} package but reports category
 * EXPERIMENT, so its keys land under {@code experiment.*} — confirm intended.
 */
public class ForceCleanupEntityBrainMemoryConfig implements IConfigModule {
    @ConfigInfo(baseName = "enabled_for_entity", comments = "When enabled, the entity's brain will clean the memory which is typed of entity and not belong to current tickregion")
    public static boolean enabledForEntity = false;
    @ConfigInfo(baseName = "enabled_for_block_pos", comments = "When enabled, the entity's brain will clean the memory which is typed of block_pos and not belong to current tickregion")
    public static boolean enabledForBlockPos = false;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.EXPERIMENT;
    }

    @Override
    public String getBaseName() {
        return "force_cleanup_drop_non_owned_entity_memory_module";
    }
}

View File

@@ -1,25 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/fixes/UnsafeTeleportationConfig.java
@@ -1,0 +_,22 @@
package me.earthme.luminol.config.modules.fixes;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Allows non-player entities to use end portals (needed e.g. for sand duping).
 * Key: {@code fixes.allow_unsafe_teleportation.enabled}.
 */
public class UnsafeTeleportationConfig implements IConfigModule {
    @ConfigInfo(baseName = "enabled", comments = "Allow non player entities enter end portals if enabled.\n" +
            "If you want to use sand duping,please turn on this.\n" +
            "Warning: This would cause some unsafe issues, you could learn more on : https://github.com/PaperMC/Folia/issues/297")
    public static boolean enabled = false;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.FIXES;
    }

    @Override
    public String getBaseName() {
        return "allow_unsafe_teleportation";
    }
}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/fixes/VanillaRandomSourceConfig.java
@@ -1,0 +_,20 @@
package me.earthme.luminol.config.modules.fixes;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Toggle for using the vanilla (legacy) random source for player entities.
 * Key: {@code fixes.use_vanilla_random_source.enable_for_player_entity}.
 */
public class VanillaRandomSourceConfig implements IConfigModule {
    @ConfigInfo(baseName = "enable_for_player_entity", comments = "Related with RNG cracks")
    public static boolean useLegacyRandomSourceForPlayers = false;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.FIXES;
    }

    @Override
    public String getBaseName() {
        return "use_vanilla_random_source";
    }
}

View File

@@ -1,28 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/CollisionBehaviorConfig.java
@@ -1,0 +_,25 @@
package me.earthme.luminol.config.modules.misc;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Selects the collision behavior implementation.
 * Key: {@code misc.collision_behavior.mode}.
 */
public class CollisionBehaviorConfig implements IConfigModule {
    // NOTE(review): value is matched elsewhere against the listed names;
    // no validation happens here — an unknown string is accepted as-is.
    @ConfigInfo(baseName = "mode", comments =
        """
        Available Value:
        VANILLA
        BLOCK_SHAPE_VANILLA
        PAPER""")
    public static String behaviorMode = "BLOCK_SHAPE_VANILLA";

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.MISC;
    }

    @Override
    public String getBaseName() {
        return "collision_behavior";
    }
}

View File

@@ -1,80 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/CpuAffinityConfig.java
@@ -1,0 +_,77 @@
package me.earthme.luminol.config.modules.misc;

import com.electronwill.nightconfig.core.file.CommentedFileConfig;
import com.mojang.logging.LogUtils;
import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;
import me.earthme.luminol.config.flags.DoNotLoad;
import net.openhft.affinity.Affinity;
import org.slf4j.Logger;

import java.util.BitSet;
import java.util.List;

/**
 * Pins tick-region threads to a configurable set of CPU ids.
 * Keys under {@code misc.cpu_affinity}; the parsed mask is published through
 * {@link #tickRegionAffinityBitSet} for consumers elsewhere.
 */
public class CpuAffinityConfig implements IConfigModule {
    @ConfigInfo(baseName = "enabled")
    public static boolean cpuAffinityEnabled = false;
    // Defaults to the process's current affinity, rendered as decimal strings.
    @ConfigInfo(baseName = "tickregion_affinity")
    public static List<String> tickRegionAffinity = Affinity.getAffinity()
            .stream()
            .mapToObj(String::valueOf)
            .toList();

    @DoNotLoad
    private static boolean inited = false;
    @DoNotLoad
    private static final Logger LOGGER = LogUtils.getLogger();
    @DoNotLoad
    public static BitSet tickRegionAffinityBitSet;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.MISC;
    }

    @Override
    public String getBaseName() {
        return "cpu_affinity";
    }

    /** Parses and publishes the affinity mask whenever the feature is on. */
    @Override
    public void onLoaded(CommentedFileConfig configInstance) {
        if (!cpuAffinityEnabled) return;

        tickRegionAffinityBitSet = parseAffinity(tickRegionAffinity);
        LOGGER.info("Tick region thread now bound to: {}", tickRegionAffinityBitSet);

        inited = true;
    }

    /**
     * Converts the configured id strings into a BitSet mask: unparseable
     * entries fall back to 0 (with a warning), duplicates are collapsed, and
     * ids outside [0, availableProcessors) are warned about and dropped.
     */
    private BitSet parseAffinity(List<String> affinity) {
        final int cpuCount = Runtime.getRuntime().availableProcessors();
        final BitSet mask = new BitSet(affinity.size());
        affinity.stream()
                .map(raw -> {
                    try {
                        return Integer.parseInt(raw);
                    } catch (NumberFormatException ignored) {
                        LOGGER.warn("Unable to parse cpu id {} to a valid number, falling back to 0.", raw);
                        return 0;
                    }
                })
                .distinct()
                .forEach(cpuId -> {
                    if (cpuId >= 0 && cpuId < cpuCount) {
                        mask.set(cpuId);
                    } else {
                        LOGGER.warn("Invalid cpu id {}, ignoring.", cpuId);
                    }
                });
        return mask;
    }
}

View File

@@ -1,25 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/DisableHeightmapWarnConfig.java
@@ -1,0 +_,22 @@
package me.earthme.luminol.config.modules.misc;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Toggle for silencing the heightmap-check warning.
 * Key: {@code misc.heightmap_warn_disable.enabled}.
 */
public class DisableHeightmapWarnConfig implements IConfigModule {
    @ConfigInfo(baseName = "enabled", comments =
        """
        Disable heightmap-check's warning""")
    public static boolean enabled = false;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.MISC;
    }

    @Override
    public String getBaseName() {
        return "heightmap_warn_disable";
    }
}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/DisableMovedWronglyThreshold.java
@@ -1,0 +_,20 @@
package me.earthme.luminol.config.modules.misc;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Toggle for disabling the "moved wrongly" movement threshold check.
 * Key: {@code misc.disable_moved_wrongly_threshold.enabled}.
 */
public class DisableMovedWronglyThreshold implements IConfigModule {
    @ConfigInfo(baseName = "enabled")
    public static boolean enabled = false;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.MISC;
    }

    @Override
    public String getBaseName() {
        return "disable_moved_wrongly_threshold";
    }
}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/FoliaWatchogConfig.java
@@ -1,0 +_,20 @@
package me.earthme.luminol.config.modules.misc;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Timeout (ms) for the Folia tick-region watchdog.
 * Key: {@code misc.folia_watchdog.tick_region_time_out_ms}.
 * NOTE(review): class name spells "Watchog" — renaming would change the
 * public type, so the typo is only flagged here.
 */
public class FoliaWatchogConfig implements IConfigModule {
    @ConfigInfo(baseName = "tick_region_time_out_ms")
    public static int tickRegionTimeOutMs = 5000;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.MISC;
    }

    @Override
    public String getBaseName() {
        return "folia_watchdog";
    }
}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/InorderChatConfig.java
@@ -1,0 +_,20 @@
package me.earthme.luminol.config.modules.misc;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Toggle for Mojang's out-of-order chat check (enabled by default).
 * Key: {@code misc.mojang_out_of_order_chat_check.enabled}.
 */
public class InorderChatConfig implements IConfigModule {
    @ConfigInfo(baseName = "enabled")
    public static boolean enabled = true;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.MISC;
    }

    @Override
    public String getBaseName() {
        return "mojang_out_of_order_chat_check";
    }
}

View File

@@ -1,26 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/KaiijuEntityLimiterConfig.java
@@ -1,0 +_,23 @@
package me.earthme.luminol.config.modules.misc;

import com.electronwill.nightconfig.core.file.CommentedFileConfig;
import dev.kaiijumc.kaiiju.KaiijuEntityLimits;
import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;

/**
 * Bridge module: contributes no keys itself, only re-initializes the Kaiiju
 * entity limiter on every config (re)load.
 */
public class KaiijuEntityLimiterConfig implements IConfigModule {
    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.MISC;
    }

    @Override
    public String getBaseName() {
        return "kaiiju_entity_limiter";
    }

    // Runs on every load/reload, not just the first one.
    @Override
    public void onLoaded(CommentedFileConfig configInstance) {
        KaiijuEntityLimits.init();
    }
}

View File

@@ -1,53 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/MembarConfig.java
@@ -1,0 +_,50 @@
package me.earthme.luminol.config.modules.misc;

import com.electronwill.nightconfig.core.file.CommentedFileConfig;
import me.earthme.luminol.commands.MembarCommand;
import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;
import me.earthme.luminol.config.flags.DoNotLoad;
import me.earthme.luminol.functions.GlobalServerMemoryBar;
import org.bukkit.Bukkit;

import java.util.List;

/**
 * Server memory boss-bar: display format, color thresholds, update rate.
 * Keys under {@code misc.membar}; registers the /membar command once.
 */
public class MembarConfig implements IConfigModule {
    @ConfigInfo(baseName = "enabled")
    public static boolean memoryBarEnabled = false;
    // MiniMessage-style format; <used>/<available> are substituted in MB.
    @ConfigInfo(baseName = "format")
    public static String memBarFormat = "<gray>Memory usage <yellow>:</yellow> <used>MB<yellow>/</yellow><available>MB";
    @ConfigInfo(baseName = "memory_color_list")
    public static List<String> memColors = List.of("GREEN", "YELLOW", "RED", "PURPLE");
    @ConfigInfo(baseName = "update_interval_ticks")
    public static int updateInterval = 15;

    // Guards one-time command registration across hot reloads.
    @DoNotLoad
    private static boolean inited = false;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.MISC;
    }

    @Override
    public String getBaseName() {
        return "membar";
    }

    /** Starts or stops the bar task to match the toggle; registers command once. */
    @Override
    public void onLoaded(CommentedFileConfig configInstance) {
        if (memoryBarEnabled) {
            GlobalServerMemoryBar.init();
        } else {
            GlobalServerMemoryBar.cancelBarUpdateTask();
        }

        if (!inited) {
            Bukkit.getCommandMap().register("membar", "luminol", new MembarCommand("membar"));
            inited = true;
        }
    }
}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/OfflineModeWarningConfig.java
@@ -1,0 +_,20 @@
package me.earthme.luminol.config.modules.misc;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Toggle for the offline-mode startup warning (enabled by default).
 * Key: {@code misc.warn_on_offline_mode.enabled}.
 */
public class OfflineModeWarningConfig implements IConfigModule {
    @ConfigInfo(baseName = "enabled")
    public static boolean enabled = true;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.MISC;
    }

    @Override
    public String getBaseName() {
        return "warn_on_offline_mode";
    }
}

View File

@@ -1,24 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/PublickeyVerifyConfig.java
@@ -1,0 +_,21 @@
package me.earthme.luminol.config.modules.misc;

import me.earthme.luminol.config.EnumConfigCategory;
import me.earthme.luminol.config.IConfigModule;
import me.earthme.luminol.config.flags.ConfigInfo;

/**
 * Toggle for verifying player public keys only when the server is in
 * online mode. Key: {@code misc.verify_publickey_only_in_online_mode.enabled}.
 */
public class PublickeyVerifyConfig implements IConfigModule {

    @ConfigInfo(baseName = "enabled")
    public static boolean enabled = false;

    @Override
    public EnumConfigCategory getCategory() {
        return EnumConfigCategory.MISC;
    }

    @Override
    public String getBaseName() {
        return "verify_publickey_only_in_online_mode";
    }
}

View File

@@ -1,53 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/RegionBarConfig.java
@@ -1,0 +_,50 @@
+package me.earthme.luminol.config.modules.misc;
+
+import com.electronwill.nightconfig.core.file.CommentedFileConfig;
+import me.earthme.luminol.commands.RegionBarCommand;
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+import me.earthme.luminol.config.flags.DoNotLoad;
+import me.earthme.luminol.functions.GlobalServerRegionBar;
+import org.bukkit.Bukkit;
+
+import java.util.List;
+
+public class RegionBarConfig implements IConfigModule {
+ @ConfigInfo(baseName = "enabled")
+ public static boolean regionbarEnabled = false;
+ @ConfigInfo(baseName = "format")
+ public static String regionBarFormat = "<gray>Util<yellow>:</yellow> <util> Chunks<yellow>:</yellow> <green><chunks></green> Players<yellow>:</yellow> <green><players></green> Entities<yellow>:</yellow> <green><entities></green>";
+ @ConfigInfo(baseName = "util_color_list")
+ public static List<String> utilColors = List.of("GREEN", "YELLOW", "RED", "PURPLE");
+ @ConfigInfo(baseName = "update_interval_ticks")
+ public static int updateInterval = 15;
+
+ @DoNotLoad
+ private static boolean inited = false;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.MISC;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "regionbar";
+ }
+
+ @Override
+ public void onLoaded(CommentedFileConfig configInstance) {
+ if (regionbarEnabled) {
+ GlobalServerRegionBar.init();
+ } else {
+ GlobalServerRegionBar.cancelBarUpdateTask();
+ }
+
+ if (!inited) {
+ Bukkit.getCommandMap().register("regionbar", "luminol", new RegionBarCommand("regionbar"));
+ inited = true;
+ }
+ }
+}

View File

@@ -1,66 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/RegionFormatConfig.java
@@ -1,0 +_,63 @@
+package me.earthme.luminol.config.modules.misc;
+
+import abomination.LinearRegionFile;
+import com.electronwill.nightconfig.core.file.CommentedFileConfig;
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+import me.earthme.luminol.config.flags.DoNotLoad;
+import me.earthme.luminol.config.flags.HotReloadUnsupported;
+import me.earthme.luminol.utils.EnumRegionFormat;
+import net.minecraft.server.MinecraftServer;
+
+public class RegionFormatConfig implements IConfigModule {
+ @HotReloadUnsupported
+ @ConfigInfo(baseName = "format")
+ public static String format = "MCA";
+ @HotReloadUnsupported
+ @ConfigInfo(baseName = "linear_compression_level")
+ public static int linearCompressionLevel = 1;
+ @HotReloadUnsupported
+ @ConfigInfo(baseName = "linear_io_thread_count")
+ public static int linearIoThreadCount = 6;
+ @HotReloadUnsupported
+ @ConfigInfo(baseName = "linear_io_flush_delay_ms")
+ public static int linearIoFlushDelayMs = 100;
+ @HotReloadUnsupported
+ @ConfigInfo(baseName = "linear_use_virtual_thread")
+ public static boolean linearUseVirtualThread = true;
+
+ @DoNotLoad
+ public static EnumRegionFormat regionFormat;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.MISC;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "region_format";
+ }
+
+ @Override
+ public void onLoaded(CommentedFileConfig configInstance) {
+ regionFormat = EnumRegionFormat.fromString(format.toUpperCase());
+
+ if (regionFormat == null) {
+ throw new RuntimeException("Invalid region format: " + format);
+ }
+
+ if (regionFormat == EnumRegionFormat.LINEAR_V2) {
+ if (RegionFormatConfig.linearCompressionLevel > 23 || RegionFormatConfig.linearCompressionLevel < 1) {
+ MinecraftServer.LOGGER.error("Linear region compression level should be between 1 and 22 in config: {}", RegionFormatConfig.linearCompressionLevel);
+ MinecraftServer.LOGGER.error("Falling back to compression level 1.");
+ RegionFormatConfig.linearCompressionLevel = 1;
+ }
+
+ LinearRegionFile.SAVE_DELAY_MS = linearIoFlushDelayMs;
+ LinearRegionFile.SAVE_THREAD_MAX_COUNT = linearIoThreadCount;
+ LinearRegionFile.USE_VIRTUAL_THREAD = linearUseVirtualThread;
+ }
+ }
+}

View File

@@ -1,25 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/SecureSeedConfig.java
@@ -1,0 +_,22 @@
+package me.earthme.luminol.config.modules.misc;
+
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+
+public class SecureSeedConfig implements IConfigModule {
+ @ConfigInfo(baseName = "enabled", comments = """
+ Once you enable secure seed, all ores and structures are generated with 1024-bit seed
+ instead of using 64-bit seed in vanilla, made seed cracker become impossible.""")
+ public static boolean enabled = false;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.MISC;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "secure_seed";
+ }
+}

View File

@@ -1,50 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/SentryConfig.java
@@ -1,0 +_,47 @@
+package me.earthme.luminol.config.modules.misc;
+
+import com.electronwill.nightconfig.core.file.CommentedFileConfig;
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+import org.apache.logging.log4j.Level;
+
+public class SentryConfig implements IConfigModule {
+
+ @ConfigInfo(baseName = "dsn", comments =
+ " Sentry DSN for improved error logging, leave blank to disable,\n" +
+ " Obtain from https://sentry.io/")
+ public static String sentryDsn = "";
+
+ @ConfigInfo(baseName = "log_level", comments = " Logs with a level higher than or equal to this level will be recorded.")
+ public static String logLevel = "WARN";
+
+ @ConfigInfo(baseName = "only_log_thrown", comments = " Only log with a Throwable will be recorded after enabling this.")
+ public static boolean onlyLogThrown = true;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.MISC;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "sentry";
+ }
+
+ @Override
+ public void onLoaded(CommentedFileConfig configInstance) {
+ String sentryEnvironment = System.getenv("SENTRY_DSN");
+
+ sentryDsn = sentryEnvironment != null && !sentryEnvironment.isBlank()
+ ? sentryEnvironment
+ : configInstance.getOrElse("sentry.dsn", sentryDsn);
+
+ logLevel = configInstance.getOrElse("sentry.log-level", logLevel);
+ onlyLogThrown = configInstance.getOrElse("sentry.only-log-thrown", onlyLogThrown);
+
+ if (sentryDsn != null && !sentryDsn.isBlank()) {
+ gg.pufferfish.pufferfish.sentry.SentryManager.init(Level.getLevel(logLevel));
+ }
+ }
+}

View File

@@ -1,26 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/ServerModNameConfig.java
@@ -1,0 +_,23 @@
+package me.earthme.luminol.config.modules.misc;
+
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+
+public class ServerModNameConfig implements IConfigModule {
+ @ConfigInfo(baseName = "name")
+ public static String serverModName = "Luminol";
+
+ @ConfigInfo(baseName = "vanilla_spoof")
+ public static boolean fakeVanilla = false;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.MISC;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "server_mod_name";
+ }
+}

View File

@@ -1,57 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/TpsBarConfig.java
@@ -1,0 +_,54 @@
+package me.earthme.luminol.config.modules.misc;
+
+import com.electronwill.nightconfig.core.file.CommentedFileConfig;
+import me.earthme.luminol.commands.TpsBarCommand;
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+import me.earthme.luminol.config.flags.DoNotLoad;
+import me.earthme.luminol.functions.GlobalServerTpsBar;
+import org.bukkit.Bukkit;
+
+import java.util.List;
+
+public class TpsBarConfig implements IConfigModule {
+ @ConfigInfo(baseName = "enabled")
+ public static boolean tpsbarEnabled = false;
+ @ConfigInfo(baseName = "format")
+ public static String tpsBarFormat = "<gray>TPS<yellow>:</yellow> <tps> MSPT<yellow>:</yellow> <mspt> Ping<yellow>:</yellow> <ping>ms ChunkHot<yellow>:</yellow> <chunkhot>";
+ @ConfigInfo(baseName = "tps_color_list")
+ public static List<String> tpsColors = List.of("GREEN", "YELLOW", "RED", "PURPLE");
+ @ConfigInfo(baseName = "ping_color_list")
+ public static List<String> pingColors = List.of("GREEN", "YELLOW", "RED", "PURPLE");
+ @ConfigInfo(baseName = "chunkhot_color_list")
+ public static List<String> chunkHotColors = List.of("GREEN", "YELLOW", "RED", "PURPLE");
+ @ConfigInfo(baseName = "update_interval_ticks")
+ public static int updateInterval = 15;
+
+ @DoNotLoad
+ private static boolean inited = false;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.MISC;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "tpsbar";
+ }
+
+ @Override
+ public void onLoaded(CommentedFileConfig configInstance) {
+ if (tpsbarEnabled) {
+ GlobalServerTpsBar.init();
+ } else {
+ GlobalServerTpsBar.cancelBarUpdateTask();
+ }
+
+ if (!inited) {
+ Bukkit.getCommandMap().register("tpsbar", "luminol", new TpsBarCommand("tpsbar"));
+ inited = true;
+ }
+ }
+}

View File

@@ -1,32 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/TripwireBehaviorConfig.java
@@ -1,0 +_,29 @@
+package me.earthme.luminol.config.modules.misc;
+
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+import me.earthme.luminol.config.flags.TransformedConfig;
+
+public class TripwireBehaviorConfig implements IConfigModule {
+ @ConfigInfo(baseName = "enabled")
+ public static boolean enabled = false;
+ @TransformedConfig(name = "behavior-mode", category = {"misc", "tripwire_dupe"})
+ @ConfigInfo(baseName = "behavior_mode", comments =
+ """
+ Available Value:
+ VANILLA20
+ VANILLA21
+ MIXED""")
+ public static String behaviorMode = "VANILLA21";
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.MISC;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "tripwire_dupe";
+ }
+}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/misc/UsernameCheckConfig.java
@@ -1,0 +_,20 @@
+package me.earthme.luminol.config.modules.misc;
+
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+
+public class UsernameCheckConfig implements IConfigModule {
+ @ConfigInfo(baseName = "enabled")
+ public static boolean enabled = true;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.MISC;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "username_checks";
+ }
+}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/optimizations/EntityGoalSelectorInactiveTickConfig.java
@@ -1,0 +_,20 @@
+package me.earthme.luminol.config.modules.optimizations;
+
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+
+public class EntityGoalSelectorInactiveTickConfig implements IConfigModule {
+ @ConfigInfo(baseName = "enabled")
+ public static boolean enabled = false;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.OPTIMIZATIONS;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "skip_goal_selector_tick_in_inactive_tick";
+ }
+}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/optimizations/GaleVariableEntityWakeupConfig.java
@@ -1,0 +_,20 @@
+package me.earthme.luminol.config.modules.optimizations;
+
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+
+public class GaleVariableEntityWakeupConfig implements IConfigModule {
+ @ConfigInfo(baseName = "entity_wakeup_duration_ratio_standard_deviation")
+ public static double entityWakeUpDurationRatioStandardDeviation = 0.2;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.OPTIMIZATIONS;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "variable_entity_waking_up";
+ }
+}

View File

@@ -1,27 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/optimizations/LobotomizeVillageConfig.java
@@ -1,0 +_,24 @@
+package me.earthme.luminol.config.modules.optimizations;
+
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+
+public class LobotomizeVillageConfig implements IConfigModule {
+ @ConfigInfo(baseName = "enabled")
+ public static boolean villagerLobotomizeEnabled = false;
+ @ConfigInfo(baseName = "check_interval")
+ public static int villagerLobotomizeCheckInterval = 100;
+ @ConfigInfo(baseName = "wait_until_trade_locked")
+ public static boolean villagerLobotomizeWaitUntilTradeLocked = false;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.OPTIMIZATIONS;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "lobotomize_villager";
+ }
+}

View File

@@ -1,25 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/optimizations/PetalReduceSensorWorkConfig.java
@@ -1,0 +_,22 @@
+package me.earthme.luminol.config.modules.optimizations;
+
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+
+public class PetalReduceSensorWorkConfig implements IConfigModule {
+ @ConfigInfo(baseName = "enabled")
+ public static boolean enabled = true;
+ @ConfigInfo(baseName = "delay_ticks")
+ public static int delayTicks = 10;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.OPTIMIZATIONS;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "reduce_sensor_work";
+ }
+}

View File

@@ -1,25 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/optimizations/ProjectileChunkReduceConfig.java
@@ -1,0 +_,22 @@
+package me.earthme.luminol.config.modules.optimizations;
+
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+
+public class ProjectileChunkReduceConfig implements IConfigModule {
+ @ConfigInfo(baseName = "max-loads-per-tick")
+ public static int maxProjectileLoadsPerTick;
+ @ConfigInfo(baseName = "max-loads-per-projectile")
+ public static int maxProjectileLoadsPerProjectile;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.OPTIMIZATIONS;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "projectile";
+ }
+}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/optimizations/PurpurAlternativeKeepaliveConfig.java
@@ -1,0 +_,20 @@
+package me.earthme.luminol.config.modules.optimizations;
+
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+
+public class PurpurAlternativeKeepaliveConfig implements IConfigModule {
+ @ConfigInfo(baseName = "enabled")
+ public static boolean useAlternateKeepAlive = false;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.OPTIMIZATIONS;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "alternative_keepalive_handling";
+ }
+}

View File

@@ -1,53 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/optimizations/SIMDConfig.java
@@ -1,0 +_,50 @@
+package me.earthme.luminol.config.modules.optimizations;
+
+import com.electronwill.nightconfig.core.file.CommentedFileConfig;
+import com.mojang.logging.LogUtils;
+import gg.pufferfish.pufferfish.simd.SIMDDetection;
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+import me.earthme.luminol.config.flags.DoNotLoad;
+import org.slf4j.Logger;
+
+public class SIMDConfig implements IConfigModule {
+ @DoNotLoad
+ private static final Logger LOGGER = LogUtils.getLogger();
+ @ConfigInfo(baseName = "enabled")
+ public static boolean enabled = true;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.OPTIMIZATIONS;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "use_simd";
+ }
+
+ @Override
+ public void onLoaded(CommentedFileConfig configInstance) {
+ if (!enabled) {
+ return;
+ }
+
+ // Attempt to detect vectorization
+ try {
+ SIMDDetection.isEnabled = SIMDDetection.canEnable(LOGGER);
+ } catch (NoClassDefFoundError | Exception ignored) {
+ ignored.printStackTrace();
+ }
+
+ if (SIMDDetection.isEnabled) {
+ LOGGER.info("SIMD operations detected as functional. Will replace some operations with faster versions.");
+ } else {
+ LOGGER.warn("SIMD operations are available for your server, but are not configured!");
+ LOGGER.warn("To enable additional optimizations, add \"--add-modules=jdk.incubator.vector\" to your startup flags, BEFORE the \"-jar\".");
+ LOGGER.warn("If you have already added this flag, then SIMD operations are not supported on your JVM or CPU.");
+ LOGGER.warn("Debug: Java: {}, test run: {}", System.getProperty("java.version"), SIMDDetection.testRun);
+ }
+ }
+}

View File

@@ -1,23 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/optimizations/SuffocationOptimizationConfig.java
@@ -1,0 +_,20 @@
+package me.earthme.luminol.config.modules.optimizations;
+
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+
+public class SuffocationOptimizationConfig implements IConfigModule {
+ @ConfigInfo(baseName = "enabled")
+ public static boolean enabled = false;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.OPTIMIZATIONS;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "suffocation_optimization";
+ }
+}

View File

@@ -1,43 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/config/modules/removed/RemovedConfig.java
@@ -1,0 +_,40 @@
+package me.earthme.luminol.config.modules.removed;
+
+import me.earthme.luminol.config.EnumConfigCategory;
+import me.earthme.luminol.config.IConfigModule;
+import me.earthme.luminol.config.flags.ConfigInfo;
+import me.earthme.luminol.config.flags.TransformedConfig;
+
+public class RemovedConfig implements IConfigModule {
+ @TransformedConfig(name = "enabled", category = {"experiment", "force_enable_command_block_execution"}, transform = false)
+ @TransformedConfig(name = "enabled", category = {"experiment", "enable_tick_command"}, transform = false)
+ @TransformedConfig(name = "barrel_rows", category = {"misc", "container_expansion"}, transform = false)
+ @TransformedConfig(name = "enderchest_rows", category = {"misc", "container_expansion"}, transform = false)
+ @TransformedConfig(name = "disable_end_crystal_check", category = {"misc", "end_crystal"}, transform = false)
+ @TransformedConfig(name = "enabled", category = {"experiment", "entity_damage_source_trace"}, transform = false)
+ @TransformedConfig(name = "allow_bad_omen_trigger_raid", category = {"misc", "revert_raid_changes"}, transform = false)
+ @TransformedConfig(name = "give_bad_omen_when_kill_patrol_leader", category = {"misc", "revert_raid_changes"}, transform = false)
+ @TransformedConfig(name = "bad_omen_infinite", category = {"misc", "revert_raid_changes"}, transform = false)
+ @TransformedConfig(name = "skip_height_check", category = {"misc", "revert_raid_changes"}, transform = false)
+ @TransformedConfig(name = "skip_self_raid_check", category = {"misc", "revert_raid_changes"}, transform = false)
+ @TransformedConfig(name = "revert_274911", category = {"misc", "revert_raid_changes"}, transform = false)
+ @TransformedConfig(name = "enabled", category = {"experiment", "ray_tracking_entity_tracker"}, transform = false)
+ @TransformedConfig(name = "skip_marker_armor_stands", category = {"experiment", "ray_tracking_entity_tracker"}, transform = false)
+ @TransformedConfig(name = "check_interval_ms", category = {"experiment", "ray_tracking_entity_tracker"}, transform = false)
+ @TransformedConfig(name = "tracing_distance", category = {"experiment", "ray_tracking_entity_tracker"}, transform = false)
+ @TransformedConfig(name = "hitbox_limit", category = {"experiment", "ray_tracking_entity_tracker"}, transform = false)
+ @ConfigInfo(baseName = "removed", comments =
+ """
+ RemovedConfig redirect to here, no any function.""")
+ public static boolean enabled = true;
+
+ @Override
+ public EnumConfigCategory getCategory() {
+ return EnumConfigCategory.REMOVED;
+ }
+
+ @Override
+ public String getBaseName() {
+ return "removed_config";
+ }
+}

View File

@@ -1,634 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/data/BufferedLinearRegionFile.java
@@ -1,0 +_,631 @@
+package me.earthme.luminol.data;
+
+import abomination.IRegionFile;
+import ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO;
+import me.earthme.luminol.utils.DirectBufferReleaser;
+import net.jpountz.xxhash.XXHash32;
+import net.jpountz.xxhash.XXHashFactory;
+import net.minecraft.nbt.CompoundTag;
+import net.minecraft.world.level.ChunkPos;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+import java.io.*;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+public class BufferedLinearRegionFile implements IRegionFile {
+ private static final double AUTO_COMPACT_PERCENT = 3.0 / 5.0; // 60 %
+ private static final long AUTO_COMPACT_SIZE = 1024 * 1024; // 1 MiB
+
+ private static final long SUPER_BLOCK = 0x1145141919810L;
+ private static final int HASH_SEED = 0x0721; // (∠・ω< )⌒★
+ private static final byte VERSION = 0x01; // ver 1.0
+
+ private final Path filePath;
+
+ private final ReadWriteLock fileAccessLock = new ReentrantReadWriteLock();
+ private final XXHash32 xxHash32 = XXHashFactory.fastestInstance().hash32();
+ private final Sector[] sectors = new Sector[1024];
+ private long currentAcquiredIndex = this.headerSize();
+ private byte compressionLevel = 6;
+ private int xxHash32Seed = HASH_SEED;
+ private FileChannel channel;
+
+ public BufferedLinearRegionFile(Path filePath, int compressionLevel) throws IOException {
+ this(filePath);
+
+ this.compressionLevel = (byte) compressionLevel;
+ }
+
+ public BufferedLinearRegionFile(Path filePath) throws IOException {
+ this.channel = FileChannel.open(
+ filePath,
+ StandardOpenOption.CREATE,
+ StandardOpenOption.WRITE,
+ StandardOpenOption.READ
+ );
+ this.filePath = filePath;
+
+ // fill default sectors
+ for (int i = 0; i < 1024; i++) {
+ this.sectors[i] = new Sector(i, this.headerSize(), 0);
+ }
+
+ // load sectors
+ this.readHeaders();
+ }
+
+ private void readHeaders() throws IOException {
+ if (this.channel.size() < this.headerSize()) {
+ return;
+ }
+
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(this.headerSize());
+ this.channel.read(buffer, 0);
+ buffer.flip();
+
+ if (buffer.getLong() != SUPER_BLOCK || buffer.get() != VERSION) {
+ throw new IOException("Invalid file format or version mismatch");
+ }
+
+ this.compressionLevel = buffer.get(); // Compression level (not used)
+ this.xxHash32Seed = buffer.getInt(); // XXHash32 seed
+ this.currentAcquiredIndex = buffer.getLong(); // Acquired index
+
+ for (Sector sector : this.sectors) {
+ sector.restoreFrom(buffer);
+ if (sector.hasData()) {
+ // recompute if acquired index is corrupted
+ this.currentAcquiredIndex = Math.max(this.currentAcquiredIndex, sector.offset + sector.length);
+ }
+ }
+
+ DirectBufferReleaser.clean(buffer);
+ }
+
+ private void writeHeaders() throws IOException {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(this.headerSize());
+
+ buffer.putLong(SUPER_BLOCK); // Magic
+ buffer.put(VERSION); // Version
+ buffer.put(this.compressionLevel); // Compression level
+ buffer.putInt(this.xxHash32Seed); // XXHash32 seed
+ buffer.putLong(this.currentAcquiredIndex); // Acquired index
+
+ for (Sector sector : this.sectors) {
+ // encode each sector
+ buffer.put(sector.getEncoded());
+ }
+
+ buffer.flip();
+
+ long offset = 0;
+ while (buffer.hasRemaining()) {
+ offset += this.channel.write(buffer, offset);
+ }
+
+ DirectBufferReleaser.clean(buffer);
+ }
+
+ private int sectorSize() {
+ return this.sectors.length * Sector.sizeOfSingle();
+ }
+
+ private int headerSize() {
+ int result = 0;
+
+ result += Long.BYTES; // Magic
+ result += Byte.BYTES; // Version
+ result += Byte.BYTES; // Compression level
+ result += Integer.BYTES; // XXHash32 seed
+ result += Long.BYTES; // Acquired index
+ result += this.sectorSize(); // Sectors
+
+ return result;
+ }
+
+ private void flushInternal() throws IOException {
+ // save headers
+ this.writeHeaders();
+
+ long spareSize = this.channel.size();
+
+ spareSize -= this.headerSize();
+ for (Sector sector : this.sectors) {
+ spareSize -= sector.length;
+ }
+
+ long sectorSize = 0;
+ for (Sector sector : this.sectors) {
+ sectorSize += sector.length;
+ }
+
+ // try auto compact to clean the garbage area
+ if (spareSize > AUTO_COMPACT_SIZE && (double)spareSize > ((double)sectorSize) * AUTO_COMPACT_PERCENT) {
+ this.compact();
+ }
+ }
+
+ private void closeInternal() throws IOException {
+ this.writeHeaders();
+ this.channel.force(true);
+ // force compact
+ this.compact();
+ this.channel.close();
+ }
+
+ private void compact() throws IOException {
+ this.writeHeaders(); // save headers for compact
+ this.channel.force(true);
+ try (FileChannel tempChannel = FileChannel.open(
+ new File(this.filePath.toString() + ".tmp").toPath(),
+ StandardOpenOption.CREATE,
+ StandardOpenOption.WRITE,
+ StandardOpenOption.READ
+ )){
+ // get the latest head in file
+ final ByteBuffer headerBuffer = ByteBuffer.allocateDirect(this.headerSize());
+ this.channel.read(headerBuffer, 0);
+ headerBuffer.flip();
+
+ long offsetHeader = 0;
+ while (headerBuffer.hasRemaining()) {
+ offsetHeader += tempChannel.write(headerBuffer, offsetHeader);
+ }
+ DirectBufferReleaser.clean(headerBuffer);
+
+ int offsetPointer = this.headerSize();
+ for (Sector sector : this.sectors) {
+ // skip cleared or no data-contained sectors
+ if (!sector.hasData()) {
+ continue;
+ }
+
+ // only read the available data
+ final ByteBuffer sectorData = sector.read(this.channel);
+ final int length = sectorData.remaining();
+
+ // recalculate the offset and length
+ final Sector newRecalculated = new Sector(sector.index, offsetPointer, length);
+ offsetPointer += length;
+ this.sectors[sector.index] = newRecalculated; // update sector infos
+
+ newRecalculated.hasData = true;
+
+ long offset = newRecalculated.offset;
+ while (sectorData.hasRemaining()) {
+ offset += tempChannel.write(sectorData, offset);
+ }
+
+ DirectBufferReleaser.clean(sectorData);
+ }
+
+ tempChannel.force(true);
+ this.currentAcquiredIndex = tempChannel.size();
+ }
+
+ this.channel.close();
+
+ Files.move(
+ new File(this.filePath.toString() + ".tmp").toPath(),
+ this.filePath,
+ java.nio.file.StandardCopyOption.REPLACE_EXISTING
+ );
+
+ this.reopenChannel();
+ this.writeHeaders();
+ }
+
+ private void reopenChannel() throws IOException {
+ if (this.channel.isOpen()) {
+ this.channel.close();
+ }
+
+ this.channel = FileChannel.open(
+ filePath,
+ StandardOpenOption.CREATE,
+ StandardOpenOption.WRITE,
+ StandardOpenOption.READ
+ );
+ }
+
+ private void writeChunkDataRaw(int chunkOrdinal, ByteBuffer chunkData) throws IOException {
+ final Sector sector = this.sectors[chunkOrdinal];
+
+ sector.store(chunkData, this.channel);
+ }
+
+ private @Nullable ByteBuffer readChunkDataRaw(int chunkOrdinal) throws IOException {
+ final Sector sector = this.sectors[chunkOrdinal];
+
+ if (!sector.hasData()) {
+ return null;
+ }
+
+ return sector.read(this.channel);
+ }
+
+ private void clearChunkData(int chunkOrdinal) throws IOException {
+ final Sector sector = this.sectors[chunkOrdinal];
+
+ sector.clear();
+
+ this.writeHeaders();
+ }
+
+ private static int getChunkIndex(int x, int z) {
+ return (x & 31) + ((z & 31) << 5);
+ }
+
+ private boolean hasData(int chunkOriginal) {
+ return this.sectors[chunkOriginal].hasData();
+ }
+
+ private void writeChunk(int x, int z, @NotNull ByteBuffer data) throws IOException {
+ final int chunkIndex = getChunkIndex(x, z);
+
+ final int oldPositionOfData = data.position();
+ final int xxHash32OfData = this.xxHash32.hash(data, this.xxHash32Seed);
+ data.position(oldPositionOfData);
+
+ final ByteBuffer compressedData = this.compress(this.ensureDirectBuffer(data));
+ // uncompressed length + timestamp + xxhash32
+ final ByteBuffer chunkSectionBuilder = ByteBuffer.allocateDirect(compressedData.remaining() + 4 + 8 + 4);
+
+ chunkSectionBuilder.putInt(data.remaining()); // Uncompressed length
+ chunkSectionBuilder.putLong(System.nanoTime()); // Timestamp
+ chunkSectionBuilder.putInt(xxHash32OfData); // xxHash32 of the original data
+ chunkSectionBuilder.put(compressedData); // Compressed data
+ chunkSectionBuilder.flip();
+
+ this.writeChunkDataRaw(chunkIndex, chunkSectionBuilder);
+ DirectBufferReleaser.clean(chunkSectionBuilder);
+ }
+
+ private @Nullable ByteBuffer readChunk(int x, int z) throws IOException {
+ final ByteBuffer compressed = this.readChunkDataRaw(getChunkIndex(x, z));
+
+ if (compressed == null) {
+ return null;
+ }
+
+ final int uncompressedLength = compressed.getInt(); // compressed length
+ final long timestamp = compressed.getLong(); // TODO use this timestamp for something?
+ final int dataXXHash32 = compressed.getInt(); // XXHash32 for validation
+
+ final ByteBuffer decompressed = this.decompress(this.ensureDirectBuffer(compressed), uncompressedLength);
+
+ DirectBufferReleaser.clean(compressed);
+
+ final IOException xxHash32CheckFailedEx = this.checkXXHash32(dataXXHash32, decompressed);
+ if (xxHash32CheckFailedEx != null) {
+ throw xxHash32CheckFailedEx; // prevent from loading
+ }
+
+ return decompressed;
+ }
+
+ private @NotNull ByteBuffer ensureDirectBuffer(@NotNull ByteBuffer buffer) {
+ if (buffer.isDirect()) {
+ return buffer;
+ }
+
+ ByteBuffer direct = ByteBuffer.allocateDirect(buffer.remaining());
+ int originalPosition = buffer.position();
+ direct.put(buffer);
+ direct.flip();
+ buffer.position(originalPosition);
+
+ return direct;
+ }
+
    /**
     * Zstd-compresses the remaining bytes of {@code input} into a freshly
     * allocated direct buffer. The input buffer's position and limit are
     * restored before returning.
     *
     * @throws IOException if compression fails
     */
    private @NotNull ByteBuffer compress(@NotNull ByteBuffer input) throws IOException {
        final int originalPosition = input.position();
        final int originalLimit = input.limit();

        try {
            byte[] inputArray;
            int inputLength = input.remaining();
            if (input.hasArray()) {
                // Use the backing array directly when it exactly covers the
                // remaining data; otherwise copy out just the relevant window.
                inputArray = input.array();
                int arrayOffset = input.arrayOffset() + input.position();
                if (arrayOffset != 0 || inputLength != inputArray.length) {
                    byte[] temp = new byte[inputLength];
                    System.arraycopy(inputArray, arrayOffset, temp, 0, inputLength);
                    inputArray = temp;
                }
            } else {
                // Direct buffer: drain into a heap array, then undo the position advance.
                inputArray = new byte[inputLength];
                input.get(inputArray);
                input.position(originalPosition);
            }

            byte[] compressed = com.github.luben.zstd.Zstd.compress(inputArray, this.compressionLevel);

            ByteBuffer result = ByteBuffer.allocateDirect(compressed.length);
            result.put(compressed);
            result.flip();

            return result;

        } catch (Exception e) {
            throw new IOException("Compression failed for input size: " + input.remaining(), e);
        } finally {
            // Always leave the caller's buffer exactly as we received it.
            input.position(originalPosition);
            input.limit(originalLimit);
        }
    }
+
+ private @NotNull ByteBuffer decompress(@NotNull ByteBuffer input, int originalSize) throws IOException {
+ final int originalPosition = input.position();
+ final int originalLimit = input.limit();
+
+ try {
+ byte[] inputArray;
+ int inputLength = input.remaining();
+
+ if (input.hasArray()) {
+ inputArray = input.array();
+ int arrayOffset = input.arrayOffset() + input.position();
+ if (arrayOffset != 0 || inputLength != inputArray.length) {
+ byte[] temp = new byte[inputLength];
+ System.arraycopy(inputArray, arrayOffset, temp, 0, inputLength);
+ inputArray = temp;
+ }
+ } else {
+ inputArray = new byte[inputLength];
+ input.get(inputArray);
+ input.position(originalPosition);
+ }
+
+ byte[] decompressed = com.github.luben.zstd.Zstd.decompress(inputArray, originalSize);
+
+ if (decompressed.length != originalSize) {
+ throw new IOException("Decompression size mismatch: expected " +
+ originalSize + ", got " + decompressed.length);
+ }
+
+ ByteBuffer result = ByteBuffer.allocateDirect(originalSize);
+ result.put(decompressed);
+ result.flip();
+
+ return result;
+
+ } catch (Exception e) {
+ throw new IOException("Decompression failed", e);
+ } finally {
+ input.position(originalPosition);
+ input.limit(originalLimit);
+ }
+ }
+
+ private @Nullable IOException checkXXHash32(long originalXXHash32, @NotNull ByteBuffer input) {
+ final int oldPositionOfInput = input.position();
+ final int currentXXHash32 = this.xxHash32.hash(input, this.xxHash32Seed);
+ input.position(oldPositionOfInput);
+
+ if (originalXXHash32 != currentXXHash32) {
+ return new IOException("XXHash32 check failed ! Expected: " + originalXXHash32 + ",but got: " + currentXXHash32);
+ }
+
+ return null;
+ }
+
    /** @return the on-disk path of this region file */
    @Override
    public Path getPath() {
        return this.filePath;
    }
+
+ @Override
+ public DataInputStream getChunkDataInputStream(@NotNull ChunkPos pos) throws IOException {
+ this.fileAccessLock.readLock().lock();
+ try {
+ final ByteBuffer data = this.readChunk(pos.x, pos.z);
+
+ if (data == null) {
+ return null;
+ }
+
+ final byte[] dataBytes = new byte[data.remaining()];
+ data.get(dataBytes);
+
+ DirectBufferReleaser.clean(data);
+
+ return new DataInputStream(new ByteArrayInputStream(dataBytes));
+ }finally {
+ this.fileAccessLock.readLock().unlock();
+ }
+ }
+
+ @Override
+ public boolean doesChunkExist(@NotNull ChunkPos pos) {
+ this.fileAccessLock.readLock().lock();
+ try {
+ return this.hasData(getChunkIndex(pos.x, pos.z));
+ }finally {
+ this.fileAccessLock.readLock().unlock();
+ }
+ }
+
    /**
     * Returns a stream that buffers chunk data in memory and writes it into
     * this region file when the stream is closed.
     */
    @Override
    public DataOutputStream getChunkDataOutputStream(ChunkPos pos) {
        return new DataOutputStream(new ChunkBufferHelper(pos));
    }
+
+ @Override
+ public void clear(@NotNull ChunkPos pos) throws IOException {
+ this.fileAccessLock.writeLock().lock();
+ try {
+ this.clearChunkData(getChunkIndex(pos.x, pos.z));
+ }finally {
+ this.fileAccessLock.writeLock().unlock();
+ }
+ }
+
+ @Override
+ public boolean hasChunk(@NotNull ChunkPos pos) {
+ this.fileAccessLock.readLock().lock();
+ try {
+ return this.hasData(getChunkIndex(pos.x, pos.z));
+ }finally {
+ this.fileAccessLock.readLock().unlock();
+ }
+ }
+
+ @Override
+ public void write(@NotNull ChunkPos pos, ByteBuffer buf) throws IOException {
+ this.fileAccessLock.writeLock().lock();
+ try {
+ this.writeChunk(pos.x, pos.z, buf);
+ }finally {
+ this.fileAccessLock.writeLock().unlock();
+ }
+ }
+
    // MCC oversized-chunk API; not applicable to the Linear format, so these
    // are inert stubs.
    @Override
    public CompoundTag getOversizedData(int x, int z) {
        return null;
    }

    @Override
    public boolean isOversized(int x, int z) {
        return false;
    }

    @Override
    public boolean recalculateHeader() {
        return false;
    }

    @Override
    public void setOversized(int x, int z, boolean oversized) {

    }
    // MCC end
+
    /**
     * Starts a chunk write for Moonrise's region I/O controller. The returned
     * WriteData closes the buffering stream once serialization is done, which
     * triggers the actual write into this file.
     */
    @Override
    public MoonriseRegionFileIO.RegionDataController.WriteData moonrise$startWrite(CompoundTag data, ChunkPos pos) {
        final DataOutputStream out = this.getChunkDataOutputStream(pos);

        return new ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO.RegionDataController.WriteData(
            data, ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO.RegionDataController.WriteData.WriteResult.WRITE,
            out, regionFile -> out.close()
        );
    }
+
+ @Override
+ public void flush() throws IOException {
+ this.fileAccessLock.writeLock().lock();
+ try {
+ this.flushInternal();
+ }finally {
+ this.fileAccessLock.writeLock().unlock();
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ this.fileAccessLock.writeLock().lock();
+ try {
+ this.closeInternal();
+ }finally {
+ this.fileAccessLock.writeLock().unlock();
+ }
+ }
+
+ private class Sector{
+ private final int index;
+ private long offset;
+ private long length;
+ private boolean hasData = false;
+
+ private Sector(int index, long offset, long length) {
+ this.index = index;
+ this.offset = offset;
+ this.length = length;
+ }
+
+ public @NotNull ByteBuffer read(@NotNull FileChannel channel) throws IOException {
+ final ByteBuffer result = ByteBuffer.allocateDirect((int) this.length);
+
+ channel.read(result, this.offset);
+ result.flip();
+
+ return result;
+ }
+
+ public void store(@NotNull ByteBuffer newData, @NotNull FileChannel channel) throws IOException {
+ this.hasData = true;
+ this.length = newData.remaining();
+ this.offset = currentAcquiredIndex;
+
+ BufferedLinearRegionFile.this.currentAcquiredIndex += this.length;
+
+ long offset = this.offset;
+ while (newData.hasRemaining()) {
+ offset += channel.write(newData, offset);
+ }
+ }
+
+ private @NotNull ByteBuffer getEncoded() {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(sizeOfSingle());
+
+ buffer.putLong(this.offset);
+ buffer.putLong(this.length);
+ buffer.put((byte) (this.hasData ? 1 : 0));
+ buffer.flip();
+
+ return buffer;
+ }
+
+ public void restoreFrom(@NotNull ByteBuffer buffer) {
+ this.offset = buffer.getLong();
+ this.length = buffer.getLong();
+ this.hasData = buffer.get() == 1;
+
+ if (this.length < 0 || this.offset < 0) {
+ throw new IllegalStateException("Invalid sector data: " + this);
+ }
+ }
+
+ public void clear() {
+ this.hasData = false;
+ }
+
+ public boolean hasData() {
+ return this.hasData;
+ }
+
+ static int sizeOfSingle() {
+ // offset + length hasData
+ return Long.BYTES * 2 + 1;
+ }
+ }
+
+ private class ChunkBufferHelper extends ByteArrayOutputStream {
+ private final ChunkPos pos;
+
+ private ChunkBufferHelper(ChunkPos pos) {
+ this.pos = pos;
+ }
+
+ @Override
+ public void close() throws IOException {
+ BufferedLinearRegionFile.this.fileAccessLock.writeLock().lock();
+ try {
+ ByteBuffer bytebuffer = ByteBuffer.wrap(this.buf, 0, this.count);
+
+ BufferedLinearRegionFile.this.writeChunk(this.pos.x, this.pos.z, bytebuffer);
+ }finally {
+ BufferedLinearRegionFile.this.fileAccessLock.writeLock().unlock();
+ }
+ }
+ }
+}

View File

@@ -1,174 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/functions/GlobalServerMemoryBar.java
@@ -1,0 +_,171 @@
+package me.earthme.luminol.functions;
+
+import com.google.common.collect.Maps;
+import com.mojang.logging.LogUtils;
+import io.papermc.paper.threadedregions.scheduler.ScheduledTask;
+import me.earthme.luminol.config.modules.misc.MembarConfig;
+import me.earthme.luminol.utils.NullPlugin;
+import net.kyori.adventure.bossbar.BossBar;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.minimessage.MiniMessage;
+import net.kyori.adventure.text.minimessage.tag.resolver.Placeholder;
+import org.bukkit.Bukkit;
+import org.bukkit.craftbukkit.entity.CraftPlayer;
+import org.bukkit.entity.Player;
+import org.jetbrains.annotations.NotNull;
+import org.slf4j.Logger;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryUsage;
+import java.util.*;
+
+public class GlobalServerMemoryBar {
+ protected static final NullPlugin NULL_PLUGIN = new NullPlugin();
+ protected static final Map<UUID, BossBar> uuid2Bossbars = Maps.newConcurrentMap();
+ protected static final Map<UUID, ScheduledTask> scheduledTasks = new HashMap<>();
+ private static final Logger logger = LogUtils.getLogger();
+ protected static volatile ScheduledTask scannerTask = null;
+
+ public static void init() {
+ cancelBarUpdateTask();
+
+ scannerTask = Bukkit.getGlobalRegionScheduler().runAtFixedRate(NULL_PLUGIN, unused -> {
+ try {
+ update();
+ } catch (Exception e) {
+ logger.error(e.getLocalizedMessage());
+ }
+ }, 1, MembarConfig.updateInterval);
+ }
+
+
+ public static void cancelBarUpdateTask() {
+ if (scannerTask == null || scannerTask.isCancelled()) {
+ return;
+ }
+
+ scannerTask.cancel();
+
+ for (ScheduledTask task : scheduledTasks.values()) {
+ if (!task.isCancelled()) {
+ task.cancel();
+ }
+ }
+ }
+
+ public static boolean isPlayerVisible(Player player) {
+ return ((CraftPlayer) player).getHandle().isMemBarVisible;
+ }
+
+ public static void setVisibilityForPlayer(Player target, boolean canSee) {
+ ((CraftPlayer) target).getHandle().isMemBarVisible = canSee;
+ }
+
+ private static void update() {
+ doUpdate();
+ cleanUp();
+ }
+
+ private static void cleanUp() {
+ final List<UUID> toCleanUp = new ArrayList<>();
+
+ for (Map.Entry<UUID, ScheduledTask> toCheck : scheduledTasks.entrySet()) {
+ if (toCheck.getValue().isCancelled()) {
+ toCleanUp.add(toCheck.getKey());
+ }
+ }
+
+ for (UUID uuid : toCleanUp) {
+ scheduledTasks.remove(uuid);
+ }
+ }
+
+ private static void doUpdate() {
+ for (Player player : Bukkit.getOnlinePlayers()) {
+ scheduledTasks.computeIfAbsent(player.getUniqueId(), unused -> createBossBarForPlayer(player));
+ }
+ }
+
+ private static ScheduledTask createBossBarForPlayer(Player apiPlayer) {
+ return apiPlayer.getScheduler().runAtFixedRate(NULL_PLUGIN, (unused) -> {
+ final UUID playerUUID = apiPlayer.getUniqueId();
+
+ if (!isPlayerVisible(apiPlayer)) {
+ final BossBar removed = uuid2Bossbars.remove(playerUUID);
+
+ if (removed != null) {
+ apiPlayer.hideBossBar(removed);
+ }
+
+ return;
+ }
+
+ MemoryUsage heap = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
+
+ long used = heap.getUsed();
+ long xmx = heap.getMax();
+
+ BossBar targetBossbar = uuid2Bossbars.computeIfAbsent(
+ playerUUID,
+ (unused1) -> BossBar.bossBar(Component.text(""), 0.0F, BossBar.Color.valueOf(MembarConfig.memColors.get(3)), BossBar.Overlay.NOTCHED_20)
+ );
+
+ apiPlayer.showBossBar(targetBossbar);
+
+ updateMembar(targetBossbar, used, xmx);
+ }, () -> {
+ final BossBar removed = uuid2Bossbars.remove(apiPlayer.getUniqueId());
+
+ if (removed != null) {
+ apiPlayer.hideBossBar(removed);
+ }
+ }, 1, MembarConfig.updateInterval);
+ }
+
+ private static void updateMembar(@NotNull BossBar bar, long used, long xmx) {
+ double percent = Math.max(Math.min((float) used / xmx, 1.0F), 0.0F);
+ bar.name(MiniMessage.miniMessage().deserialize(
+ MembarConfig.memBarFormat,
+ Placeholder.component("used", getMemoryComponent(used, xmx)),
+ Placeholder.component("available", getMaxMemComponent(xmx))
+ ));
+ bar.color(barColorFromMemory(percent));
+ bar.progress((float) percent);
+ }
+
+ private static @NotNull Component getMaxMemComponent(double max) {
+ final BossBar.Color colorBukkit = BossBar.Color.GREEN;
+ final String colorString = colorBukkit.name();
+
+ final String content = "<%s><text></%s>";
+ final String replaced = String.format(content, colorString, colorString);
+
+ return MiniMessage.miniMessage().deserialize(replaced, Placeholder.parsed("text", String.format("%.2f", max / (1024 * 1024))));
+ }
+
+ private static @NotNull Component getMemoryComponent(long used, long max) {
+ final BossBar.Color colorBukkit = barColorFromMemory(Math.max(Math.min((float) used / max, 1.0F), 0.0F));
+ final String colorString = colorBukkit.name();
+
+ final String content = "<%s><text></%s>";
+ final String replaced = String.format(content, colorString, colorString);
+
+ return MiniMessage.miniMessage().deserialize(replaced, Placeholder.parsed("text", String.format("%.2f", (double) used / (1024 * 1024))));
+ }
+
+ private static BossBar.Color barColorFromMemory(double memPercent) {
+ if (memPercent == -1) {
+ return BossBar.Color.valueOf(MembarConfig.memColors.get(3));
+ }
+
+ if (memPercent <= 50) {
+ return BossBar.Color.valueOf(MembarConfig.memColors.getFirst());
+ }
+
+ if (memPercent <= 70) {
+ return BossBar.Color.valueOf(MembarConfig.memColors.get(1));
+ }
+
+ return BossBar.Color.valueOf(MembarConfig.memColors.get(2));
+ }
+}

View File

@@ -1,188 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/functions/GlobalServerRegionBar.java
@@ -1,0 +_,185 @@
+package me.earthme.luminol.functions;
+
+import com.google.common.collect.Maps;
+import com.mojang.logging.LogUtils;
+import io.papermc.paper.threadedregions.ThreadedRegionizer;
+import io.papermc.paper.threadedregions.TickData;
+import io.papermc.paper.threadedregions.TickRegionScheduler;
+import io.papermc.paper.threadedregions.TickRegions;
+import io.papermc.paper.threadedregions.scheduler.ScheduledTask;
+import me.earthme.luminol.config.modules.misc.RegionBarConfig;
+import me.earthme.luminol.utils.NullPlugin;
+import net.kyori.adventure.bossbar.BossBar;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.minimessage.MiniMessage;
+import net.kyori.adventure.text.minimessage.tag.resolver.Placeholder;
+import org.bukkit.Bukkit;
+import org.bukkit.craftbukkit.entity.CraftPlayer;
+import org.bukkit.entity.Player;
+import org.jetbrains.annotations.NotNull;
+import org.slf4j.Logger;
+
+import java.text.DecimalFormat;
+import java.util.*;
+
+public class GlobalServerRegionBar {
+ protected static final NullPlugin NULL_PLUGIN = new NullPlugin();
+ protected static final Map<UUID, BossBar> uuid2Bossbars = Maps.newConcurrentMap();
+ protected static final Map<UUID, ScheduledTask> scheduledTasks = new HashMap<>();
+ private static final Logger logger = LogUtils.getLogger();
+ private static final ThreadLocal<DecimalFormat> ONE_DECIMAL_PLACES = ThreadLocal.withInitial(() -> new DecimalFormat("#,##0.0"));
+ protected static volatile ScheduledTask scannerTask = null;
+
+ public static void init() {
+ cancelBarUpdateTask();
+
+ scannerTask = Bukkit.getGlobalRegionScheduler().runAtFixedRate(NULL_PLUGIN, unused -> {
+ try {
+ update();
+ cleanUp();
+ } catch (Exception e) {
+ logger.error(e.getLocalizedMessage());
+ }
+ }, 1, RegionBarConfig.updateInterval);
+ }
+
+ public static void cancelBarUpdateTask() {
+ if (scannerTask == null || scannerTask.isCancelled()) {
+ return;
+ }
+
+ scannerTask.cancel();
+
+ for (ScheduledTask task : scheduledTasks.values()) {
+ if (!task.isCancelled()) {
+ task.cancel();
+ }
+ }
+ }
+
+ public static boolean isPlayerVisible(Player player) {
+ return ((CraftPlayer) player).getHandle().isRegionBarVisible;
+ }
+
+ public static void setVisibilityForPlayer(Player target, boolean canSee) {
+ ((CraftPlayer) target).getHandle().isRegionBarVisible = canSee;
+ }
+
+ private static void update() {
+ for (Player player : Bukkit.getOnlinePlayers()) {
+ scheduledTasks.computeIfAbsent(player.getUniqueId(), unused -> createBossBarForPlayer(player));
+ }
+ }
+
+ private static void cleanUp() {
+ final List<UUID> toCleanUp = new ArrayList<>();
+
+ for (Map.Entry<UUID, ScheduledTask> toCheck : scheduledTasks.entrySet()) {
+ if (toCheck.getValue().isCancelled()) {
+ toCleanUp.add(toCheck.getKey());
+ }
+ }
+
+ for (UUID uuid : toCleanUp) {
+ scheduledTasks.remove(uuid);
+ }
+ }
+
+ public static ScheduledTask createBossBarForPlayer(@NotNull Player apiPlayer) {
+ final UUID playerUUID = apiPlayer.getUniqueId();
+
+ return apiPlayer.getScheduler().runAtFixedRate(NULL_PLUGIN, (n) -> {
+ if (!isPlayerVisible(apiPlayer)) {
+ final BossBar removed = uuid2Bossbars.remove(playerUUID);
+
+ if (removed != null) {
+ apiPlayer.hideBossBar(removed);
+ }
+ return;
+ }
+
+ final ThreadedRegionizer.ThreadedRegion<TickRegions.TickRegionData, TickRegions.TickRegionSectionData> region = TickRegionScheduler.getCurrentRegion();
+ final TickData.TickReportData reportData = region.getData().getRegionSchedulingHandle().getTickReport5s(System.nanoTime());
+ final TickRegions.RegionStats regionStats = region.getData().getRegionStats();
+
+ BossBar targetBossbar = uuid2Bossbars.computeIfAbsent(
+ playerUUID,
+ unused -> BossBar.bossBar(Component.text(""), 0.0F, BossBar.Color.GREEN, BossBar.Overlay.NOTCHED_20)
+ );
+
+ apiPlayer.showBossBar(targetBossbar);
+
+ if (reportData != null) {
+ final double utilisation = reportData.utilisation();
+ final int chunkCount = regionStats.getChunkCount();
+ final int playerCount = regionStats.getPlayerCount();
+ final int entityCount = regionStats.getEntityCount();
+
+ updateRegionBar(utilisation, chunkCount, playerCount, entityCount, targetBossbar);
+ }
+ }, () -> {
+ final BossBar removed = uuid2Bossbars.remove(playerUUID); // Auto clean up it
+
+ if (removed != null) {
+ apiPlayer.hideBossBar(removed);
+ }
+ }, 1, RegionBarConfig.updateInterval);
+ }
+
+ private static void updateRegionBar(double utilisation, int chunks, int players, int entities, @NotNull BossBar bar) {
+ final double utilisationPercent = utilisation * 100.0;
+ final String formattedUtil = ONE_DECIMAL_PLACES.get().format(utilisationPercent);
+
+ bar.name(MiniMessage.miniMessage().deserialize(
+ RegionBarConfig.regionBarFormat,
+ Placeholder.component("util", getUtilComponent(formattedUtil)),
+ Placeholder.component("chunks", getChunksComponent(chunks)),
+ Placeholder.component("players", getPlayersComponent(players)),
+ Placeholder.component("entities", getEntitiesComponent(entities))
+ ));
+
+ bar.color(barColorFromUtil(utilisationPercent));
+ bar.progress((float) Math.min(1.0, Math.max(utilisation, 0)));
+ }
+
+ private static @NotNull Component getEntitiesComponent(int entities) {
+ final String content = "<text>";
+ return MiniMessage.miniMessage().deserialize(content, Placeholder.parsed("text", String.valueOf(entities)));
+ }
+
+ private static @NotNull Component getPlayersComponent(int players) {
+ final String content = "<text>";
+ return MiniMessage.miniMessage().deserialize(content, Placeholder.parsed("text", String.valueOf(players)));
+ }
+
+ private static @NotNull Component getChunksComponent(int chunks) {
+ final String content = "<text>";
+ return MiniMessage.miniMessage().deserialize(content, Placeholder.parsed("text", String.valueOf(chunks)));
+ }
+
+ private static @NotNull Component getUtilComponent(String formattedUtil) {
+ final BossBar.Color colorBukkit = barColorFromUtil(Double.parseDouble(formattedUtil));
+ final String colorString = colorBukkit.name();
+
+ final String content = "<%s><text></%s>";
+ final String replaced = String.format(content, colorString, colorString);
+
+ return MiniMessage.miniMessage().deserialize(replaced, Placeholder.parsed("text", formattedUtil + "%"));
+ }
+
+ private static BossBar.Color barColorFromUtil(double util) {
+ if (util >= 100) {
+ return BossBar.Color.valueOf(RegionBarConfig.utilColors.get(3)); // PURPLE
+ }
+
+ if (util >= 70) {
+ return BossBar.Color.valueOf(RegionBarConfig.utilColors.get(2)); // RED
+ }
+
+ if (util >= 50) {
+ return BossBar.Color.valueOf(RegionBarConfig.utilColors.get(1)); // YELLOW
+ }
+
+ return BossBar.Color.valueOf(RegionBarConfig.utilColors.get(0)); // GREEN
+ }
+}

View File

@@ -1,243 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/functions/GlobalServerTpsBar.java
@@ -1,0 +_,240 @@
+package me.earthme.luminol.functions;
+
+import com.google.common.collect.Maps;
+import com.mojang.logging.LogUtils;
+import io.papermc.paper.threadedregions.ThreadedRegionizer;
+import io.papermc.paper.threadedregions.TickData;
+import io.papermc.paper.threadedregions.TickRegionScheduler;
+import io.papermc.paper.threadedregions.TickRegions;
+import io.papermc.paper.threadedregions.scheduler.ScheduledTask;
+import me.earthme.luminol.config.modules.misc.TpsBarConfig;
+import me.earthme.luminol.utils.NullPlugin;
+import net.kyori.adventure.bossbar.BossBar;
+import net.kyori.adventure.text.Component;
+import net.kyori.adventure.text.minimessage.MiniMessage;
+import net.kyori.adventure.text.minimessage.tag.resolver.Placeholder;
+import org.bukkit.Bukkit;
+import org.bukkit.craftbukkit.entity.CraftPlayer;
+import org.bukkit.entity.Player;
+import org.jetbrains.annotations.NotNull;
+import org.slf4j.Logger;
+
+import java.util.*;
+
/**
 * Shows a per-player boss bar with the current region's TPS, MSPT, the
 * player's ping and nearby chunk "hotness". A global scanner attaches one
 * update task per online player; each task hides or refreshes that player's
 * bar.
 */
public class GlobalServerTpsBar {
    protected static final NullPlugin NULL_PLUGIN = new NullPlugin();
    // Boss bars currently shown, keyed by player UUID.
    protected static final Map<UUID, BossBar> uuid2Bossbars = Maps.newConcurrentMap();
    // Per-player update tasks. NOTE(review): plain HashMap; all mutation appears
    // to happen from the global-region scheduler task — confirm before touching
    // this map from any other thread.
    protected static final Map<UUID, ScheduledTask> scheduledTasks = new HashMap<>();
    private static final Logger logger = LogUtils.getLogger();
    protected static volatile ScheduledTask scannerTask = null;

    /** (Re)starts the periodic scanner that attaches a bar task to every online player. */
    public static void init() {
        cancelBarUpdateTask();

        scannerTask = Bukkit.getGlobalRegionScheduler().runAtFixedRate(NULL_PLUGIN, unused -> {
            try {
                update();
                cleanUp();
            } catch (Exception e) {
                logger.error(e.getLocalizedMessage());
            }
        }, 1, TpsBarConfig.updateInterval);
    }

    /** Cancels the scanner and every per-player bar task. */
    public static void cancelBarUpdateTask() {
        if (scannerTask == null || scannerTask.isCancelled()) {
            return;
        }

        scannerTask.cancel();

        for (ScheduledTask task : scheduledTasks.values()) {
            if (!task.isCancelled()) {
                task.cancel();
            }
        }
    }

    public static boolean isPlayerVisible(Player player) {
        return ((CraftPlayer) player).getHandle().isTpsBarVisible;
    }

    public static void setVisibilityForPlayer(Player target, boolean canSee) {
        ((CraftPlayer) target).getHandle().isTpsBarVisible = canSee;
    }

    private static void update() {
        for (Player player : Bukkit.getOnlinePlayers()) {
            scheduledTasks.computeIfAbsent(player.getUniqueId(), unused -> createBossBarForPlayer(player));
        }
    }

    /** Drops bookkeeping for tasks that have been cancelled (e.g. player left). */
    private static void cleanUp() {
        final List<UUID> toCleanUp = new ArrayList<>();

        for (Map.Entry<UUID, ScheduledTask> toCheck : scheduledTasks.entrySet()) {
            if (toCheck.getValue().isCancelled()) {
                toCleanUp.add(toCheck.getKey());
            }
        }

        for (UUID uuid : toCleanUp) {
            scheduledTasks.remove(uuid);
        }
    }

    public static ScheduledTask createBossBarForPlayer(@NotNull Player apiPlayer) {
        final UUID playerUUID = apiPlayer.getUniqueId();

        return apiPlayer.getScheduler().runAtFixedRate(NULL_PLUGIN, (n) -> {
            if (!isPlayerVisible(apiPlayer)) {
                final BossBar removed = uuid2Bossbars.remove(playerUUID);

                if (removed != null) {
                    apiPlayer.hideBossBar(removed);
                }
                return;
            }

            // NOTE(review): assumes this task always runs on a region thread so
            // getCurrentRegion() is non-null — confirm against the scheduler.
            final ThreadedRegionizer.ThreadedRegion<TickRegions.TickRegionData, TickRegions.TickRegionSectionData> region = TickRegionScheduler.getCurrentRegion();
            final TickData.TickReportData reportData = region.getData().getRegionSchedulingHandle().getTickReport5s(System.nanoTime());


            BossBar targetBossbar = uuid2Bossbars.computeIfAbsent(
                playerUUID,
                unused -> BossBar.bossBar(Component.text(""), 0.0F, BossBar.Color.valueOf(TpsBarConfig.tpsColors.get(3)), BossBar.Overlay.NOTCHED_20)
            );

            apiPlayer.showBossBar(targetBossbar);

            if (reportData != null) {
                final TickData.SegmentData tpsData = reportData.tpsData().segmentAll();
                final double mspt = reportData.timePerTickData().segmentAll().average() / 1.0E6;

                updateTpsBar(tpsData.average(), mspt, targetBossbar, apiPlayer);
            }
        }, () -> {
            final BossBar removed = uuid2Bossbars.remove(playerUUID); // Auto clean up it

            if (removed != null) {
                apiPlayer.hideBossBar(removed);
            }
        }, 1, TpsBarConfig.updateInterval);
    }

    /** Refreshes name, color and progress; progress tracks MSPT against the 50ms budget. */
    private static void updateTpsBar(double tps, double mspt, @NotNull BossBar bar, @NotNull Player player) {
        bar.name(MiniMessage.miniMessage().deserialize(
            TpsBarConfig.tpsBarFormat,
            Placeholder.component("tps", getTpsComponent(tps)),
            Placeholder.component("mspt", getMsptComponent(mspt)),
            Placeholder.component("ping", getPingComponent(player.getPing())),
            Placeholder.component("chunkhot", getChunkHotComponent(player.getNearbyChunkHot()))
        ));
        bar.color(barColorFromTps(tps));
        bar.progress((float) Math.min((float) 1, Math.max(mspt / 50, 0)));
    }

    private static @NotNull Component getPingComponent(int ping) {
        final BossBar.Color colorBukkit = barColorFromPing(ping);
        final String colorString = colorBukkit.name();

        final String content = "<%s><text></%s>";
        final String replaced = String.format(content, colorString, colorString);

        return MiniMessage.miniMessage().deserialize(replaced, Placeholder.parsed("text", String.valueOf(ping)));
    }

    /** Picks a configured color from the ping in ms; -1 means "unknown". */
    private static BossBar.Color barColorFromPing(int ping) {
        if (ping == -1) {
            return BossBar.Color.valueOf(TpsBarConfig.pingColors.get(3));
        }

        if (ping <= 80) {
            return BossBar.Color.valueOf(TpsBarConfig.pingColors.get(0));
        }

        if (ping <= 160) {
            return BossBar.Color.valueOf(TpsBarConfig.pingColors.get(1));
        }

        return BossBar.Color.valueOf(TpsBarConfig.pingColors.get(2));
    }

    private static @NotNull Component getMsptComponent(double mspt) {
        final BossBar.Color colorBukkit = barColorFromMspt(mspt);
        final String colorString = colorBukkit.name();

        final String content = "<%s><text></%s>";
        final String replaced = String.format(content, colorString, colorString);

        return MiniMessage.miniMessage().deserialize(replaced, Placeholder.parsed("text", String.format("%.2f", mspt)));
    }

    private static @NotNull Component getChunkHotComponent(long chunkHot) {
        final BossBar.Color colorBukkit = barColorFromChunkHot(chunkHot);
        final String colorString = colorBukkit.name();

        final String content = "<%s><text></%s>";
        final String replaced = String.format(content, colorString, colorString);

        return MiniMessage.miniMessage().deserialize(replaced, Placeholder.parsed("text", String.valueOf(chunkHot)));
    }

    /** Picks a configured color from the chunk-hot value; -1 means "unknown". */
    private static BossBar.Color barColorFromChunkHot(long chunkHot) {
        if (chunkHot == -1) {
            return BossBar.Color.valueOf(TpsBarConfig.chunkHotColors.get(3));
        }

        if (chunkHot <= 300000L) {
            return BossBar.Color.valueOf(TpsBarConfig.chunkHotColors.get(0));
        }

        if (chunkHot <= 500000L) {
            return BossBar.Color.valueOf(TpsBarConfig.chunkHotColors.get(1));
        }

        return BossBar.Color.valueOf(TpsBarConfig.chunkHotColors.get(2));
    }

    // NOTE(review): intentionally reuses tpsColors for MSPT thresholds — there
    // is no dedicated msptColors list in TpsBarConfig as far as this file shows.
    private static BossBar.Color barColorFromMspt(double mspt) {
        if (mspt == -1) {
            return BossBar.Color.valueOf(TpsBarConfig.tpsColors.get(3));
        }

        if (mspt <= 25) {
            return BossBar.Color.valueOf(TpsBarConfig.tpsColors.get(0));
        }

        if (mspt <= 50) {
            return BossBar.Color.valueOf(TpsBarConfig.tpsColors.get(1));
        }

        return BossBar.Color.valueOf(TpsBarConfig.tpsColors.get(2));
    }

    private static @NotNull Component getTpsComponent(double tps) {
        final BossBar.Color colorBukkit = barColorFromTps(tps);
        final String colorString = colorBukkit.name();

        final String content = "<%s><text></%s>";
        final String replaced = String.format(content, colorString, colorString);

        return MiniMessage.miniMessage().deserialize(replaced, Placeholder.parsed("text", String.format("%.2f", tps)));
    }

    /** Picks a configured color from the TPS value; -1 means "unknown". */
    private static BossBar.Color barColorFromTps(double tps) {
        if (tps == -1) {
            return BossBar.Color.valueOf(TpsBarConfig.tpsColors.get(3));
        }

        if (tps >= 18) {
            return BossBar.Color.valueOf(TpsBarConfig.tpsColors.get(0));
        }

        if (tps >= 15) {
            return BossBar.Color.valueOf(TpsBarConfig.tpsColors.get(1));
        }

        return BossBar.Color.valueOf(TpsBarConfig.tpsColors.get(2));
    }
}

View File

@@ -1,37 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/utils/DirectBufferReleaser.java
@@ -1,0 +_,34 @@
+package me.earthme.luminol.utils;
+
+import org.jetbrains.annotations.NotNull;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.nio.ByteBuffer;
+
/**
 * Eagerly releases the native memory behind direct {@link ByteBuffer}s via
 * {@code sun.misc.Unsafe#invokeCleaner}, instead of waiting for the GC to
 * reclaim the buffer object. Requires the {@code jdk.unsupported} module.
 */
public final class DirectBufferReleaser {
    private static final Method CLEANER_METHOD;
    private static final Object UNSAFE;

    static {
        try {
            Class<?> unsafeClass = Class.forName("sun.misc.Unsafe");
            Field theUnsafe = unsafeClass.getDeclaredField("theUnsafe");
            theUnsafe.setAccessible(true);
            UNSAFE = theUnsafe.get(null);
            CLEANER_METHOD = unsafeClass.getMethod("invokeCleaner", ByteBuffer.class);
        } catch (Exception ex) {
            throw new RuntimeException("Unsafe init failed", ex);
        }
    }

    private DirectBufferReleaser() {
        // static utility; not instantiable
    }

    /**
     * Frees the native memory behind {@code buffer} if it is a direct buffer.
     * The buffer must not be used afterwards. Duplicates/slices are rejected
     * by {@code invokeCleaner}, in which case this returns {@code false}.
     *
     * @param buffer the (non-null) buffer to release
     * @return {@code true} if the memory was released, {@code false} otherwise
     */
    public static boolean clean(ByteBuffer buffer) {
        if (!buffer.isDirect()) return false;
        try {
            CLEANER_METHOD.invoke(UNSAFE, buffer);
            return true;
        } catch (Exception e) {
            return false;
        }
    }
}

View File

@@ -1,45 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/utils/EnumRegionFormat.java
@@ -1,0 +_,42 @@
+package me.earthme.luminol.utils;
+
+import abomination.LinearRegionFile;
+import me.earthme.luminol.config.modules.misc.RegionFormatConfig;
+import me.earthme.luminol.data.BufferedLinearRegionFile;
+import net.minecraft.world.level.chunk.storage.RegionFile;
+import org.jetbrains.annotations.Nullable;
+
/**
 * Region-file storage formats selectable via configuration. Each constant
 * carries its config name, its file-name argument, and a factory that builds
 * the corresponding region-file implementation.
 */
public enum EnumRegionFormat {
    // Vanilla Anvil region files.
    MCA("mca", "mca", (info) -> new RegionFile(info.info(), info.filePath(), info.folder(), info.sync())),
    // Linear v2 format.
    LINEAR_V2("linear_v2", "linear", (info) -> new LinearRegionFile(info.info(), info.filePath(), info.folder(), info.sync(), RegionFormatConfig.linearCompressionLevel)),
    // Buffered linear format.
    B_LINEAR("b_linear", "b_linear", (info) -> new BufferedLinearRegionFile(info.filePath(), RegionFormatConfig.linearCompressionLevel));

    private final String name;     // config-facing identifier
    private final String argument; // file-name argument for this format
    private final IRegionCreateFunction creator;

    EnumRegionFormat(String name, String argument, IRegionCreateFunction creator) {
        this.name = name;
        this.argument = argument;
        this.creator = creator;
    }

    /**
     * Looks up a format by its configured name, case-insensitively.
     *
     * @return the matching format, or {@code null} if none matches
     */
    @Nullable
    public static EnumRegionFormat fromString(String string) {
        for (EnumRegionFormat format : values()) {
            if (format.name.equalsIgnoreCase(string)) {
                return format;
            }
        }

        return null;
    }

    public IRegionCreateFunction getCreator() {
        return this.creator;
    }

    public String getArgument() {
        return this.argument;
    }
}

View File

@@ -1,12 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/utils/IRegionCreateFunction.java
@@ -1,0 +_,9 @@
+package me.earthme.luminol.utils;
+
+import abomination.IRegionFile;
+
+import java.io.IOException;
+
/**
 * Factory that builds a concrete {@code IRegionFile} implementation from the
 * given creation info; used by {@code EnumRegionFormat} constants.
 */
public interface IRegionCreateFunction {
    /** Creates the region file; may fail with an {@link IOException} on I/O errors. */
    IRegionFile create(RegionCreatorInfo info) throws IOException;
}

View File

@@ -1,155 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/utils/NullPlugin.java
@@ -1,0 +_,152 @@
package me.earthme.luminol.utils;

import org.bukkit.Server;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.generator.BiomeProvider;
import org.bukkit.generator.ChunkGenerator;
import org.bukkit.plugin.PluginBase;
import org.bukkit.plugin.PluginDescriptionFile;
import org.bukkit.plugin.PluginLoader;
import org.bukkit.plugin.PluginLogger;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.io.InputStream;
import java.util.List;

/**
 * Minimal {@link org.bukkit.plugin.Plugin} stand-in named "Minecraft".
 * Only the description/meta and the enabled flag are functional; every other
 * Plugin method throws {@link UnsupportedOperationException}.
 * Presumably used where internal (nms) code must hand a Plugin instance to a
 * Bukkit API — confirm against call sites.
 */
public class NullPlugin extends PluginBase {
    private final String pluginName;
    private boolean enabled = true; // mutable so callers can toggle the reported state via setEnabled(...)
    private PluginDescriptionFile pdf; // also served as the PluginMeta by getPluginMeta()

    public NullPlugin() {
        this.pluginName = "Minecraft";
        pdf = new PluginDescriptionFile(pluginName, "1.0", "nms");
    }

    // Everything that throws below is intentionally unsupported: this object
    // exists only to satisfy APIs that require a Plugin reference.
    @Override
    public File getDataFolder() {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public PluginDescriptionFile getDescription() {
        return pdf;
    }

    // Paper start
    @Override
    public io.papermc.paper.plugin.configuration.PluginMeta getPluginMeta() {
        return pdf;
    }

    @Override
    public FileConfiguration getConfig() {
        throw new UnsupportedOperationException("Not supported.");
    }
    // Paper end

    @Override
    public InputStream getResource(String filename) {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public void saveConfig() {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public void saveDefaultConfig() {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public void saveResource(String resourcePath, boolean replace) {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public void reloadConfig() {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public PluginLogger getLogger() {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public PluginLoader getPluginLoader() {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public Server getServer() {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    // Not part of the Plugin interface; lets internal code flip the reported state.
    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    @Override
    public void onDisable() {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public void onLoad() {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public void onEnable() {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public boolean isNaggable() {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public void setNaggable(boolean canNag) {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public ChunkGenerator getDefaultWorldGenerator(String worldName, String id) {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public @Nullable BiomeProvider getDefaultBiomeProvider(@NotNull String worldName, @Nullable String id) {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public List<String> onTabComplete(CommandSender sender, Command command, String alias, String[] args) {
        throw new UnsupportedOperationException("Not supported.");
    }

    // Paper start - lifecycle events
    @Override
    public @NotNull io.papermc.paper.plugin.lifecycle.event.LifecycleEventManager<org.bukkit.plugin.Plugin> getLifecycleManager() {
        throw new UnsupportedOperationException("Not supported.");
    }
    // Paper end - lifecycle events
}

View File

@@ -1,11 +0,0 @@
--- /dev/null
+++ b/src/main/java/me/earthme/luminol/utils/RegionCreatorInfo.java
@@ -1,0 +_,8 @@
package me.earthme.luminol.utils;

import net.minecraft.world.level.chunk.storage.RegionStorageInfo;

import java.nio.file.Path;

/**
 * Everything needed to open one region file; consumed by
 * {@link IRegionCreateFunction#create}.
 *
 * @param info     vanilla storage-key metadata for this region store
 * @param filePath path of the region file itself
 * @param folder   directory containing the region files
 * @param sync     passed through as the dsync flag of the underlying format
 */
public record RegionCreatorInfo(RegionStorageInfo info, Path filePath, Path folder, boolean sync) {
}

View File

@@ -1,97 +0,0 @@
--- /dev/null
+++ b/src/main/java/su/plo/matter/Globals.java
@@ -1,0 +_,94 @@
package su.plo.matter;

import com.google.common.collect.Iterables;
import net.minecraft.server.level.ServerLevel;

import java.math.BigInteger;
import java.security.SecureRandom;
import java.util.Optional;

/**
 * Secure-seed global state: the extended 1024-bit world seed plus the index
 * of the dimension currently generating on this thread. Consumed by
 * {@link WorldgenCryptoRandom} as hash input.
 */
public class Globals {
    /** Number of 64-bit longs making up the extended world seed. */
    public static final int WORLD_SEED_LONGS = 16;
    /** Total seed width in bits (16 * 64 = 1024). */
    public static final int WORLD_SEED_BITS = WORLD_SEED_LONGS * 64;

    // Shared mutable state, filled by setupGlobals(...) before generation runs.
    public static final long[] worldSeed = new long[WORLD_SEED_LONGS];
    // Dimension index for the current thread; mixed into the hash so different
    // dimensions get independent feature placement.
    public static final ThreadLocal<Integer> dimension = ThreadLocal.withInitial(() -> 0);

    /**
     * Per-feature salts mixed into the hash input. The ordinal is what gets
     * hashed ({@code WorldgenCryptoRandom.setSecureSeed}), so the declaration
     * order must never change.
     */
    public enum Salt {
        UNDEFINED,
        BASTION_FEATURE,
        WOODLAND_MANSION_FEATURE,
        MINESHAFT_FEATURE,
        BURIED_TREASURE_FEATURE,
        NETHER_FORTRESS_FEATURE,
        PILLAGER_OUTPOST_FEATURE,
        GEODE_FEATURE,
        NETHER_FOSSIL_FEATURE,
        OCEAN_MONUMENT_FEATURE,
        RUINED_PORTAL_FEATURE,
        POTENTIONAL_FEATURE,
        GENERATE_FEATURE,
        JIGSAW_PLACEMENT,
        STRONGHOLDS,
        POPULATION,
        DECORATION,
        SLIME_CHUNK
    }

    /**
     * Captures the level's feature seed and this thread's dimension index
     * before generation. No-op unless secure seeds are enabled in config.
     */
    public static void setupGlobals(ServerLevel world) {
        if (!me.earthme.luminol.config.modules.misc.SecureSeedConfig.enabled) return;

        long[] seed = world.getServer().getWorldData().worldGenOptions().featureSeed();
        System.arraycopy(seed, 0, worldSeed, 0, WORLD_SEED_LONGS);
        int worldIndex = Iterables.indexOf(world.getServer().levelKeys(), it -> it == world.dimension());
        if (worldIndex == -1)
            worldIndex = world.getServer().levelKeys().size(); // if we are in world construction it may not have been added to the map yet
        dimension.set(worldIndex);
    }

    /** Generates a fresh 1024-bit seed from a cryptographically secure RNG. */
    public static long[] createRandomWorldSeed() {
        long[] seed = new long[WORLD_SEED_LONGS];
        SecureRandom rand = new SecureRandom();
        for (int i = 0; i < WORLD_SEED_LONGS; i++) {
            seed[i] = rand.nextLong();
        }
        return seed;
    }

    /**
     * Parses a seed given as a 1024-character binary string into 16 longs
     * (inverse of {@link #seedToString}).
     *
     * @return {@code Optional.empty()} when the input is empty
     * @throws IllegalArgumentException when the length is not 1024
     * @throws NumberFormatException    when a character is not '0'/'1' (from BigInteger)
     */
    public static Optional<long[]> parseSeed(String seedStr) {
        if (seedStr.isEmpty()) return Optional.empty();

        if (seedStr.length() != WORLD_SEED_BITS) {
            throw new IllegalArgumentException("Secure seed length must be " + WORLD_SEED_BITS + "-bit but found " + seedStr.length() + "-bit.");
        }

        long[] seed = new long[WORLD_SEED_LONGS];

        for (int i = 0; i < WORLD_SEED_LONGS; i++) {
            int start = i * 64;
            int end = start + 64;
            String seedSection = seedStr.substring(start, end);

            // Radix-2 parse; longValue() truncates to the low 64 bits, which
            // preserves the bit pattern for "negative" longs.
            BigInteger seedInDecimal = new BigInteger(seedSection, 2);
            seed[i] = seedInDecimal.longValue();
        }

        return Optional.of(seed);
    }

    /** Renders the seed as a 1024-character binary string (inverse of {@link #parseSeed}). */
    public static String seedToString(long[] seed) {
        StringBuilder sb = new StringBuilder();

        for (long longV : seed) {
            // Render each long as exactly 64 binary digits, left-padding with '0'
            // (Long.toBinaryString omits leading zeros).
            String binaryStr = String.format("%64s", Long.toBinaryString(longV)).replace(' ', '0');

            sb.append(binaryStr);
        }

        return sb.toString();
    }
}

View File

@@ -1,76 +0,0 @@
--- /dev/null
+++ b/src/main/java/su/plo/matter/Hashing.java
@@ -1,0 +_,73 @@
+package su.plo.matter;
+
/**
 * BLAKE2b-style compression used to derive feature randomness from the
 * extended world seed.
 *
 * <p>Based on the BLAKE2b round function
 * (https://en.wikipedia.org/wiki/BLAKE_(hash_function),
 * https://github.com/bcgit/bc-java/blob/master/core/src/main/java/org/bouncycastle/crypto/digests/Blake2bDigest.java).
 *
 * <p>Note: unlike standard BLAKE2b, state word 14 is only initialized for
 * final blocks; for non-final blocks it keeps whatever the caller's scratch
 * array held. This quirk is load-bearing for output compatibility.
 */
public class Hashing {

    /** BLAKE2b initialization vector. */
    private final static long[] IV = {
        0x6a09e667f3bcc908L, 0xbb67ae8584caa73bL, 0x3c6ef372fe94f82bL,
        0xa54ff53a5f1d36f1L, 0x510e527fade682d1L, 0x9b05688c2b3e6c1fL,
        0x1f83d9abfb41bd6bL, 0x5be0cd19137e2179L
    };

    /** Message schedule: which message words feed the 8 G calls of each of the 12 rounds. */
    private final static byte[][] SIGMA = {
        {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15},
        {14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3},
        {11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4},
        {7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8},
        {9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13},
        {2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9},
        {12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11},
        {13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10},
        {6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5},
        {10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0},
        {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15},
        {14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3}
    };

    /** State-word quadruples for one round's 8 G calls: 4 column steps, then 4 diagonal steps. */
    private final static int[][] MIX = {
        {0, 4, 8, 12}, {1, 5, 9, 13}, {2, 6, 10, 14}, {3, 7, 11, 15},
        {0, 5, 10, 15}, {1, 6, 11, 12}, {2, 7, 8, 13}, {3, 4, 9, 14}
    };

    /**
     * Hashes the 1024-bit world seed into an 8-long chain value.
     * The xor of 0x01010040 mirrors the BLAKE2b parameter block
     * (digest length 64, fanout 1, depth 1).
     */
    public static long[] hashWorldSeed(long[] worldSeed) {
        long[] chain = IV.clone();
        chain[0] ^= 0x01010040;
        hash(worldSeed, chain, new long[16], 0, false);
        return chain;
    }

    /**
     * One compression step: folds a 16-long message block into the 8-long
     * chain value (updated in place), using {@code internalState} as scratch.
     *
     * @param messageOffset byte-counter word mixed into state word 12
     * @param isFinal       marks the last block (inverts state word 14)
     */
    public static void hash(long[] message, long[] chainValue, long[] internalState, long messageOffset, boolean isFinal) {
        assert message.length == 16;
        assert chainValue.length == 8;
        assert internalState.length == 16;

        // Working state = chain value | IV[0..3] | counter/flag words.
        System.arraycopy(chainValue, 0, internalState, 0, chainValue.length);
        System.arraycopy(IV, 0, internalState, chainValue.length, 4);
        internalState[12] = messageOffset ^ IV[4];
        internalState[13] = IV[5];
        if (isFinal) internalState[14] = ~IV[6];
        internalState[15] = IV[7];

        for (int round = 0; round < 12; round++) {
            byte[] schedule = SIGMA[round];
            for (int call = 0; call < 8; call++) {
                int[] pos = MIX[call];
                G(message[schedule[2 * call]], message[schedule[2 * call + 1]],
                        pos[0], pos[1], pos[2], pos[3], internalState);
            }
        }

        // Finalization: xor the two state halves back into the chain value.
        for (int i = 0; i < 8; i++) {
            chainValue[i] ^= internalState[i] ^ internalState[i + 8];
        }
    }

    /** BLAKE2b G mixing function; rotations 24 and 63 replace BLAKE's 25 and 11. */
    private static void G(long m1, long m2, int posA, int posB, int posC, int posD, long[] state) {
        state[posA] += state[posB] + m1;
        state[posD] = Long.rotateRight(state[posD] ^ state[posA], 32);
        state[posC] += state[posD];
        state[posB] = Long.rotateRight(state[posB] ^ state[posC], 24);
        state[posA] += state[posB] + m2;
        state[posD] = Long.rotateRight(state[posD] ^ state[posA], 16);
        state[posC] += state[posD];
        state[posB] = Long.rotateRight(state[posB] ^ state[posC], 63);
    }
}

View File

@@ -1,162 +0,0 @@
--- /dev/null
+++ b/src/main/java/su/plo/matter/WorldgenCryptoRandom.java
@@ -1,0 +_,159 @@
package su.plo.matter;

import net.minecraft.util.Mth;
import net.minecraft.util.RandomSource;
import net.minecraft.world.level.levelgen.LegacyRandomSource;
import net.minecraft.world.level.levelgen.WorldgenRandom;
import org.jetbrains.annotations.NotNull;

import java.util.Arrays;

/**
 * Cryptographically seeded replacement for {@link WorldgenRandom}: output bits
 * come from BLAKE2b-style hashes of (world seed, coords, dimension, salt,
 * counter) instead of a reversible LCG, so feature placement cannot be
 * reverse-engineered from observed worlds.
 */
public class WorldgenCryptoRandom extends WorldgenRandom {
    // hash the world seed to guard against badly chosen world seeds
    private static final long[] HASHED_ZERO_SEED = Hashing.hashWorldSeed(new long[Globals.WORLD_SEED_LONGS]);
    // Per-thread cache so an unchanged world seed is not re-hashed per instance.
    private static final ThreadLocal<long[]> LAST_SEEN_WORLD_SEED = ThreadLocal.withInitial(() -> new long[Globals.WORLD_SEED_LONGS]);
    private static final ThreadLocal<long[]> HASHED_WORLD_SEED = ThreadLocal.withInitial(() -> HASHED_ZERO_SEED);

    private final long[] worldSeed = new long[Globals.WORLD_SEED_LONGS]; // snapshot of Globals.worldSeed at seeding time
    private final long[] randomBits = new long[8];   // current 512-bit block of output bits
    private int randomBitIndex;                      // bit cursor into randomBits; >= MAX forces a rehash
    private static final int MAX_RANDOM_BIT_INDEX = 64 * 8;
    private static final int LOG2_MAX_RANDOM_BIT_INDEX = 9; // log2(512), used by consumeCount
    private long counter;                            // block counter, mixed in as message[3]
    private final long[] message = new long[16];     // hash input: coords, dimension|salt, type salt, counter
    private final long[] cachedInternalState = new long[16]; // scratch reused across hash calls

    public WorldgenCryptoRandom(int x, int z, Globals.Salt typeSalt, long salt) {
        super(new LegacyRandomSource(0L)); // superclass RNG is a dummy; all bits come from getBits()
        // typeSalt == null is used internally by fork() to skip seeding.
        if (typeSalt != null) {
            this.setSecureSeed(x, z, typeSalt, salt);
        }
    }

    /**
     * Re-seeds from (position, dimension, salts): packs the hash message,
     * resets the block counter, and marks the current bit block exhausted so
     * the next draw hashes fresh bits.
     */
    public void setSecureSeed(int x, int z, Globals.Salt typeSalt, long salt) {
        System.arraycopy(Globals.worldSeed, 0, this.worldSeed, 0, Globals.WORLD_SEED_LONGS);
        message[0] = ((long) x << 32) | ((long) z & 0xffffffffL);
        message[1] = ((long) Globals.dimension.get() << 32) | ((long) salt & 0xffffffffL);
        message[2] = typeSalt.ordinal();
        message[3] = counter = 0;
        randomBitIndex = MAX_RANDOM_BIT_INDEX;
    }

    // Returns the hashed world seed, re-hashing only when this thread last saw
    // a different seed.
    private long[] getHashedWorldSeed() {
        if (!Arrays.equals(worldSeed, LAST_SEEN_WORLD_SEED.get())) {
            HASHED_WORLD_SEED.set(Hashing.hashWorldSeed(worldSeed));
            System.arraycopy(worldSeed, 0, LAST_SEEN_WORLD_SEED.get(), 0, Globals.WORLD_SEED_LONGS);
        }
        return HASHED_WORLD_SEED.get();
    }

    // Produces the next 512-bit block: chain value = hashed world seed,
    // message[3] = incrementing counter.
    private void moreRandomBits() {
        message[3] = counter++;
        System.arraycopy(getHashedWorldSeed(), 0, randomBits, 0, 8);
        Hashing.hash(message, randomBits, cachedInternalState, 64, true);
    }

    // Extracts `count` bits from the block, refilling and stitching across the
    // block boundary when the read straddles it.
    private long getBits(int count) {
        if (randomBitIndex >= MAX_RANDOM_BIT_INDEX) {
            moreRandomBits();
            randomBitIndex -= MAX_RANDOM_BIT_INDEX;
        }

        int alignment = randomBitIndex & 63;
        // Fast path: the whole read fits inside one 64-bit word.
        if ((randomBitIndex >>> 6) == ((randomBitIndex + count) >>> 6)) {
            long result = (randomBits[randomBitIndex >>> 6] >>> alignment) & ((1L << count) - 1);
            randomBitIndex += count;
            return result;
        } else {
            // Slow path: take the tail of the current word, then the head of
            // the next (possibly freshly hashed) word.
            // NOTE(review): when count == 64 and alignment == 0, `1L << 64`
            // wraps to 1 in Java, so the mask is 0 and the first word's bits
            // are dropped — verify against upstream Secure Seed before
            // changing; a "fix" would alter generated worlds.
            long result = (randomBits[randomBitIndex >>> 6] >>> alignment) & ((1L << (64 - alignment)) - 1);
            randomBitIndex += count;
            if (randomBitIndex >= MAX_RANDOM_BIT_INDEX) {
                moreRandomBits();
                randomBitIndex -= MAX_RANDOM_BIT_INDEX;
            }
            alignment = randomBitIndex & 63;
            result <<= alignment;
            result |= (randomBits[randomBitIndex >>> 6] >>> (64 - alignment)) & ((1L << alignment) - 1);

            return result;
        }
    }

    /** Forks a child source carrying the same message/counter state, advanced by one draw. */
    @Override
    public @NotNull RandomSource fork() {
        WorldgenCryptoRandom fork = new WorldgenCryptoRandom(0, 0, null, 0);

        System.arraycopy(Globals.worldSeed, 0, fork.worldSeed, 0, Globals.WORLD_SEED_LONGS);
        fork.message[0] = this.message[0];
        fork.message[1] = this.message[1];
        fork.message[2] = this.message[2];
        fork.message[3] = this.message[3];
        fork.randomBitIndex = this.randomBitIndex;
        fork.counter = this.counter;
        fork.nextLong(); // decorrelate the fork from its parent
        return fork;
    }

    @Override
    public int next(int bits) {
        return (int) getBits(bits);
    }

    /** Skips {@code count} bits, folding overflow past two blocks into the counter. */
    @Override
    public void consumeCount(int count) {
        randomBitIndex += count;
        if (randomBitIndex >= MAX_RANDOM_BIT_INDEX * 2) {
            randomBitIndex -= MAX_RANDOM_BIT_INDEX;
            counter += randomBitIndex >>> LOG2_MAX_RANDOM_BIT_INDEX;
            randomBitIndex &= MAX_RANDOM_BIT_INDEX - 1;
            randomBitIndex += MAX_RANDOM_BIT_INDEX; // leave index exhausted so getBits rehashes with the new counter
        }
    }

    /** Unbiased bounded draw via rejection sampling over ceil(log2(bound)) bits. */
    @Override
    public int nextInt(int bound) {
        int bits = Mth.ceillog2(bound);
        int result;
        do {
            result = (int) getBits(bits);
        } while (result >= bound);

        return result;
    }

    @Override
    public long nextLong() {
        return getBits(64);
    }

    @Override
    public double nextDouble() {
        // 53 random bits scaled into [0, 1) — full double mantissa precision.
        return getBits(53) * 0x1.0p-53;
    }

    /** Seeds population randomness from block coords; returns the packed coords as the "seed". */
    @Override
    public long setDecorationSeed(long worldSeed, int blockX, int blockZ) {
        setSecureSeed(blockX, blockZ, Globals.Salt.POPULATION, 0);
        return ((long) blockX << 32) | ((long) blockZ & 0xffffffffL);
    }

    /** Unpacks the coords produced by setDecorationSeed and salts by feature index/step. */
    @Override
    public void setFeatureSeed(long populationSeed, int index, int step) {
        setSecureSeed((int) (populationSeed >> 32), (int) populationSeed, Globals.Salt.DECORATION, index + 10000L * step);
    }

    // NOTE(review): the two overrides below intentionally fall back to vanilla
    // behavior — presumably those call paths are secured elsewhere; confirm.
    @Override
    public void setLargeFeatureSeed(long worldSeed, int chunkX, int chunkZ) {
        super.setLargeFeatureSeed(worldSeed, chunkX, chunkZ);
    }

    @Override
    public void setLargeFeatureWithSalt(long worldSeed, int regionX, int regionZ, int salt) {
        super.setLargeFeatureWithSalt(worldSeed, regionX, regionZ, salt);
    }

    /** Secure replacement for the vanilla slime-chunk RNG. */
    public static RandomSource seedSlimeChunk(int chunkX, int chunkZ) {
        return new WorldgenCryptoRandom(chunkX, chunkZ, Globals.Salt.SLIME_CHUNK, 0);
    }
}

View File

@@ -0,0 +1,41 @@
package abomination;
import ca.spottedleaf.moonrise.patches.chunk_system.storage.ChunkSystemRegionFile;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.world.level.ChunkPos;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.file.Path;
/**
 * Common abstraction over region file formats (vanilla .mca, linear, buffered
 * linear) so the chunk system can read and write chunks without knowing the
 * on-disk layout. // Luminol - Configurable region file format
 */
public interface IRegionFile extends ChunkSystemRegionFile, AutoCloseable {
    /** Path of the backing region file on disk. */
    Path getPath();
    /** Read stream over the serialized chunk at {@code pos}. */
    DataInputStream getChunkDataInputStream(ChunkPos pos) throws IOException;
    /** Whether a chunk exists at {@code pos}; may be unsupported by some formats (LinearRegionFile throws a stub exception). */
    boolean doesChunkExist(ChunkPos pos) throws Exception;
    /** Stream that buffers a chunk write destined for {@code pos}. */
    DataOutputStream getChunkDataOutputStream(ChunkPos pos) throws IOException;
    /** Persists pending in-memory changes to disk. */
    void flush() throws IOException;
    /** Removes the chunk stored at {@code pos}. */
    void clear(ChunkPos pos) throws IOException;
    /** Whether a chunk is present at {@code pos}. */
    boolean hasChunk(ChunkPos pos);
    void close() throws IOException;
    /** Writes the serialized chunk bytes for {@code pos}. */
    void write(ChunkPos pos, ByteBuffer buf) throws IOException;
    // Oversized-chunk bookkeeping, mirroring vanilla RegionFile's API.
    CompoundTag getOversizedData(int x, int z) throws IOException;
    boolean isOversized(int x, int z);
    /** Attempts a header rebuild; return semantics follow vanilla RegionFile — confirm there before relying on it. */
    boolean recalculateHeader() throws IOException;
    void setOversized(int x, int z, boolean oversized) throws IOException;
    default int getRecalculateCount() {return 0;} // Luminol - Configurable region file format
}

View File

@@ -0,0 +1,622 @@
package abomination;
import ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO;
import com.github.luben.zstd.ZstdInputStream;
import com.github.luben.zstd.ZstdOutputStream;
import com.mojang.logging.LogUtils;
import net.jpountz.lz4.LZ4Compressor;
import net.jpountz.lz4.LZ4Factory;
import net.jpountz.lz4.LZ4FastDecompressor;
import net.openhft.hashing.LongHashFunction;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.world.level.chunk.storage.RegionStorageInfo;
import net.minecraft.world.level.chunk.storage.RegionFileVersion;
import net.minecraft.world.level.ChunkPos;
import org.slf4j.Logger;
import javax.annotation.Nullable;
import java.io.*;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.LockSupport;
import java.util.concurrent.locks.ReentrantLock;
// LinearRegionFile_implementation_version_0_5byXymb
// Just gonna use this string to inform other forks about updates ;-)
public class LinearRegionFile implements IRegionFile{
private static final long SUPERBLOCK = 0xc3ff13183cca9d9aL;
private static final byte VERSION = 3;
private static final int HEADER_SIZE = 27;
private static final int FOOTER_SIZE = 8;
private static final Logger LOGGER = LogUtils.getLogger();
private byte[][] bucketBuffers;
private final byte[][] buffer = new byte[1024][];
private final int[] bufferUncompressedSize = new int[1024];
private final long[] chunkTimestamps = new long[1024];
private final Object markedToSaveLock = new Object();
private final LZ4Compressor compressor;
private final LZ4FastDecompressor decompressor;
private boolean markedToSave = false;
private boolean close = false;
public final ReentrantLock fileLock = new ReentrantLock(true);
public Path regionFile;
private final int compressionLevel;
private int gridSize = 8;
private int bucketSize = 4;
private final Thread bindThread;
// Path of the on-disk region file backing this instance.
public Path getRegionFile() {
return this.regionFile;
}
// Fair lock guarding file-level operations on this region.
public ReentrantLock getFileLock() {
return this.fileLock;
}
// Maps local chunk coordinates (0..31; callers normalize via floorMod) to the
// index of the bucket holding them in the gridSize x gridSize bucket grid.
private int chunkToBucketIdx(int chunkX, int chunkZ) {
    int bx = chunkX / bucketSize, bz = chunkZ / bucketSize;
    return bx * gridSize + bz;
}
// Lazily expands the zstd-compressed bucket containing the given chunk into
// the per-chunk LZ4 cache (this.buffer), then drops the raw bucket bytes.
// No-op when the file had no bucketed data (v1 load / new file) or the
// bucket was already expanded.
private void openBucket(int chunkX, int chunkZ) {
    chunkX = Math.floorMod(chunkX, 32);
    chunkZ = Math.floorMod(chunkZ, 32);
    int idx = chunkToBucketIdx(chunkX, chunkZ);
    if (bucketBuffers == null) return;
    if (bucketBuffers[idx] != null) {
        try {
            ByteArrayInputStream bucketByteStream = new ByteArrayInputStream(bucketBuffers[idx]);
            ZstdInputStream zstdStream = new ZstdInputStream(bucketByteStream);
            ByteBuffer bucketBuffer = ByteBuffer.wrap(zstdStream.readAllBytes());
            int bx = chunkX / bucketSize, bz = chunkZ / bucketSize;
            // Walk every chunk slot in this bucket: 4-byte size, 8-byte
            // timestamp, then (size - 8) bytes of payload when size > 0.
            for (int cx = 0; cx < 32 / gridSize; cx++) {
                for (int cz = 0; cz < 32 / gridSize; cz++) {
                    int chunkIndex = (bx * (32 / gridSize) + cx) + (bz * (32 / gridSize) + cz) * 32;
                    int chunkSize = bucketBuffer.getInt();
                    long timestamp = bucketBuffer.getLong();
                    this.chunkTimestamps[chunkIndex] = timestamp;
                    if (chunkSize > 0) {
                        byte[] chunkData = new byte[chunkSize - 8]; // stored size includes the 8-byte timestamp
                        bucketBuffer.get(chunkData);
                        // Re-compress with LZ4 for the in-memory representation
                        // used by the rest of this class.
                        int maxCompressedLength = this.compressor.maxCompressedLength(chunkData.length);
                        byte[] compressed = new byte[maxCompressedLength];
                        int compressedLength = this.compressor.compress(chunkData, 0, chunkData.length, compressed, 0, maxCompressedLength);
                        byte[] finalCompressed = new byte[compressedLength];
                        System.arraycopy(compressed, 0, finalCompressed, 0, compressedLength);
                        // TODO: Optimization - return the requested chunk immediately to save on one LZ4 decompression
                        this.buffer[chunkIndex] = finalCompressed;
                        this.bufferUncompressedSize[chunkIndex] = chunkData.length;
                    }
                }
            }
        } catch (IOException ex) {
            throw new RuntimeException("Region file corrupted: " + regionFile + " bucket: " + idx);
            // TODO: Make sure the server crashes instead of corrupting the world
        }
        bucketBuffers[idx] = null; // bucket fully expanded; free the raw bytes
    }
}
public boolean regionFileOpen = false;
// Lazily loads the region file on first access. A missing/unreadable file is
// treated as an empty region (only the flush scheduler thread is started).
// Synchronized + regionFileOpen flag ensure the load happens exactly once.
private synchronized void openRegionFile() {
    if (regionFileOpen) return;
    regionFileOpen = true;
    File regionFile = new File(this.regionFile.toString()); // NOTE(review): local deliberately(?) shadows the Path field
    if(!regionFile.canRead()) {
        this.bindThread.start();
        return;
    }
    try {
        byte[] fileContent = Files.readAllBytes(this.regionFile);
        ByteBuffer buffer = ByteBuffer.wrap(fileContent);
        long superBlock = buffer.getLong();
        if (superBlock != SUPERBLOCK)
            throw new RuntimeException("Invalid superblock: " + superBlock + " file " + this.regionFile);
        byte version = buffer.get();
        // v1/v2 share the legacy single-blob layout; v3 is the bucketed layout.
        if (version == 1 || version == 2) {
            parseLinearV1(buffer);
        } else if (version == 3) {
            parseLinearV2(buffer);
        } else {
            throw new RuntimeException("Invalid version: " + version + " file " + this.regionFile);
        }
        this.bindThread.start();
    } catch (IOException e) {
        throw new RuntimeException("Failed to open region file " + this.regionFile, e);
    }
}
// Parses the legacy linear v1/v2 layout: one zstd blob holding a 1024-entry
// (size, timestamp) table followed by the chunk payloads. Each chunk is
// re-compressed with LZ4 into this.buffer for the in-memory cache.
private void parseLinearV1(ByteBuffer buffer) throws IOException {
    final int HEADER_SIZE = 32;
    final int FOOTER_SIZE = 8;
    // Skip newestTimestamp (Long) + Compression level (Byte) + Chunk count (Short): Unused.
    buffer.position(buffer.position() + 11);
    int dataCount = buffer.getInt();
    long fileLength = this.regionFile.toFile().length();
    if (fileLength != HEADER_SIZE + dataCount + FOOTER_SIZE) {
        throw new IOException("Invalid file length: " + this.regionFile + " " + fileLength + " " + (HEADER_SIZE + dataCount + FOOTER_SIZE));
    }
    buffer.position(buffer.position() + 8); // Skip data hash (Long): Unused.
    byte[] rawCompressed = new byte[dataCount];
    buffer.get(rawCompressed);
    // try-with-resources: the zstd stream owns a native context that the
    // previous code leaked (the stream was never closed).
    ByteBuffer decompressedBuffer;
    try (ZstdInputStream zstdInputStream = new ZstdInputStream(new ByteArrayInputStream(rawCompressed))) {
        decompressedBuffer = ByteBuffer.wrap(zstdInputStream.readAllBytes());
    }
    int[] starts = new int[1024];
    for (int i = 0; i < 1024; i++) {
        starts[i] = decompressedBuffer.getInt();
        decompressedBuffer.getInt(); // Skip timestamps (Int): Unused.
    }
    for (int i = 0; i < 1024; i++) {
        if (starts[i] > 0) {
            int size = starts[i];
            byte[] chunkData = new byte[size];
            decompressedBuffer.get(chunkData);
            // Re-compress with LZ4 so the in-memory representation matches
            // the rest of this class.
            int maxCompressedLength = this.compressor.maxCompressedLength(size);
            byte[] compressed = new byte[maxCompressedLength];
            int compressedLength = this.compressor.compress(chunkData, 0, size, compressed, 0, maxCompressedLength);
            byte[] finalCompressed = new byte[compressedLength];
            System.arraycopy(compressed, 0, finalCompressed, 0, compressedLength);
            this.buffer[i] = finalCompressed;
            this.bufferUncompressedSize[i] = size;
            this.chunkTimestamps[i] = getTimestamp(); // Use current timestamp as we don't have the original
        }
    }
}
// Parses the v3 bucketed layout: header (timestamp, grid size, region coords,
// existence bitmap, NBT-feature list), a per-bucket size/level/xxHash table,
// then the raw zstd bucket blobs — kept compressed until openBucket() needs
// them — and a trailing superblock.
private void parseLinearV2(ByteBuffer buffer) throws IOException {
    buffer.getLong(); // Skip newestTimestamp (Long)
    gridSize = buffer.get();
    if (gridSize != 1 && gridSize != 2 && gridSize != 4 && gridSize != 8 && gridSize != 16 && gridSize != 32)
        throw new RuntimeException("Invalid grid size: " + gridSize + " file " + this.regionFile);
    bucketSize = 32 / gridSize;
    buffer.getInt(); // Skip region_x (Int)
    buffer.getInt(); // Skip region_z (Int)
    boolean[] chunkExistenceBitmap = deserializeExistenceBitmap(buffer); // read to advance the buffer; value unused here
    // Forward compatibility: skip (name, int) feature pairs until the
    // zero-length terminator byte.
    while (true) {
        byte featureNameLength = buffer.get();
        if (featureNameLength == 0) break;
        byte[] featureNameBytes = new byte[featureNameLength];
        buffer.get(featureNameBytes);
        String featureName = new String(featureNameBytes);
        int featureValue = buffer.getInt();
        // System.out.println("NBT Feature: " + featureName + " = " + featureValue);
    }
    int[] bucketSizes = new int[gridSize * gridSize];
    byte[] bucketCompressionLevels = new byte[gridSize * gridSize];
    long[] bucketHashes = new long[gridSize * gridSize];
    for (int i = 0; i < gridSize * gridSize; i++) {
        bucketSizes[i] = buffer.getInt();
        bucketCompressionLevels[i] = buffer.get();
        bucketHashes[i] = buffer.getLong();
    }
    bucketBuffers = new byte[gridSize * gridSize][];
    for (int i = 0; i < gridSize * gridSize; i++) {
        if (bucketSizes[i] > 0) {
            bucketBuffers[i] = new byte[bucketSizes[i]];
            buffer.get(bucketBuffers[i]);
            // xxHash integrity check against the stored per-bucket hash.
            long rawHash = LongHashFunction.xx().hashBytes(bucketBuffers[i]);
            if (rawHash != bucketHashes[i]) throw new IOException("Region file hash incorrect " + this.regionFile);
        }
    }
    long footerSuperBlock = buffer.getLong();
    if (footerSuperBlock != SUPERBLOCK)
        throw new IOException("Footer superblock invalid " + this.regionFile);
}
// Convenience constructor using the server's configured compression format.
// NOTE(review): parameter names look swapped relative to the delegate — the
// `directory` argument lands in the delegate's `path` slot (which becomes
// this.regionFile). Call sites (EnumRegionFormat) pass the region *file*
// path as the second argument, so net behavior is right; only the names
// mislead. Rename with care.
public LinearRegionFile(RegionStorageInfo storageKey, Path directory, Path path, boolean dsync, int compressionLevel) throws IOException {
    this(storageKey, directory, path, RegionFileVersion.getCompressionFormat(), dsync, compressionLevel);
}
// Builds the in-memory region state and prepares (but does not start) the
// flush-scheduler thread; openRegionFile() starts it. The scheduler polls
// every SAVE_DELAY_MS and spawns at most SAVE_THREAD_MAX_COUNT concurrent
// flush workers across all LinearRegionFile instances (shared saveLock).
// NOTE(review): storageKey, directory, compressionFormat and dsync are
// accepted for interface parity but unused in this body.
public LinearRegionFile(RegionStorageInfo storageKey, Path path, Path directory, RegionFileVersion compressionFormat, boolean dsync, int compressionLevel) throws IOException {
    Runnable flushCheck = () -> {
        while (!close) {
            synchronized (saveLock) {
                if (markedToSave && activeSaveThreads < SAVE_THREAD_MAX_COUNT) {
                    activeSaveThreads++;
                    Runnable flushOperation = () -> {
                        try {
                            flush();
                        } catch (IOException ex) {
                            LOGGER.error("Region file {} flush failed", this.regionFile.toAbsolutePath(), ex);
                        } finally {
                            synchronized (saveLock) {
                                activeSaveThreads--; // always release the worker slot
                            }
                        }
                    };
                    Thread saveThread = USE_VIRTUAL_THREAD ?
                            Thread.ofVirtual().name("Linear IO - " + LinearRegionFile.this.hashCode()).unstarted(flushOperation) :
                            Thread.ofPlatform().name("Linear IO - " + LinearRegionFile.this.hashCode()).unstarted(flushOperation);
                    saveThread.setPriority(Thread.NORM_PRIORITY - 3); // background work; no-op on virtual threads
                    saveThread.start();
                }
            }
            LockSupport.parkNanos(TimeUnit.MILLISECONDS.toNanos(SAVE_DELAY_MS));
        }
    };
    this.bindThread = USE_VIRTUAL_THREAD ? Thread.ofVirtual().unstarted(flushCheck) : Thread.ofPlatform().unstarted(flushCheck);
    this.bindThread.setName("Linear IO Schedule - " + this.hashCode());
    this.regionFile = path;
    this.compressionLevel = compressionLevel;
    // LZ4 for the in-memory chunk cache; zstd (compressionLevel) is used for
    // the on-disk buckets in flush().
    this.compressor = LZ4Factory.fastestInstance().fastCompressor();
    this.decompressor = LZ4Factory.fastestInstance().fastDecompressor();
}
// Flags the region as dirty so the background flush loop writes it out.
// NOTE(review): the method-level `synchronized` (on `this`) is redundant with
// the markedToSaveLock monitor held inside — kept to preserve the existing
// locking behavior.
private synchronized void markToSave() {
    synchronized(markedToSaveLock) {
        markedToSave = true;
    }
}
// Atomically consumes the dirty flag: reports whether a save was requested
// and clears the flag within the same critical section, so exactly one
// caller observes each mark.
private synchronized boolean isMarkedToSave() {
    synchronized (markedToSaveLock) {
        boolean wasMarked = markedToSave;
        markedToSave = false;
        return wasMarked;
    }
}
public static int SAVE_THREAD_MAX_COUNT = 6;
public static int SAVE_DELAY_MS = 100;
public static boolean USE_VIRTUAL_THREAD = true;
private static final Object saveLock = new Object();
private static int activeSaveThreads = 0;
/*public void run() {
while (!close) {
synchronized (saveLock) {
if (markedToSave && activeSaveThreads < SAVE_THREAD_MAX_COUNT) {
activeSaveThreads++;
Thread saveThread = new Thread(() -> {
try {
flush();
} catch (IOException ex) {
LOGGER.error("Region file " + this.regionFile.toAbsolutePath() + " flush failed", ex);
} finally {
synchronized (saveLock) {
activeSaveThreads--;
}
}
}, "RegionFileFlush");
saveThread.setPriority(Thread.NORM_PRIORITY - 3);
saveThread.start();
}
}
LockSupport.parkNanos(TimeUnit.MILLISECONDS.toNanos(SAVE_DELAY_MS));
}
}*/
// Stub required by IRegionFile; not implemented for the linear format.
// Always throws (after forcing the file open) — callers must not rely on it.
public synchronized boolean doesChunkExist(ChunkPos pos) throws Exception {
    openRegionFile();
    throw new Exception("doesChunkExist is a stub");
}
// Writes the whole region atomically: serializes header + buckets into a
// ".tmp" sibling, fsyncs, then renames over the real file. Runs only when
// the dirty flag was set. Buckets still compressed from load are written
// back verbatim; only expanded buckets are re-compressed ("misses").
// NOTE(review): the streams are not in try-with-resources — an exception
// mid-write leaks the FileOutputStream and leaves the .tmp file behind.
public synchronized void flush() throws IOException {
    if(!isMarkedToSave()) return;
    openRegionFile();
    long timestamp = getTimestamp();
    long writeStart = System.nanoTime();
    File tempFile = new File(regionFile.toString() + ".tmp");
    FileOutputStream fileStream = new FileOutputStream(tempFile);
    DataOutputStream dataStream = new DataOutputStream(fileStream);
    dataStream.writeLong(SUPERBLOCK);
    dataStream.writeByte(VERSION);
    dataStream.writeLong(timestamp);
    dataStream.writeByte(gridSize);
    // Region coordinates are recovered from the "r.<x>.<z>.<ext>" file name;
    // on parse failure they default to 0/0 with a logged warning.
    String fileName = regionFile.getFileName().toString();
    String[] parts = fileName.split("\\.");
    int regionX = 0;
    int regionZ = 0;
    try {
        if (parts.length >= 4) {
            regionX = Integer.parseInt(parts[1]);
            regionZ = Integer.parseInt(parts[2]);
        } else {
            LOGGER.warn("Unexpected file name format: " + fileName);
        }
    } catch (NumberFormatException e) {
        LOGGER.error("Failed to parse region coordinates from file name: " + fileName, e);
    }
    dataStream.writeInt(regionX);
    dataStream.writeInt(regionZ);
    // Existence bitmap: one bit per chunk slot, set when cached data exists.
    boolean[] chunkExistenceBitmap = new boolean[1024];
    for (int i = 0; i < 1024; i++) {
        chunkExistenceBitmap[i] = (this.bufferUncompressedSize[i] > 0);
    }
    writeSerializedExistenceBitmap(dataStream, chunkExistenceBitmap);
    writeNBTFeatures(dataStream);
    int bucketMisses = 0; // buckets that had to be re-compressed
    byte[][] buckets = new byte[gridSize * gridSize][];
    for (int bx = 0; bx < gridSize; bx++) {
        for (int bz = 0; bz < gridSize; bz++) {
            // Fast path: bucket never expanded since load — reuse its bytes.
            if (bucketBuffers != null && bucketBuffers[bx * gridSize + bz] != null) {
                buckets[bx * gridSize + bz] = bucketBuffers[bx * gridSize + bz];
                continue;
            }
            bucketMisses++;
            ByteArrayOutputStream bucketStream = new ByteArrayOutputStream();
            ZstdOutputStream zstdStream = new ZstdOutputStream(bucketStream, this.compressionLevel);
            DataOutputStream bucketDataStream = new DataOutputStream(zstdStream);
            boolean hasData = false;
            // Per chunk slot: 4-byte size (payload + 8), 8-byte timestamp,
            // payload bytes; empty slots write size 0 + timestamp.
            for (int cx = 0; cx < 32 / gridSize; cx++) {
                for (int cz = 0; cz < 32 / gridSize; cz++) {
                    int chunkIndex = (bx * 32 / gridSize + cx) + (bz * 32 / gridSize + cz) * 32;
                    if (this.bufferUncompressedSize[chunkIndex] > 0) {
                        hasData = true;
                        byte[] chunkData = new byte[this.bufferUncompressedSize[chunkIndex]];
                        this.decompressor.decompress(this.buffer[chunkIndex], 0, chunkData, 0, this.bufferUncompressedSize[chunkIndex]);
                        bucketDataStream.writeInt(chunkData.length + 8);
                        bucketDataStream.writeLong(this.chunkTimestamps[chunkIndex]);
                        bucketDataStream.write(chunkData);
                    } else {
                        bucketDataStream.writeInt(0);
                        bucketDataStream.writeLong(this.chunkTimestamps[chunkIndex]);
                    }
                }
            }
            bucketDataStream.close();
            if (hasData) {
                buckets[bx * gridSize + bz] = bucketStream.toByteArray();
            }
        }
    }
    // Bucket table: size, compression level, xxHash per bucket (hash 0 when empty).
    for (int i = 0; i < gridSize * gridSize; i++) {
        dataStream.writeInt(buckets[i] != null ? buckets[i].length : 0);
        dataStream.writeByte(this.compressionLevel);
        long rawHash = 0;
        if (buckets[i] != null) {
            rawHash = LongHashFunction.xx().hashBytes(buckets[i]);
        }
        dataStream.writeLong(rawHash);
    }
    for (int i = 0; i < gridSize * gridSize; i++) {
        if (buckets[i] != null) {
            dataStream.write(buckets[i]);
        }
    }
    dataStream.writeLong(SUPERBLOCK);
    dataStream.flush();
    fileStream.getFD().sync();
    fileStream.getChannel().force(true); // Ensure atomicity on Btrfs
    dataStream.close();
    fileStream.close();
    Files.move(tempFile.toPath(), this.regionFile, StandardCopyOption.REPLACE_EXISTING);
    //System.out.println("writeStart REGION FILE FLUSH " + (System.nanoTime() - writeStart) + " misses: " + bucketMisses);
}
/**
 * Writes the NBT-feature table that precedes the region payload.
 * Each feature would be emitted via {@code writeNBTFeature}; the table is
 * terminated by a single zero byte (the name-length sentinel). No features
 * are currently defined, so only the terminator is written.
 *
 * @param dataStream destination stream
 * @throws IOException if the stream fails
 */
private void writeNBTFeatures(DataOutputStream dataStream) throws IOException {
    // writeNBTFeature(dataStream, "example", 1);
    dataStream.writeByte(0); // End of NBT features
}
/**
 * Serializes a single named NBT feature as: [nameLength: u8][nameBytes][value: i32].
 *
 * @param dataStream   destination stream
 * @param featureName  feature identifier; must encode to at most 255 UTF-8 bytes
 * @param featureValue feature value
 * @throws IOException if the stream fails or the name cannot be length-prefixed
 */
private void writeNBTFeature(DataOutputStream dataStream, String featureName, int featureValue) throws IOException {
    // Encode with an explicit charset: bare String.getBytes() uses the platform
    // default, which would make the on-disk format machine-dependent.
    byte[] featureNameBytes = featureName.getBytes(java.nio.charset.StandardCharsets.UTF_8);
    if (featureNameBytes.length > 255) {
        // writeByte stores only the low 8 bits; a longer name would be silently corrupted.
        throw new IOException("NBT feature name too long: " + featureName);
    }
    dataStream.writeByte(featureNameBytes.length);
    dataStream.write(featureNameBytes);
    dataStream.writeInt(featureValue);
}
public static final int MAX_CHUNK_SIZE = 500 * 1024 * 1024; // Abomination - prevent chunk dupe

/**
 * Compresses and stores a chunk's serialized NBT into this region's in-memory
 * chunk table, then marks the region dirty so the next flush persists it.
 * Oversized payloads (&gt; {@link #MAX_CHUNK_SIZE}) are treated as a dupe
 * attempt and the chunk slot is cleared instead.
 *
 * @param pos    chunk position within this region
 * @param buffer serialized chunk bytes; only [position, limit) is consumed
 */
public synchronized void write(ChunkPos pos, ByteBuffer buffer) {
    openRegionFile();
    openBucket(pos.x, pos.z);
    try {
        // Read only the buffer's readable window. The previous code wrapped
        // buffer.array() whole, which also copied the unused tail capacity of
        // the backing array (stale bytes) into the stored payload.
        byte[] b = toByteArray(new ByteArrayInputStream(
            buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()));
        int uncompressedSize = b.length;
        if (uncompressedSize > MAX_CHUNK_SIZE) {
            LOGGER.error("Chunk dupe attempt " + this.regionFile);
            clear(pos);
        } else {
            int maxCompressedLength = this.compressor.maxCompressedLength(b.length);
            byte[] compressed = new byte[maxCompressedLength];
            int compressedLength = this.compressor.compress(b, 0, b.length, compressed, 0, maxCompressedLength);
            b = new byte[compressedLength];
            System.arraycopy(compressed, 0, b, 0, compressedLength);
            // Compute the slot once and reuse it for all three table updates.
            int index = getChunkIndex(pos.x, pos.z);
            this.buffer[index] = b;
            this.chunkTimestamps[index] = getTimestamp();
            this.bufferUncompressedSize[index] = uncompressedSize;
        }
    } catch (IOException e) {
        LOGGER.error("Chunk write IOException " + e + " " + this.regionFile);
    }
    markToSave();
}
/**
 * Opens a stream that callers write chunk NBT into. Closing the returned
 * stream hands the accumulated bytes to {@code write(ChunkPos, ByteBuffer)}
 * through a {@code ChunkBuffer}.
 *
 * @param pos chunk position within this region
 * @return buffered stream committing to this region file on close
 */
public DataOutputStream getChunkDataOutputStream(ChunkPos pos) {
    openRegionFile();
    openBucket(pos.x, pos.z);
    LinearRegionFile.ChunkBuffer sink = new LinearRegionFile.ChunkBuffer(pos);
    return new DataOutputStream(new BufferedOutputStream(sink));
}
/**
 * Moonrise chunk-system hook: begins an asynchronous-friendly chunk write.
 * Returns a WriteData whose stream the caller fills and closes; the close
 * callback funnels the buffered bytes into this region file via
 * {@code getChunkDataOutputStream}.
 *
 * @param data the chunk NBT being written (carried through for the caller)
 * @param pos  chunk position within this region
 * @throws IOException declared for the interface; opening the in-memory stream does not itself fail
 */
@Override
public MoonriseRegionFileIO.RegionDataController.WriteData moonrise$startWrite(CompoundTag data, ChunkPos pos) throws IOException {
    final DataOutputStream out = this.getChunkDataOutputStream(pos);
    // WriteResult.WRITE: caller must write to `out` and then close it; the
    // close callback is what actually commits the bytes.
    return new ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO.RegionDataController.WriteData(
        data, ca.spottedleaf.moonrise.patches.chunk_system.io.MoonriseRegionFileIO.RegionDataController.WriteData.WriteResult.WRITE,
        out, regionFile -> out.close()
    );
}
/**
 * In-memory sink for one chunk's serialized bytes. Closing the buffer commits
 * its contents to the owning region file.
 */
private class ChunkBuffer extends ByteArrayOutputStream {

    private final ChunkPos pos;

    public ChunkBuffer(ChunkPos chunkPos) {
        super();
        this.pos = chunkPos;
    }

    @Override
    public void close() throws IOException {
        // Expose exactly the written range [0, count) to the region file;
        // no intermediate copy is made here.
        ByteBuffer payload = ByteBuffer.wrap(this.buf, 0, this.count);
        LinearRegionFile.this.write(this.pos, payload);
    }
}
/**
 * Reads the given stream to exhaustion and returns its contents.
 *
 * @param in source stream; fully read but not closed
 * @return every remaining byte of {@code in}
 * @throws IOException if reading fails
 */
private byte[] toByteArray(InputStream in) throws IOException {
    // InputStream.readAllBytes (Java 9+) replaces the hand-rolled 4 KiB copy
    // loop; the codebase already relies on Java 16+ language features.
    return in.readAllBytes();
}
/**
 * Decompresses and returns the stored NBT for the given chunk, or {@code null}
 * when this region holds no data for it.
 *
 * @param pos chunk position within this region
 * @return stream over the decompressed chunk bytes, or null if absent
 */
@Nullable
public synchronized DataInputStream getChunkDataInputStream(ChunkPos pos) {
    openRegionFile();
    openBucket(pos.x, pos.z);
    // Hoist the slot lookup: the original recomputed getChunkIndex four times.
    int index = getChunkIndex(pos.x, pos.z);
    int uncompressedSize = this.bufferUncompressedSize[index];
    if (uncompressedSize != 0) {
        byte[] content = new byte[uncompressedSize];
        this.decompressor.decompress(this.buffer[index], 0, content, 0, uncompressedSize);
        return new DataInputStream(new ByteArrayInputStream(content));
    }
    return null;
}
/**
 * Drops any stored data for the given chunk and marks the region dirty so the
 * next flush persists the removal.
 *
 * @param pos chunk position within this region
 */
public synchronized void clear(ChunkPos pos) {
    openRegionFile();
    openBucket(pos.x, pos.z);
    final int index = getChunkIndex(pos.x, pos.z);
    this.buffer[index] = null;
    this.bufferUncompressedSize[index] = 0;
    this.chunkTimestamps[index] = 0;
    markToSave();
}
/**
 * @param pos chunk position within this region
 * @return whether this region currently holds data for the given chunk
 */
public synchronized boolean hasChunk(ChunkPos pos) {
    openRegionFile();
    openBucket(pos.x, pos.z);
    final int index = getChunkIndex(pos.x, pos.z);
    return this.bufferUncompressedSize[index] > 0;
}
/**
 * Flushes pending data and marks this region file closed.
 *
 * @throws IOException if the final flush fails
 */
public synchronized void close() throws IOException {
    openRegionFile();
    close = true;
    try {
        flush();
    } catch (IOException e) {
        // Keep the original message but preserve the cause chain so the real
        // flush failure (stack trace included) is not lost to callers.
        throw new IOException("Region flush IOException " + e + " " + this.regionFile, e);
    }
}
/**
 * Maps chunk coordinates to a slot in the 32x32 per-region chunk table.
 * Coordinates are masked to their low 5 bits, so absolute world chunk
 * coordinates are accepted as well as region-local ones.
 */
private static int getChunkIndex(int x, int z) {
    final int localX = x & 31;
    final int localZ = z & 31;
    return localX + (localZ << 5);
}
// Current wall-clock time in whole seconds, as stored per chunk alongside its
// data. NOTE(review): truncated to int per region-format convention; this
// representation overflows in 2038.
private static int getTimestamp() {
    return (int) (System.currentTimeMillis() / 1000L);
}
// Linear regions rewrite the whole file atomically on flush, so the vanilla
// header-recovery path is never applicable.
public boolean recalculateHeader() {
    return false;
}

// No-op: the Linear format has no per-chunk "oversized" flag to record.
public void setOversized(int x, int z, boolean something) {}

// Linear stores chunks inline regardless of size (isOversized always returns
// false), so reaching this method indicates a caller logic error.
public CompoundTag getOversizedData(int x, int z) throws IOException {
    throw new IOException("getOversizedData is a stub " + this.regionFile);
}

// Linear chunks are never flagged oversized; see getOversizedData above.
public boolean isOversized(int x, int z) {
    return false;
}

// Path of the region file backing this instance.
public Path getPath() {
    return this.regionFile;
}
/**
 * Unpacks 128 bytes (most significant bit first within each byte) into the
 * 1024-entry chunk-existence table for a region.
 *
 * @param buffer source positioned at the start of the bitmap
 * @return one flag per chunk slot, true when the chunk is present
 */
private boolean[] deserializeExistenceBitmap(ByteBuffer buffer) {
    final boolean[] result = new boolean[1024];
    for (int byteIndex = 0; byteIndex < 128; byteIndex++) {
        final byte packed = buffer.get();
        for (int bit = 0; bit < 8; bit++) {
            result[(byteIndex << 3) + bit] = (packed & (1 << (7 - bit))) != 0;
        }
    }
    return result;
}
/**
 * Packs the 1024-entry chunk-existence table into 128 bytes, most significant
 * bit first within each byte, and writes them to the stream. Inverse of
 * {@code deserializeExistenceBitmap}.
 *
 * @param out    destination stream
 * @param bitmap one flag per chunk slot; must hold 1024 entries
 * @throws IOException if the stream fails
 */
private void writeSerializedExistenceBitmap(DataOutputStream out, boolean[] bitmap) throws IOException {
    for (int byteIndex = 0; byteIndex < 128; byteIndex++) {
        int packed = 0;
        for (int bit = 0; bit < 8; bit++) {
            if (bitmap[(byteIndex << 3) + bit]) {
                packed |= 1 << (7 - bit);
            }
        }
        out.writeByte(packed);
    }
}
}

View File

@@ -0,0 +1,90 @@
package com.kiocg;
import java.util.Arrays;
/**
 * Rolling "heat" profiler for a single chunk: accumulates per-window tick
 * durations (nanoseconds) in a small ring buffer and exposes their average.
 */
public class ChunkHot {

    /** Number of sampling windows kept in the ring. */
    private static final int TIMES_LENGTH = 10;

    /** Index of the window currently being recorded; -1 until the first nextTick(). */
    private int index = -1;

    /** Ring of per-window accumulated nanoseconds. */
    private final long[] times = new long[TIMES_LENGTH];

    /**
     * Value the current window held before start() reset it, so the running
     * total can be corrected while that window is being re-recorded.
     */
    private long temp;

    /** Sum of all windows' heat. */
    private long total;

    /** Start timestamp (nanos) of the individual measurement in progress. */
    private long nanos;

    /** Whether a sampling window is currently open. */
    private volatile boolean started = false;

    /** Advances the ring to the next window. */
    public void nextTick() {
        this.index = (this.index + 1) % TIMES_LENGTH;
    }

    /** Opens the current window: remembers its stale value and zeroes it. */
    public void start() {
        started = true;
        temp = times[this.index];
        times[this.index] = 0L;
    }

    public boolean isStarted() {
        return this.started;
    }

    /**
     * Closes the current window, folding its freshly recorded value into the
     * running total in place of the stale value remembered by start().
     */
    public void stop() {
        started = false;
        total += times[this.index] - temp;
    }

    /** Begins one individual measurement; no-op unless a window is open. */
    public void startTicking() {
        if (!started) {
            return;
        }
        nanos = System.nanoTime();
    }

    /**
     * Ends one individual measurement and adds its duration to the current
     * window. Each measurement is capped at 1,000,000 ns because occasional
     * spikes (e.g. while data is saved to disk) would otherwise distort the heat.
     */
    public void stopTickingAndCount() {
        if (!started) {
            return;
        }
        final long elapsed = System.nanoTime() - nanos;
        times[this.index] += Math.min(elapsed, 1000000L);
    }

    /** Resets all statistics (used when the chunk unloads). */
    public void clear() {
        started = false;
        Arrays.fill(times, 0L);
        temp = 0L;
        total = 0L;
        nanos = 0L;
    }

    /** @return the chunk's average heat across all windows. */
    public long getAverage() {
        return total / ((long) TIMES_LENGTH * 20L);
    }
}

View File

@@ -0,0 +1,34 @@
package com.logisticscraft.occlusionculling;
import com.logisticscraft.occlusionculling.util.Vec3d;
/**
 * Supplies world/block data to the occlusion-culling raycaster.
 */
public interface DataProvider {

    /**
     * Prepares the requested chunk so subsequent block queries can be answered.
     * Returns true if the chunk is ready, false when not loaded. Should not
     * reload the chunk when the chunkX and chunkZ are the same as the last
     * request!
     *
     * @param chunkX chunk X coordinate (block X &gt;&gt; 4)
     * @param chunkZ chunk Z coordinate (block Z &gt;&gt; 4)
     * @return true when the chunk is loaded and queryable
     */
    boolean prepareChunk(int chunkX, int chunkZ);

    /**
     * Tests whether the block at the given position fully occludes rays.
     * The location is inside the chunk most recently prepared via
     * {@link #prepareChunk(int, int)}.
     *
     * @param x block X coordinate
     * @param y block Y coordinate
     * @param z block Z coordinate
     * @return true when the block is a full opaque cube
     */
    boolean isOpaqueFullCube(int x, int y, int z);

    /** Hook for releasing per-query state; invoked by the culler after each ray traversal. Default does nothing. */
    default void cleanup() {
    }

    /** Debug hook receiving the candidate target points of a visibility check. Default does nothing. */
    default void checkingPosition(Vec3d[] targetPoints, int size, Vec3d viewerPosition) {
    }
}

View File

@@ -0,0 +1,515 @@
package com.logisticscraft.occlusionculling;
import java.util.Arrays;
import java.util.BitSet;
import com.logisticscraft.occlusionculling.cache.ArrayOcclusionCache;
import com.logisticscraft.occlusionculling.cache.OcclusionCache;
import com.logisticscraft.occlusionculling.util.MathUtilities;
import com.logisticscraft.occlusionculling.util.Vec3d;
/**
 * Occlusion culler: decides whether an axis-aligned bounding box is potentially
 * visible from a viewer position by voxelizing the (expanded) box, casting rays
 * through the block grid toward sample points on its viewer-facing surfaces,
 * and stopping rays at opaque full cubes. Results are memoized per block in an
 * {@link OcclusionCache} relative to the camera block.
 *
 * <p>Not thread-safe: scratch structures (skipList, targetPoints, cameraPos,
 * dotselectors, lastHitBlock, flags) are instance fields reused across calls.
 */
public class OcclusionCullingInstance {

    // Bit flags describing which faces of the voxelized AABB a voxel lies on
    // (faceEdgeData) and which faces face the viewer (visibleOnFace).
    private static final int ON_MIN_X = 0x01;
    private static final int ON_MAX_X = 0x02;
    private static final int ON_MIN_Y = 0x04;
    private static final int ON_MAX_Y = 0x08;
    private static final int ON_MIN_Z = 0x10;
    private static final int ON_MAX_Z = 0x20;

    // Maximum culling distance in blocks; also sizes the cache coordinate space.
    private final int reach;
    // Amount the tested AABB is inflated on every axis before voxelization.
    private final double aabbExpansion;
    // Supplies chunk readiness and block opacity.
    private final DataProvider provider;
    // Per-block visibility memo, reset externally via resetCache().
    private final OcclusionCache cache;

    // Reused allocated data structures
    private final BitSet skipList = new BitSet(); // Grows bigger in case some mod introduces giant hitboxes
    private final Vec3d[] targetPoints = new Vec3d[15];
    private final Vec3d targetPos = new Vec3d(0, 0, 0);
    private final int[] cameraPos = new int[3];
    private final boolean[] dotselectors = new boolean[14];
    // Becomes true after a ray has hit a wall; enables the lastHitBlock shortcut.
    private boolean allowRayChecks = false;
    private final int[] lastHitBlock = new int[3];
    // While true the ray may pass through opaque blocks (used to escape the
    // viewer's own wall before real occlusion testing starts).
    private boolean allowWallClipping = false;

    /** Convenience constructor with a default array cache and 0.5 expansion. */
    public OcclusionCullingInstance(int maxDistance, DataProvider provider) {
        this(maxDistance, provider, new ArrayOcclusionCache(maxDistance), 0.5);
    }

    /**
     * @param maxDistance   culling reach in blocks
     * @param provider      world data source
     * @param cache         visibility memo implementation
     * @param aabbExpansion inflation applied to each tested AABB
     */
    public OcclusionCullingInstance(int maxDistance, DataProvider provider, OcclusionCache cache, double aabbExpansion) {
        this.reach = maxDistance;
        this.provider = provider;
        this.cache = cache;
        this.aabbExpansion = aabbExpansion;
        // Preallocate every sample-point vector; they are overwritten in place
        // by isVoxelVisible on each query.
        for (int i = 0; i < targetPoints.length; i++) {
            targetPoints[i] = new Vec3d(0, 0, 0);
        }
    }

    /**
     * Tests whether any part of the AABB [aabbMin, aabbMax] (inflated by
     * aabbExpansion) could be visible from viewerPosition. Returns true
     * (do not cull) when the viewer is inside the box, when any voxel of the
     * box is reachable by a ray, or when an unexpected error occurs (failsafe).
     */
    public boolean isAABBVisible(Vec3d aabbMin, Vec3d aabbMax, Vec3d viewerPosition) {
        try {
            int maxX = MathUtilities.floor(aabbMax.x + aabbExpansion);
            int maxY = MathUtilities.floor(aabbMax.y + aabbExpansion);
            int maxZ = MathUtilities.floor(aabbMax.z + aabbExpansion);
            int minX = MathUtilities.floor(aabbMin.x - aabbExpansion);
            int minY = MathUtilities.floor(aabbMin.y - aabbExpansion);
            int minZ = MathUtilities.floor(aabbMin.z - aabbExpansion);
            cameraPos[0] = MathUtilities.floor(viewerPosition.x);
            cameraPos[1] = MathUtilities.floor(viewerPosition.y);
            cameraPos[2] = MathUtilities.floor(viewerPosition.z);
            Relative relX = Relative.from(minX, maxX, cameraPos[0]);
            Relative relY = Relative.from(minY, maxY, cameraPos[1]);
            Relative relZ = Relative.from(minZ, maxZ, cameraPos[2]);
            if (relX == Relative.INSIDE && relY == Relative.INSIDE && relZ == Relative.INSIDE) {
                return true; // We are inside of the AABB, don't cull
            }
            skipList.clear();
            // Just check the cache first
            int id = 0;
            for (int x = minX; x <= maxX; x++) {
                for (int y = minY; y <= maxY; y++) {
                    for (int z = minZ; z <= maxZ; z++) {
                        int cachedValue = getCacheValue(x, y, z);
                        if (cachedValue == 1) {
                            // non-occluding
                            return true;
                        }
                        if (cachedValue != 0) {
                            // was checked and it wasn't visible
                            skipList.set(id);
                        }
                        id++;
                    }
                }
            }
            // only after the first hit wall the cache becomes valid.
            allowRayChecks = false;
            // since the cache wasn't helpful, ray-cast every uncached voxel
            id = 0;
            for (int x = minX; x <= maxX; x++) {
                byte visibleOnFaceX = 0;
                byte faceEdgeDataX = 0;
                faceEdgeDataX |= (x == minX) ? ON_MIN_X : 0;
                faceEdgeDataX |= (x == maxX) ? ON_MAX_X : 0;
                visibleOnFaceX |= (x == minX && relX == Relative.POSITIVE) ? ON_MIN_X : 0;
                visibleOnFaceX |= (x == maxX && relX == Relative.NEGATIVE) ? ON_MAX_X : 0;
                for (int y = minY; y <= maxY; y++) {
                    byte faceEdgeDataY = faceEdgeDataX;
                    byte visibleOnFaceY = visibleOnFaceX;
                    faceEdgeDataY |= (y == minY) ? ON_MIN_Y : 0;
                    faceEdgeDataY |= (y == maxY) ? ON_MAX_Y : 0;
                    visibleOnFaceY |= (y == minY && relY == Relative.POSITIVE) ? ON_MIN_Y : 0;
                    visibleOnFaceY |= (y == maxY && relY == Relative.NEGATIVE) ? ON_MAX_Y : 0;
                    for (int z = minZ; z <= maxZ; z++) {
                        byte faceEdgeData = faceEdgeDataY;
                        byte visibleOnFace = visibleOnFaceY;
                        faceEdgeData |= (z == minZ) ? ON_MIN_Z : 0;
                        faceEdgeData |= (z == maxZ) ? ON_MAX_Z : 0;
                        visibleOnFace |= (z == minZ && relZ == Relative.POSITIVE) ? ON_MIN_Z : 0;
                        visibleOnFace |= (z == maxZ && relZ == Relative.NEGATIVE) ? ON_MAX_Z : 0;
                        if (skipList.get(id)) { // was checked and it wasn't visible
                            id++;
                            continue;
                        }
                        if (visibleOnFace != 0) {
                            targetPos.set(x, y, z);
                            if (isVoxelVisible(viewerPosition, targetPos, faceEdgeData, visibleOnFace)) {
                                return true;
                            }
                        }
                        id++;
                    }
                }
            }
            return false;
        } catch (Throwable t) {
            // Failsafe: on any unexpected error, report and treat as visible.
            t.printStackTrace();
        }
        return true;
    }

    /**
     * Checks a single voxel by ray-casting to sample points selected from the
     * voxel's viewer-facing corners (offsets 0.05/0.95) and face centers.
     *
     * NOTE(review): the corner sets chosen for ON_MIN_X and ON_MIN_Z are
     * identical (1, 4, 5), and corners 4/5 lie on the x=0.95 side — this looks
     * like a copy-paste slip (a min-X face would use corners 1, 2, 3). Verify
     * against the upstream OcclusionCulling library before changing.
     *
     * @param viewerPosition ray origin
     * @param position       voxel min corner
     * @param faceData       contains whether this block is on the outside for a given face
     * @param visibleOnFace  contains whether a face should be considered
     * @return true when any sample point is reachable from the viewer
     */
    private boolean isVoxelVisible(Vec3d viewerPosition, Vec3d position, byte faceData, byte visibleOnFace) {
        int targetSize = 0;
        Arrays.fill(dotselectors, false);
        if ((visibleOnFace & ON_MIN_X) == ON_MIN_X) {
            dotselectors[0] = true;
            if ((faceData & ~ON_MIN_X) != 0) {
                dotselectors[1] = true;
                dotselectors[4] = true;
                dotselectors[5] = true;
            }
            dotselectors[8] = true;
        }
        if ((visibleOnFace & ON_MIN_Y) == ON_MIN_Y) {
            dotselectors[0] = true;
            if ((faceData & ~ON_MIN_Y) != 0) {
                dotselectors[3] = true;
                dotselectors[4] = true;
                dotselectors[7] = true;
            }
            dotselectors[9] = true;
        }
        if ((visibleOnFace & ON_MIN_Z) == ON_MIN_Z) {
            dotselectors[0] = true;
            if ((faceData & ~ON_MIN_Z) != 0) {
                dotselectors[1] = true;
                dotselectors[4] = true;
                dotselectors[5] = true;
            }
            dotselectors[10] = true;
        }
        if ((visibleOnFace & ON_MAX_X) == ON_MAX_X) {
            dotselectors[4] = true;
            if ((faceData & ~ON_MAX_X) != 0) {
                dotselectors[5] = true;
                dotselectors[6] = true;
                dotselectors[7] = true;
            }
            dotselectors[11] = true;
        }
        if ((visibleOnFace & ON_MAX_Y) == ON_MAX_Y) {
            dotselectors[1] = true;
            if ((faceData & ~ON_MAX_Y) != 0) {
                dotselectors[2] = true;
                dotselectors[5] = true;
                dotselectors[6] = true;
            }
            dotselectors[12] = true;
        }
        if ((visibleOnFace & ON_MAX_Z) == ON_MAX_Z) {
            dotselectors[2] = true;
            if ((faceData & ~ON_MAX_Z) != 0) {
                dotselectors[3] = true;
                dotselectors[6] = true;
                dotselectors[7] = true;
            }
            dotselectors[13] = true;
        }
        // Corner sample points (0.05 / 0.95 insets from the voxel faces).
        if (dotselectors[0]) targetPoints[targetSize++].setAdd(position, 0.05, 0.05, 0.05);
        if (dotselectors[1]) targetPoints[targetSize++].setAdd(position, 0.05, 0.95, 0.05);
        if (dotselectors[2]) targetPoints[targetSize++].setAdd(position, 0.05, 0.95, 0.95);
        if (dotselectors[3]) targetPoints[targetSize++].setAdd(position, 0.05, 0.05, 0.95);
        if (dotselectors[4]) targetPoints[targetSize++].setAdd(position, 0.95, 0.05, 0.05);
        if (dotselectors[5]) targetPoints[targetSize++].setAdd(position, 0.95, 0.95, 0.05);
        if (dotselectors[6]) targetPoints[targetSize++].setAdd(position, 0.95, 0.95, 0.95);
        if (dotselectors[7]) targetPoints[targetSize++].setAdd(position, 0.95, 0.05, 0.95);
        // middle points (face centers)
        if (dotselectors[8]) targetPoints[targetSize++].setAdd(position, 0.05, 0.5, 0.5);
        if (dotselectors[9]) targetPoints[targetSize++].setAdd(position, 0.5, 0.05, 0.5);
        if (dotselectors[10]) targetPoints[targetSize++].setAdd(position, 0.5, 0.5, 0.05);
        if (dotselectors[11]) targetPoints[targetSize++].setAdd(position, 0.95, 0.5, 0.5);
        if (dotselectors[12]) targetPoints[targetSize++].setAdd(position, 0.5, 0.95, 0.5);
        if (dotselectors[13]) targetPoints[targetSize++].setAdd(position, 0.5, 0.5, 0.95);
        return isVisible(viewerPosition, targetPoints, targetSize);
    }

    /**
     * Slab test of a ray against the unit cube at block position b.
     *
     * NOTE(review): the first early-out reads {@code if (tmax > 0) return false}
     * while its comment describes the usual "tmax &lt; 0 =&gt; box behind the
     * ray" rejection; as written the method rejects nearly every box in front
     * of the origin, effectively disabling the lastHitBlock shortcut. The
     * direction passed by isVisible (start - target, normalized) also points
     * from the target toward the viewer. Verify against the upstream
     * OcclusionCulling library before relying on or changing this path.
     */
    private boolean rayIntersection(int[] b, Vec3d rayOrigin, Vec3d rayDir) {
        Vec3d rInv = new Vec3d(1, 1, 1).div(rayDir);
        double t1 = (b[0] - rayOrigin.x) * rInv.x;
        double t2 = (b[0] + 1 - rayOrigin.x) * rInv.x;
        double t3 = (b[1] - rayOrigin.y) * rInv.y;
        double t4 = (b[1] + 1 - rayOrigin.y) * rInv.y;
        double t5 = (b[2] - rayOrigin.z) * rInv.z;
        double t6 = (b[2] + 1 - rayOrigin.z) * rInv.z;
        double tmin = Math.max(Math.max(Math.min(t1, t2), Math.min(t3, t4)), Math.min(t5, t6));
        double tmax = Math.min(Math.min(Math.max(t1, t2), Math.max(t3, t4)), Math.max(t5, t6));
        // if tmax > 0, ray (line) is intersecting AABB, but the whole AABB is behind us
        if (tmax > 0) {
            return false;
        }
        // if tmin > tmax, ray doesn't intersect AABB
        if (tmin > tmax) {
            return false;
        }
        return true;
    }

    /**
     * Casts a ray from {@code start} to each target point, walking the grid
     * cells it intersects (Amanatides &amp; Woo style traversal; see
     * <a href="http://playtechs.blogspot.de/2007/03/raytracing-on-grid.html">
     * playtechs.blogspot.de/2007/03/raytracing-on-grid.html</a>).
     * <p>
     * Caching assumes that all target points are inside the same block: the
     * memoized result is stored under targets[0]'s block only.
     */
    private boolean isVisible(Vec3d start, Vec3d[] targets, int size) {
        // start cell coordinate
        int x = cameraPos[0];
        int y = cameraPos[1];
        int z = cameraPos[2];
        for (int v = 0; v < size; v++) {
            // ray-casting target
            Vec3d target = targets[v];
            double relativeX = start.x - target.getX();
            double relativeY = start.y - target.getY();
            double relativeZ = start.z - target.getZ();
            // Shortcut: skip targets whose ray would hit the last occluding block again.
            if (allowRayChecks && rayIntersection(lastHitBlock, start, new Vec3d(relativeX, relativeY, relativeZ).normalize())) {
                continue;
            }
            // cell span of the ray on each axis
            double dimensionX = Math.abs(relativeX);
            double dimensionY = Math.abs(relativeY);
            double dimensionZ = Math.abs(relativeZ);
            // distance between successive cell-border crossings, as a fraction
            // of the total ray length (per axis)
            double dimFracX = 1f / dimensionX;
            double dimFracY = 1f / dimensionY;
            double dimFracZ = 1f / dimensionZ;
            // total amount of intersected cells
            int intersectCount = 1;
            // step direction per axis: 1, 0 or -1
            int x_inc, y_inc, z_inc;
            // fraction of the ray length to the next cell-border crossing, per axis
            double t_next_y, t_next_x, t_next_z;
            if (dimensionX == 0f) {
                x_inc = 0;
                t_next_x = dimFracX; // ray never crosses an X border (dimFracX is +Infinity here)
            } else if (target.x > start.x) {
                x_inc = 1; // target lies in +X; step right
                intersectCount += MathUtilities.floor(target.x) - x; // X borders crossed
                t_next_x = (float) ((x + 1 - start.x) * dimFracX); // first crossing from position inside the start cell
            } else {
                x_inc = -1; // target lies in -X; step left
                intersectCount += x - MathUtilities.floor(target.x); // X borders crossed
                t_next_x = (float) ((start.x - x) * dimFracX); // first crossing from position inside the start cell
            }
            if (dimensionY == 0f) {
                y_inc = 0;
                t_next_y = dimFracY; // ray never crosses a Y border
            } else if (target.y > start.y) {
                y_inc = 1; // target lies in +Y; step up
                intersectCount += MathUtilities.floor(target.y) - y;
                t_next_y = (float) ((y + 1 - start.y) * dimFracY);
            } else {
                y_inc = -1; // target lies in -Y; step down
                intersectCount += y - MathUtilities.floor(target.y);
                t_next_y = (float) ((start.y - y) * dimFracY);
            }
            if (dimensionZ == 0f) {
                z_inc = 0;
                t_next_z = dimFracZ; // ray never crosses a Z border
            } else if (target.z > start.z) {
                z_inc = 1; // target lies in +Z
                intersectCount += MathUtilities.floor(target.z) - z;
                t_next_z = (float) ((z + 1 - start.z) * dimFracZ);
            } else {
                z_inc = -1; // target lies in -Z
                intersectCount += z - MathUtilities.floor(target.z);
                t_next_z = (float) ((start.z - z) * dimFracZ);
            }
            boolean finished = stepRay(start, x, y, z,
                dimFracX, dimFracY, dimFracZ, intersectCount, x_inc, y_inc,
                z_inc, t_next_y, t_next_x, t_next_z);
            provider.cleanup();
            if (finished) {
                cacheResult(targets[0], true);
                return true;
            } else {
                allowRayChecks = true; // a wall was hit; lastHitBlock is now valid
            }
        }
        cacheResult(targets[0], false);
        return false;
    }

    /**
     * Walks the ray through {@code n} intersected cells, consulting the cache
     * and the DataProvider. Returns true when the ray reaches the target
     * without hitting an occluding block; false on a hit (lastHitBlock is then
     * set) or when a required chunk is not loaded.
     */
    private boolean stepRay(Vec3d start, int currentX, int currentY,
        int currentZ, double distInX, double distInY,
        double distInZ, int n, int x_inc, int y_inc,
        int z_inc, double t_next_y, double t_next_x,
        double t_next_z) {
        allowWallClipping = true; // initially allow rays to go through walls till they are on the outside
        // iterate through all intersecting cells (n times)
        for (; n > 1; n--) { // n-1 times because we don't want to check the last block
            // get cached value, 0 means uncached (default)
            int cVal = getCacheValue(currentX, currentY, currentZ);
            if (cVal == 2 && !allowWallClipping) {
                // block cached as occluding, stop ray
                lastHitBlock[0] = currentX;
                lastHitBlock[1] = currentY;
                lastHitBlock[2] = currentZ;
                return false;
            }
            if (cVal == 0) {
                // uncached: query the world and record the result
                int chunkX = currentX >> 4;
                int chunkZ = currentZ >> 4;
                if (!provider.prepareChunk(chunkX, chunkZ)) { // Chunk not ready
                    return false;
                }
                if (provider.isOpaqueFullCube(currentX, currentY, currentZ)) {
                    if (!allowWallClipping) {
                        cache.setLastHidden();
                        lastHitBlock[0] = currentX;
                        lastHitBlock[1] = currentY;
                        lastHitBlock[2] = currentZ;
                        return false;
                    }
                } else {
                    // outside of wall, now clipping is not allowed
                    allowWallClipping = false;
                    cache.setLastVisible();
                }
            }
            if (cVal == 1) {
                // outside of wall, now clipping is not allowed
                allowWallClipping = false;
            }
            if (t_next_y < t_next_x && t_next_y < t_next_z) {
                // next crossing is a Y border: step vertically
                currentY += y_inc;
                t_next_y += distInY;
            } else if (t_next_x < t_next_y && t_next_x < t_next_z) {
                // next crossing is an X border
                currentX += x_inc;
                t_next_x += distInX;
            } else {
                // next crossing is a Z border (also chosen on exact ties)
                currentZ += z_inc;
                t_next_z += distInZ;
            }
        }
        return true;
    }

    // -1 = invalid location, 0 = not checked yet, 1 = visible, 2 = occluding
    private int getCacheValue(int x, int y, int z) {
        // translate into camera-relative space
        x -= cameraPos[0];
        y -= cameraPos[1];
        z -= cameraPos[2];
        // outside the cached cube (with a 2-block margin) -> not cacheable
        if (Math.abs(x) > reach - 2 || Math.abs(y) > reach - 2
            || Math.abs(z) > reach - 2) {
            return -1;
        }
        // check if target is already known
        return cache.getState(x + reach, y + reach, z + reach);
    }

    // Stores a visibility result for the given block coordinates.
    // NOTE(review): no caller within this class; the Vec3d overload below is
    // used instead — confirm it is unused elsewhere before removing.
    private void cacheResult(int x, int y, int z, boolean result) {
        int cx = x - cameraPos[0] + reach;
        int cy = y - cameraPos[1] + reach;
        int cz = z - cameraPos[2] + reach;
        if (result) {
            cache.setVisible(cx, cy, cz);
        } else {
            cache.setHidden(cx, cy, cz);
        }
    }

    // Stores a visibility result for the block containing the given point.
    private void cacheResult(Vec3d vector, boolean result) {
        int cx = MathUtilities.floor(vector.x) - cameraPos[0] + reach;
        int cy = MathUtilities.floor(vector.y) - cameraPos[1] + reach;
        int cz = MathUtilities.floor(vector.z) - cameraPos[2] + reach;
        if (result) {
            cache.setVisible(cx, cy, cz);
        } else {
            cache.setHidden(cx, cy, cz);
        }
    }

    /** Invalidates the per-block visibility memo (call when the view changes). */
    public void resetCache() {
        this.cache.resetCache();
    }

    /** Position of the camera relative to an interval [min, max] on one axis. */
    private enum Relative {
        INSIDE, POSITIVE, NEGATIVE;

        public static Relative from(int min, int max, int pos) {
            if (max > pos && min > pos) {
                return POSITIVE;
            } else if (min < pos && max < pos) {
                return NEGATIVE;
            }
            return INSIDE;
        }
    }
}

Some files were not shown because too many files have changed in this diff Show More