Compare commits
37 Commits
custom-blo
...
6.74.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4e6960fab5 | ||
|
|
c95c1f032d | ||
|
|
1d9fc3413d | ||
|
|
19c099e2ef | ||
|
|
91e224020b | ||
|
|
19fc168034 | ||
|
|
a11815af82 | ||
|
|
2a63c62800 | ||
|
|
fd806621cf | ||
|
|
f1b831bfb4 | ||
|
|
aa3dae1d4e | ||
|
|
413ae4e94d | ||
|
|
1a816b0f14 | ||
|
|
c2935a45dc | ||
|
|
1338c0fadc | ||
|
|
2ccdbe4bc2 | ||
|
|
3d78bad4b1 | ||
|
|
84d481d753 | ||
|
|
e87b7ceb77 | ||
|
|
fd031e21f5 | ||
|
|
449bcc1ff8 | ||
|
|
93bcf6ce44 | ||
|
|
afcbcfa527 | ||
|
|
2ed1f2bb2f | ||
|
|
9cb596e746 | ||
|
|
763a3e9a87 | ||
|
|
f01691663a | ||
|
|
6c91b4e41f | ||
|
|
06fdb25925 | ||
|
|
f1b71c2ac9 | ||
|
|
665857a00f | ||
|
|
f18016b2de | ||
|
|
88633f94cb | ||
|
|
903084e574 | ||
|
|
dab0ce2ed2 | ||
|
|
f01e18950c | ||
|
|
d4a6bf105b |
@@ -14,7 +14,6 @@ plugins {
|
||||
id("maven-publish")
|
||||
id("java")
|
||||
kotlin("jvm") version "1.9.21"
|
||||
kotlin("plugin.serialization") version "1.9.21"
|
||||
}
|
||||
|
||||
dependencies {
|
||||
@@ -41,7 +40,6 @@ allprojects {
|
||||
apply(plugin = "maven-publish")
|
||||
apply(plugin = "io.github.goooler.shadow")
|
||||
apply(plugin = "kotlin")
|
||||
apply(plugin = "org.jetbrains.kotlin.plugin.serialization")
|
||||
|
||||
repositories {
|
||||
mavenCentral()
|
||||
@@ -63,7 +61,7 @@ allprojects {
|
||||
maven("https://repo.extendedclip.com/content/repositories/placeholderapi/")
|
||||
|
||||
// ProtocolLib
|
||||
//maven("https://repo.dmulloy2.net/nexus/repository/public/")
|
||||
maven("https://repo.dmulloy2.net/nexus/repository/public/")
|
||||
|
||||
// WorldGuard
|
||||
maven("https://maven.enginehub.org/repo/")
|
||||
@@ -212,7 +210,6 @@ tasks {
|
||||
//relocate("com.mysql", "com.willfp.eco.libs.mysql")
|
||||
relocate("com.mongodb", "com.willfp.eco.libs.mongodb")
|
||||
relocate("org.bson", "com.willfp.eco.libs.bson")
|
||||
relocate("org.litote", "com.willfp.eco.libs.litote")
|
||||
relocate("org.reactivestreams", "com.willfp.eco.libs.reactivestreams")
|
||||
relocate("reactor.", "com.willfp.eco.libs.reactor.") // Dot in name to be safe
|
||||
relocate("com.moandjiezana.toml", "com.willfp.eco.libs.toml")
|
||||
|
||||
@@ -0,0 +1,44 @@
|
||||
package com.willfp.eco.core.data.handlers;
|
||||
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
/**
|
||||
* Handles data read/write for a {@link com.willfp.eco.core.data.keys.PersistentDataKeyType} for a specific
|
||||
* data handler.
|
||||
*
|
||||
* @param <T> The type of data.
|
||||
*/
|
||||
public abstract class DataTypeSerializer<T> {
|
||||
/**
|
||||
* Create a new data type serializer.
|
||||
*/
|
||||
protected DataTypeSerializer() {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a value.
|
||||
*
|
||||
* @param uuid The uuid.
|
||||
* @param key The key.
|
||||
* @return The value.
|
||||
*/
|
||||
@Nullable
|
||||
public abstract T readAsync(@NotNull final UUID uuid,
|
||||
@NotNull final PersistentDataKey<T> key);
|
||||
|
||||
/**
|
||||
* Write a value.
|
||||
*
|
||||
* @param uuid The uuid.
|
||||
* @param key The key.
|
||||
* @param value The value.
|
||||
*/
|
||||
public abstract void writeAsync(@NotNull final UUID uuid,
|
||||
@NotNull final PersistentDataKey<T> key,
|
||||
@NotNull final T value);
|
||||
}
|
||||
@@ -0,0 +1,180 @@
|
||||
package com.willfp.eco.core.data.handlers;
|
||||
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey;
|
||||
import com.willfp.eco.core.registry.Registrable;
|
||||
import com.willfp.eco.core.tuples.Pair;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Handles persistent data.
|
||||
*/
|
||||
public abstract class PersistentDataHandler implements Registrable {
|
||||
/**
|
||||
* The id.
|
||||
*/
|
||||
private final String id;
|
||||
|
||||
/**
|
||||
* The executor.
|
||||
*/
|
||||
private final ExecutorService executor = Executors.newCachedThreadPool();
|
||||
|
||||
/**
|
||||
* Create a new persistent data handler.
|
||||
*
|
||||
* @param id The id.
|
||||
*/
|
||||
protected PersistentDataHandler(@NotNull final String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all UUIDs with saved data.
|
||||
* <p>
|
||||
* This is a blocking operation.
|
||||
*
|
||||
* @return All saved UUIDs.
|
||||
*/
|
||||
public abstract Set<UUID> getSavedUUIDs();
|
||||
|
||||
/**
|
||||
* Save to disk.
|
||||
* <p>
|
||||
* If write commits to disk, this method does not need to be overridden.
|
||||
* <p>
|
||||
* This method is called asynchronously.
|
||||
*/
|
||||
protected void doSave() {
|
||||
// Save to disk
|
||||
}
|
||||
|
||||
/**
|
||||
* If the handler should autosave.
|
||||
*
|
||||
* @return If the handler should autosave.
|
||||
*/
|
||||
public boolean shouldAutosave() {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Save the data.
|
||||
*/
|
||||
public final void save() {
|
||||
executor.submit(this::doSave);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a key from persistent data.
|
||||
*
|
||||
* @param uuid The uuid.
|
||||
* @param key The key.
|
||||
* @param <T> The type of the key.
|
||||
* @return The value, or null if not found.
|
||||
*/
|
||||
@Nullable
|
||||
public final <T> T read(@NotNull final UUID uuid,
|
||||
@NotNull final PersistentDataKey<T> key) {
|
||||
DataTypeSerializer<T> serializer = key.getType().getSerializer(this);
|
||||
Future<T> future = executor.submit(() -> serializer.readAsync(uuid, key));
|
||||
|
||||
try {
|
||||
return future.get();
|
||||
} catch (InterruptedException | ExecutionException e) {
|
||||
e.printStackTrace();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Write a key to persistent data.
|
||||
*
|
||||
* @param uuid The uuid.
|
||||
* @param key The key.
|
||||
* @param value The value.
|
||||
* @param <T> The type of the key.
|
||||
*/
|
||||
public final <T> void write(@NotNull final UUID uuid,
|
||||
@NotNull final PersistentDataKey<T> key,
|
||||
@NotNull final T value) {
|
||||
DataTypeSerializer<T> serializer = key.getType().getSerializer(this);
|
||||
executor.submit(() -> serializer.writeAsync(uuid, key, value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize profile.
|
||||
*
|
||||
* @param uuid The uuid to serialize.
|
||||
* @param keys The keys to serialize.
|
||||
* @return The serialized data.
|
||||
*/
|
||||
@NotNull
|
||||
public final SerializedProfile serializeProfile(@NotNull final UUID uuid,
|
||||
@NotNull final Set<PersistentDataKey<?>> keys) {
|
||||
Map<PersistentDataKey<?>, CompletableFuture<Object>> futures = keys.stream()
|
||||
.collect(Collectors.toMap(
|
||||
key -> key,
|
||||
key -> CompletableFuture.supplyAsync(() -> read(uuid, key), executor)
|
||||
));
|
||||
|
||||
Map<PersistentDataKey<?>, Object> data = futures.entrySet().stream()
|
||||
.map(entry -> new Pair<PersistentDataKey<?>, Object>(entry.getKey(), entry.getValue().join()))
|
||||
.filter(entry -> entry.getSecond() != null)
|
||||
.collect(Collectors.toMap(Pair::getFirst, Pair::getSecond));
|
||||
|
||||
return new SerializedProfile(uuid, data);
|
||||
}
|
||||
|
||||
/**`
|
||||
* Load profile data.
|
||||
*
|
||||
* @param profile The profile.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public final void loadSerializedProfile(@NotNull final SerializedProfile profile) {
|
||||
for (Map.Entry<PersistentDataKey<?>, Object> entry : profile.data().entrySet()) {
|
||||
PersistentDataKey<?> key = entry.getKey();
|
||||
Object value = entry.getValue();
|
||||
|
||||
// This cast is safe because the data is serialized
|
||||
write(profile.uuid(), (PersistentDataKey<? super Object>) key, value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save and shutdown the handler.
|
||||
*
|
||||
* @throws InterruptedException If the writes could not be awaited.
|
||||
*/
|
||||
public final void shutdown() throws InterruptedException {
|
||||
doSave();
|
||||
|
||||
if (executor.isShutdown()) {
|
||||
return;
|
||||
}
|
||||
|
||||
executor.shutdown();
|
||||
while (!executor.awaitTermination(2, TimeUnit.MINUTES)) {
|
||||
// Wait
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@NotNull
|
||||
public final String getID() {
|
||||
return id;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
package com.willfp.eco.core.data.handlers;
|
||||
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
|
||||
* Serialized profile.
|
||||
*
|
||||
* @param uuid The uuid.
|
||||
* @param data The data.
|
||||
*/
|
||||
public record SerializedProfile(
|
||||
@NotNull UUID uuid,
|
||||
@NotNull Map<PersistentDataKey<?>, Object> data
|
||||
) {
|
||||
|
||||
}
|
||||
@@ -34,6 +34,19 @@ public final class PersistentDataKey<T> {
|
||||
*/
|
||||
private final boolean isLocal;
|
||||
|
||||
/**
|
||||
* Create a new Persistent Data Key.
|
||||
*
|
||||
* @param key The key.
|
||||
* @param type The data type.
|
||||
* @param defaultValue The default value.
|
||||
*/
|
||||
public PersistentDataKey(@NotNull final NamespacedKey key,
|
||||
@NotNull final PersistentDataKeyType<T> type,
|
||||
@NotNull final T defaultValue) {
|
||||
this(key, type, defaultValue, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Persistent Data Key.
|
||||
*
|
||||
@@ -54,24 +67,6 @@ public final class PersistentDataKey<T> {
|
||||
Eco.get().registerPersistentKey(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Persistent Data Key.
|
||||
*
|
||||
* @param key The key.
|
||||
* @param type The data type.
|
||||
* @param defaultValue The default value.
|
||||
*/
|
||||
public PersistentDataKey(@NotNull final NamespacedKey key,
|
||||
@NotNull final PersistentDataKeyType<T> type,
|
||||
@NotNull final T defaultValue) {
|
||||
this.key = key;
|
||||
this.defaultValue = defaultValue;
|
||||
this.type = type;
|
||||
this.isLocal = false;
|
||||
|
||||
Eco.get().registerPersistentKey(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "PersistentDataKey{"
|
||||
|
||||
@@ -1,12 +1,17 @@
|
||||
package com.willfp.eco.core.data.keys;
|
||||
|
||||
import com.willfp.eco.core.config.interfaces.Config;
|
||||
import com.willfp.eco.core.data.handlers.DataTypeSerializer;
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
@@ -61,18 +66,14 @@ public final class PersistentDataKeyType<T> {
|
||||
private final String name;
|
||||
|
||||
/**
|
||||
* Get the name of the key type.
|
||||
*
|
||||
* @return The name.
|
||||
* The serializers for this key type.
|
||||
*/
|
||||
public String name() {
|
||||
return name;
|
||||
}
|
||||
private final Map<PersistentDataHandler, DataTypeSerializer<T>> serializers = new HashMap<>();
|
||||
|
||||
/**
|
||||
* Create new PersistentDataKeyType.
|
||||
*
|
||||
* @param name The name.
|
||||
* @param name The name.
|
||||
*/
|
||||
private PersistentDataKeyType(@NotNull final String name) {
|
||||
VALUES.add(this);
|
||||
@@ -80,6 +81,44 @@ public final class PersistentDataKeyType<T> {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the name of the key type.
|
||||
*
|
||||
* @return The name.
|
||||
*/
|
||||
@NotNull
|
||||
public String name() {
|
||||
return name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a serializer for this key type.
|
||||
*
|
||||
* @param handler The handler.
|
||||
* @param serializer The serializer.
|
||||
*/
|
||||
public void registerSerializer(@NotNull final PersistentDataHandler handler,
|
||||
@NotNull final DataTypeSerializer<T> serializer) {
|
||||
this.serializers.put(handler, serializer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the serializer for a handler.
|
||||
*
|
||||
* @param handler The handler.
|
||||
* @return The serializer.
|
||||
*/
|
||||
@NotNull
|
||||
public DataTypeSerializer<T> getSerializer(@NotNull final PersistentDataHandler handler) {
|
||||
DataTypeSerializer<T> serializer = this.serializers.get(handler);
|
||||
|
||||
if (serializer == null) {
|
||||
throw new NoSuchElementException("No serializer for handler: " + handler);
|
||||
}
|
||||
|
||||
return serializer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(@Nullable final Object that) {
|
||||
if (this == that) {
|
||||
|
||||
@@ -2,6 +2,7 @@ package com.willfp.eco.core.proxy;
|
||||
|
||||
import com.willfp.eco.core.version.Version;
|
||||
import org.bukkit.Bukkit;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
@@ -35,6 +36,13 @@ public final class ProxyConstants {
|
||||
throw new UnsupportedOperationException("This is a utility class and cannot be instantiated");
|
||||
}
|
||||
|
||||
private static String convertVersion(@NotNull final String version) {
|
||||
return switch (version) {
|
||||
case "v1_21_1" -> "v1_21";
|
||||
default -> version;
|
||||
};
|
||||
}
|
||||
|
||||
static {
|
||||
String currentMinecraftVersion = Bukkit.getServer().getBukkitVersion().split("-")[0];
|
||||
String nmsVersion;
|
||||
@@ -45,6 +53,6 @@ public final class ProxyConstants {
|
||||
nmsVersion = "v" + currentMinecraftVersion.replace(".", "_");
|
||||
}
|
||||
|
||||
NMS_VERSION = nmsVersion;
|
||||
NMS_VERSION = convertVersion(nmsVersion);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,12 +14,16 @@ object ArgParserEnchantment : LookupArgParser {
|
||||
val enchants = mutableMapOf<Enchantment, Int>()
|
||||
|
||||
for (arg in args) {
|
||||
val argSplit = arg.split(":")
|
||||
try {
|
||||
val argSplit = arg.split(":")
|
||||
|
||||
val enchant = Enchantment.getByKey(NamespacedKey.minecraft(argSplit[0].lowercase())) ?: continue
|
||||
val level = argSplit.getOrNull(1)?.toIntOrNull() ?: enchant.maxLevel
|
||||
val enchant = Enchantment.getByKey(NamespacedKey.minecraft(argSplit[0].lowercase())) ?: continue
|
||||
val level = argSplit.getOrNull(1)?.toIntOrNull() ?: enchant.maxLevel
|
||||
|
||||
enchants[enchant] = level
|
||||
enchants[enchant] = level
|
||||
} catch (e: IllegalArgumentException) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if (enchants.isEmpty()) {
|
||||
|
||||
@@ -12,7 +12,7 @@ object ParticleFactoryRGB : ParticleFactory {
|
||||
if (Prerequisite.HAS_1_20_5.isMet) {
|
||||
Particle.valueOf("DUST")
|
||||
} else {
|
||||
Particle.valueOf("REDSTONE_DUST")
|
||||
Particle.valueOf("REDSTONE")
|
||||
}
|
||||
}.getOrNull()
|
||||
|
||||
|
||||
@@ -3,12 +3,19 @@ package com.willfp.eco.internal.spigot.proxy.common.packet.display
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.core.packet.PacketEvent
|
||||
import com.willfp.eco.core.packet.PacketListener
|
||||
import com.willfp.eco.internal.spigot.proxy.common.toResourceLocation
|
||||
import com.willfp.eco.util.namespacedKeyOf
|
||||
import net.minecraft.network.protocol.game.ClientboundPlaceGhostRecipePacket
|
||||
import net.minecraft.resources.ResourceLocation
|
||||
|
||||
class PacketAutoRecipe(
|
||||
private val plugin: EcoPlugin
|
||||
) : PacketListener {
|
||||
private val fKey = ClientboundPlaceGhostRecipePacket::class.java
|
||||
.declaredFields
|
||||
.first { it.type == ResourceLocation::class.java }
|
||||
.apply { isAccessible = true }
|
||||
|
||||
override fun onSend(event: PacketEvent) {
|
||||
val packet = event.packet.handle as? ClientboundPlaceGhostRecipePacket ?: return
|
||||
|
||||
@@ -24,9 +31,7 @@ class PacketAutoRecipe(
|
||||
return
|
||||
}
|
||||
|
||||
val fKey = packet.javaClass.getDeclaredField("b")
|
||||
fKey.isAccessible = true
|
||||
val key = fKey[packet] as ResourceLocation
|
||||
fKey[packet] = ResourceLocation(key.namespace, key.path + "_displayed")
|
||||
fKey[packet] = namespacedKeyOf(key.namespace, key.path + "_displayed").toResourceLocation()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,6 +25,7 @@ import net.minecraft.util.Unit
|
||||
import net.minecraft.world.item.component.CustomData
|
||||
import net.minecraft.world.item.component.CustomModelData
|
||||
import net.minecraft.world.item.component.ItemLore
|
||||
import net.minecraft.world.item.enchantment.ItemEnchantments
|
||||
import org.bukkit.Bukkit
|
||||
import org.bukkit.craftbukkit.CraftRegistry
|
||||
import org.bukkit.craftbukkit.CraftServer
|
||||
@@ -53,7 +54,7 @@ class NewEcoFastItemStack(
|
||||
private val pdc = (handle.get(DataComponents.CUSTOM_DATA)?.copyTag() ?: CompoundTag()).makePdc()
|
||||
|
||||
override fun getEnchants(checkStored: Boolean): Map<Enchantment, Int> {
|
||||
val enchantments = handle.get(DataComponents.ENCHANTMENTS) ?: return emptyMap()
|
||||
val enchantments = handle.get(DataComponents.ENCHANTMENTS) ?: ItemEnchantments.EMPTY
|
||||
|
||||
val map = mutableMapOf<Enchantment, Int>()
|
||||
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import org.gradle.internal.impldep.org.junit.experimental.categories.Categories.CategoryFilter.exclude
|
||||
|
||||
group = "com.willfp"
|
||||
version = rootProject.version
|
||||
|
||||
@@ -9,16 +7,13 @@ dependencies {
|
||||
|
||||
// Libraries
|
||||
implementation("com.github.WillFP:Crunch:1.1.3")
|
||||
implementation("mysql:mysql-connector-java:8.0.25")
|
||||
implementation("org.jetbrains.exposed:exposed-core:0.37.3")
|
||||
implementation("org.jetbrains.exposed:exposed-dao:0.37.3")
|
||||
implementation("org.jetbrains.exposed:exposed-jdbc:0.37.3")
|
||||
implementation("com.zaxxer:HikariCP:5.0.0")
|
||||
implementation("com.mysql:mysql-connector-j:8.4.0")
|
||||
implementation("org.jetbrains.exposed:exposed-core:0.53.0")
|
||||
implementation("org.jetbrains.exposed:exposed-jdbc:0.53.0")
|
||||
implementation("com.zaxxer:HikariCP:5.1.0")
|
||||
implementation("net.kyori:adventure-platform-bukkit:4.1.0")
|
||||
implementation("org.javassist:javassist:3.29.2-GA")
|
||||
implementation("org.mongodb:mongodb-driver-kotlin-coroutine:5.0.0")
|
||||
implementation("org.jetbrains.kotlinx:kotlinx-serialization-core:1.5.1")
|
||||
implementation("org.mongodb:bson-kotlinx:5.0.0")
|
||||
implementation("org.mongodb:mongodb-driver-kotlin-coroutine:5.1.2")
|
||||
implementation("com.moandjiezana.toml:toml4j:0.7.2") {
|
||||
exclude(group = "com.google.code.gson", module = "gson")
|
||||
}
|
||||
@@ -29,7 +24,7 @@ dependencies {
|
||||
compileOnly("io.papermc.paper:paper-api:1.20.2-R0.1-SNAPSHOT")
|
||||
|
||||
// Plugin dependencies
|
||||
compileOnly("com.comphenix.protocol:ProtocolLib:5.0.0-SNAPSHOT")
|
||||
compileOnly("com.comphenix.protocol:ProtocolLib:5.1.0")
|
||||
compileOnly("com.sk89q.worldguard:worldguard-bukkit:7.0.7-SNAPSHOT")
|
||||
compileOnly("com.github.TechFortress:GriefPrevention:16.17.1")
|
||||
compileOnly("com.github.TownyAdvanced:Towny:0.99.5.21") {
|
||||
@@ -76,7 +71,6 @@ dependencies {
|
||||
tasks {
|
||||
shadowJar {
|
||||
minimize {
|
||||
exclude(dependency("org.litote.kmongo:kmongo-coroutine:.*"))
|
||||
exclude(dependency("org.jetbrains.exposed:.*:.*"))
|
||||
exclude(dependency("com.willfp:ModelEngineBridge:.*"))
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@ import com.willfp.eco.core.Eco
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.core.PluginLike
|
||||
import com.willfp.eco.core.PluginProps
|
||||
import com.willfp.eco.core.Prerequisite
|
||||
import com.willfp.eco.core.command.CommandBase
|
||||
import com.willfp.eco.core.command.PluginCommandBase
|
||||
import com.willfp.eco.core.config.ConfigType
|
||||
@@ -44,8 +43,7 @@ import com.willfp.eco.internal.proxy.EcoProxyFactory
|
||||
import com.willfp.eco.internal.scheduling.EcoScheduler
|
||||
import com.willfp.eco.internal.spigot.data.DataYml
|
||||
import com.willfp.eco.internal.spigot.data.KeyRegistry
|
||||
import com.willfp.eco.internal.spigot.data.ProfileHandler
|
||||
import com.willfp.eco.internal.spigot.data.storage.HandlerType
|
||||
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
|
||||
import com.willfp.eco.internal.spigot.integrations.bstats.MetricHandler
|
||||
import com.willfp.eco.internal.spigot.math.DelegatedExpressionHandler
|
||||
import com.willfp.eco.internal.spigot.math.ImmediatePlaceholderTranslationExpressionHandler
|
||||
@@ -74,7 +72,7 @@ import org.bukkit.inventory.ItemStack
|
||||
import org.bukkit.inventory.meta.SkullMeta
|
||||
import org.bukkit.persistence.PersistentDataContainer
|
||||
import java.net.URLClassLoader
|
||||
import java.util.*
|
||||
import java.util.UUID
|
||||
|
||||
private val loadedEcoPlugins = mutableMapOf<String, EcoPlugin>()
|
||||
|
||||
@@ -82,10 +80,7 @@ private val loadedEcoPlugins = mutableMapOf<String, EcoPlugin>()
|
||||
class EcoImpl : EcoSpigotPlugin(), Eco {
|
||||
override val dataYml = DataYml(this)
|
||||
|
||||
override val profileHandler = ProfileHandler(
|
||||
HandlerType.valueOf(this.configYml.getString("data-handler").uppercase()),
|
||||
this
|
||||
)
|
||||
override val profileHandler = ProfileHandler(this)
|
||||
|
||||
init {
|
||||
getProxy(CommonsInitializerProxy::class.java).init(this)
|
||||
@@ -290,10 +285,10 @@ class EcoImpl : EcoSpigotPlugin(), Eco {
|
||||
bukkitAudiences
|
||||
|
||||
override fun getServerProfile() =
|
||||
profileHandler.loadServerProfile()
|
||||
profileHandler.getServerProfile()
|
||||
|
||||
override fun loadPlayerProfile(uuid: UUID) =
|
||||
profileHandler.load(uuid)
|
||||
profileHandler.getPlayerProfile(uuid)
|
||||
|
||||
override fun createDummyEntity(location: Location): Entity =
|
||||
getProxy(DummyEntityFactoryProxy::class.java).createDummyEntity(location)
|
||||
|
||||
@@ -17,7 +17,6 @@ import com.willfp.eco.core.integrations.mcmmo.McmmoManager
|
||||
import com.willfp.eco.core.integrations.placeholder.PlaceholderManager
|
||||
import com.willfp.eco.core.integrations.shop.ShopManager
|
||||
import com.willfp.eco.core.items.Items
|
||||
import com.willfp.eco.core.items.tag.VanillaItemTag
|
||||
import com.willfp.eco.core.packet.PacketListener
|
||||
import com.willfp.eco.core.particle.Particles
|
||||
import com.willfp.eco.core.price.Prices
|
||||
@@ -62,11 +61,10 @@ import com.willfp.eco.internal.price.PriceFactoryXP
|
||||
import com.willfp.eco.internal.price.PriceFactoryXPLevels
|
||||
import com.willfp.eco.internal.recipes.AutocrafterPatch
|
||||
import com.willfp.eco.internal.spigot.arrows.ArrowDataListener
|
||||
import com.willfp.eco.internal.spigot.data.DataListener
|
||||
import com.willfp.eco.internal.spigot.data.DataYml
|
||||
import com.willfp.eco.internal.spigot.data.PlayerBlockListener
|
||||
import com.willfp.eco.internal.spigot.data.ProfileHandler
|
||||
import com.willfp.eco.internal.spigot.data.storage.ProfileSaver
|
||||
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
|
||||
import com.willfp.eco.internal.spigot.data.profiles.ProfileLoadListener
|
||||
import com.willfp.eco.internal.spigot.drops.CollatedRunnable
|
||||
import com.willfp.eco.internal.spigot.eventlisteners.EntityDeathByEntityListeners
|
||||
import com.willfp.eco.internal.spigot.eventlisteners.NaturalExpGainListenersPaper
|
||||
@@ -150,7 +148,7 @@ import org.bukkit.inventory.ItemStack
|
||||
|
||||
abstract class EcoSpigotPlugin : EcoPlugin() {
|
||||
abstract val dataYml: DataYml
|
||||
protected abstract val profileHandler: ProfileHandler
|
||||
abstract val profileHandler: ProfileHandler
|
||||
protected var bukkitAudiences: BukkitAudiences? = null
|
||||
|
||||
init {
|
||||
@@ -259,9 +257,6 @@ abstract class EcoSpigotPlugin : EcoPlugin() {
|
||||
// Init FIS
|
||||
this.getProxy(FastItemStackFactoryProxy::class.java).create(ItemStack(Material.AIR)).unwrap()
|
||||
|
||||
// Preload categorized persistent data keys
|
||||
profileHandler.initialize()
|
||||
|
||||
// Init adventure
|
||||
if (!Prerequisite.HAS_PAPER.isMet) {
|
||||
bukkitAudiences = BukkitAudiences.create(this)
|
||||
@@ -282,14 +277,11 @@ abstract class EcoSpigotPlugin : EcoPlugin() {
|
||||
override fun createTasks() {
|
||||
CollatedRunnable(this)
|
||||
|
||||
this.scheduler.runLater(3) {
|
||||
profileHandler.migrateIfNeeded()
|
||||
if (!profileHandler.migrateIfNecessary()) {
|
||||
profileHandler.profileWriter.startTickingAutosave()
|
||||
profileHandler.profileWriter.startTickingSaves()
|
||||
}
|
||||
|
||||
profileHandler.startAutosaving()
|
||||
|
||||
ProfileSaver(this, profileHandler).startTicking()
|
||||
|
||||
this.scheduler.runTimer(
|
||||
this.configYml.getInt("display-frame-ttl").toLong(),
|
||||
this.configYml.getInt("display-frame-ttl").toLong(),
|
||||
@@ -428,7 +420,7 @@ abstract class EcoSpigotPlugin : EcoPlugin() {
|
||||
GUIListener(this),
|
||||
ArrowDataListener(this),
|
||||
ArmorChangeEventListeners(this),
|
||||
DataListener(this, profileHandler),
|
||||
ProfileLoadListener(this, profileHandler),
|
||||
PlayerBlockListener(this),
|
||||
ServerLocking
|
||||
)
|
||||
|
||||
@@ -1,110 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data
|
||||
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.core.data.PlayerProfile
|
||||
import com.willfp.eco.core.data.Profile
|
||||
import com.willfp.eco.core.data.ServerProfile
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.data.storage.DataHandler
|
||||
import com.willfp.eco.util.namespacedKeyOf
|
||||
import java.util.UUID
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
|
||||
abstract class EcoProfile(
|
||||
val data: MutableMap<PersistentDataKey<*>, Any>,
|
||||
val uuid: UUID,
|
||||
private val handler: DataHandler,
|
||||
private val localHandler: DataHandler
|
||||
) : Profile {
|
||||
override fun <T : Any> write(key: PersistentDataKey<T>, value: T) {
|
||||
this.data[key] = value
|
||||
|
||||
CHANGE_MAP.add(uuid)
|
||||
}
|
||||
|
||||
override fun <T : Any> read(key: PersistentDataKey<T>): T {
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
if (this.data.containsKey(key)) {
|
||||
return this.data[key] as T
|
||||
}
|
||||
|
||||
this.data[key] = if (key.isSavedLocally) {
|
||||
localHandler.read(uuid, key)
|
||||
} else {
|
||||
handler.read(uuid, key)
|
||||
} ?: key.defaultValue
|
||||
|
||||
return read(key)
|
||||
}
|
||||
|
||||
override fun equals(other: Any?): Boolean {
|
||||
if (other !is EcoProfile) {
|
||||
return false
|
||||
}
|
||||
|
||||
return this.uuid == other.uuid
|
||||
}
|
||||
|
||||
override fun hashCode(): Int {
|
||||
return this.uuid.hashCode()
|
||||
}
|
||||
|
||||
companion object {
|
||||
val CHANGE_MAP: MutableSet<UUID> = ConcurrentHashMap.newKeySet()
|
||||
}
|
||||
}
|
||||
|
||||
class EcoPlayerProfile(
|
||||
data: MutableMap<PersistentDataKey<*>, Any>,
|
||||
uuid: UUID,
|
||||
handler: DataHandler,
|
||||
localHandler: DataHandler
|
||||
) : EcoProfile(data, uuid, handler, localHandler), PlayerProfile {
|
||||
override fun toString(): String {
|
||||
return "EcoPlayerProfile{uuid=$uuid}"
|
||||
}
|
||||
}
|
||||
|
||||
private val serverIDKey = PersistentDataKey(
|
||||
namespacedKeyOf("eco", "server_id"),
|
||||
PersistentDataKeyType.STRING,
|
||||
""
|
||||
)
|
||||
|
||||
private val localServerIDKey = PersistentDataKey(
|
||||
namespacedKeyOf("eco", "local_server_id"),
|
||||
PersistentDataKeyType.STRING,
|
||||
""
|
||||
)
|
||||
|
||||
class EcoServerProfile(
|
||||
data: MutableMap<PersistentDataKey<*>, Any>,
|
||||
handler: DataHandler,
|
||||
localHandler: DataHandler
|
||||
) : EcoProfile(data, serverProfileUUID, handler, localHandler), ServerProfile {
|
||||
override fun getServerID(): String {
|
||||
if (this.read(serverIDKey).isBlank()) {
|
||||
this.write(serverIDKey, UUID.randomUUID().toString())
|
||||
}
|
||||
|
||||
return this.read(serverIDKey)
|
||||
}
|
||||
|
||||
override fun getLocalServerID(): String {
|
||||
if (this.read(localServerIDKey).isBlank()) {
|
||||
this.write(localServerIDKey, UUID.randomUUID().toString())
|
||||
}
|
||||
|
||||
return this.read(localServerIDKey)
|
||||
}
|
||||
|
||||
override fun toString(): String {
|
||||
return "EcoServerProfile"
|
||||
}
|
||||
}
|
||||
|
||||
private val PersistentDataKey<*>.isSavedLocally: Boolean
|
||||
get() = this == localServerIDKey
|
||||
|| EcoPlugin.getPlugin(this.key.namespace)?.isUsingLocalStorage == true
|
||||
|| this.isLocal
|
||||
@@ -1,55 +1,20 @@
|
||||
package com.willfp.eco.internal.spigot.data
|
||||
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import org.bukkit.NamespacedKey
|
||||
import java.math.BigDecimal
|
||||
|
||||
object KeyRegistry {
|
||||
private val registry = mutableMapOf<NamespacedKey, PersistentDataKey<*>>()
|
||||
|
||||
fun registerKey(key: PersistentDataKey<*>) {
|
||||
if (this.registry.containsKey(key.key)) {
|
||||
this.registry.remove(key.key)
|
||||
if (key.defaultValue == null) {
|
||||
throw IllegalArgumentException("Default value cannot be null!")
|
||||
}
|
||||
|
||||
validateKey(key)
|
||||
|
||||
this.registry[key.key] = key
|
||||
}
|
||||
|
||||
fun getRegisteredKeys(): MutableSet<PersistentDataKey<*>> {
|
||||
return registry.values.toMutableSet()
|
||||
}
|
||||
|
||||
private fun <T> validateKey(key: PersistentDataKey<T>) {
|
||||
val default = key.defaultValue
|
||||
|
||||
when (key.type) {
|
||||
PersistentDataKeyType.INT -> if (default !is Int) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be Int")
|
||||
}
|
||||
PersistentDataKeyType.DOUBLE -> if (default !is Double) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be Double")
|
||||
}
|
||||
PersistentDataKeyType.BOOLEAN -> if (default !is Boolean) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be Boolean")
|
||||
}
|
||||
PersistentDataKeyType.STRING -> if (default !is String) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be String")
|
||||
}
|
||||
PersistentDataKeyType.STRING_LIST -> if (default !is List<*> || default.firstOrNull() !is String?) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be String List")
|
||||
}
|
||||
PersistentDataKeyType.CONFIG -> if (default !is Config) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be Config")
|
||||
}
|
||||
PersistentDataKeyType.BIG_DECIMAL -> if (default !is BigDecimal) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be BigDecimal")
|
||||
}
|
||||
|
||||
else -> throw NullPointerException("Null value found!")
|
||||
}
|
||||
fun getRegisteredKeys(): Set<PersistentDataKey<*>> {
|
||||
return registry.values.toSet()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,185 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data
|
||||
|
||||
import com.willfp.eco.core.data.PlayerProfile
|
||||
import com.willfp.eco.core.data.Profile
|
||||
import com.willfp.eco.core.data.ServerProfile
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.profile
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.ServerLocking
|
||||
import com.willfp.eco.internal.spigot.data.storage.DataHandler
|
||||
import com.willfp.eco.internal.spigot.data.storage.HandlerType
|
||||
import com.willfp.eco.internal.spigot.data.storage.MongoDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.storage.MySQLDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.storage.YamlDataHandler
|
||||
import org.bukkit.Bukkit
|
||||
import java.util.UUID
|
||||
|
||||
// Sentinel all-zero UUID under which the global server profile is stored,
// distinguishing it from per-player profiles.
val serverProfileUUID = UUID(0, 0)
|
||||
|
||||
/**
 * Loads, caches and saves player/server profiles through the configured
 * [DataHandler].
 *
 * A YAML handler is always constructed alongside the primary handler; writes
 * and saves are mirrored to it whenever it is not itself the primary handler.
 */
class ProfileHandler(
    private val type: HandlerType,
    private val plugin: EcoSpigotPlugin
) {
    // Profiles currently held in memory, keyed by owner UUID.
    private val loaded = mutableMapOf<UUID, EcoProfile>()

    // Always-present YAML handler: primary when type == YAML, local mirror otherwise.
    private val localHandler = YamlDataHandler(plugin, this)

    // Primary handler selected by the configured handler type.
    val handler: DataHandler = when (type) {
        HandlerType.YAML -> localHandler
        HandlerType.MYSQL -> MySQLDataHandler(plugin, this)
        HandlerType.MONGO -> MongoDataHandler(plugin, this)
    }

    // Returns the cached profile for this UUID, or null if not currently loaded.
    fun accessLoadedProfile(uuid: UUID): EcoProfile? =
        loaded[uuid]

    /**
     * Fetches the cached profile for [uuid], creating (and caching) an empty
     * one if absent. The all-zero [serverProfileUUID] maps to the server
     * profile; any other UUID maps to a player profile.
     */
    fun loadGenericProfile(uuid: UUID): Profile {
        val found = loaded[uuid]
        if (found != null) {
            return found
        }

        val data = mutableMapOf<PersistentDataKey<*>, Any>()

        val profile = if (uuid == serverProfileUUID)
            EcoServerProfile(data, handler, localHandler) else EcoPlayerProfile(data, uuid, handler, localHandler)

        loaded[uuid] = profile
        return profile
    }

    // Loads a player profile; uuid must not be the server sentinel UUID.
    fun load(uuid: UUID): PlayerProfile {
        return loadGenericProfile(uuid) as PlayerProfile
    }

    // Loads the single server-wide profile.
    fun loadServerProfile(): ServerProfile {
        return loadGenericProfile(serverProfileUUID) as ServerProfile
    }

    /**
     * Persists the given keys of a loaded profile. Keys without an in-memory
     * value are skipped. No-op if the profile is not loaded.
     */
    fun saveKeysFor(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
        val profile = accessLoadedProfile(uuid) ?: return
        val map = mutableMapOf<PersistentDataKey<*>, Any>()

        for (key in keys) {
            map[key] = profile.data[key] ?: continue
        }

        handler.saveKeysFor(uuid, map)

        // Don't save to local handler if it's the same handler.
        if (localHandler != handler) {
            localHandler.saveKeysFor(uuid, map)
        }
    }

    // Evicts a profile from the in-memory cache (does not save it).
    fun unloadPlayer(uuid: UUID) {
        loaded.remove(uuid)
    }

    // Saves all data via the primary handler, mirroring to the local handler.
    fun save() {
        handler.save()

        if (localHandler != handler) {
            localHandler.save()
        }
    }

    /**
     * Migrates all player data when the configured handler type differs from
     * the one recorded in data.yml, then shuts the server down so it restarts
     * on the new handler. Locks the server for the duration of the migration.
     */
    fun migrateIfNeeded() {
        if (!plugin.configYml.getBool("perform-data-migration")) {
            return
        }

        // First run: record the current handler and bail out below.
        if (!plugin.dataYml.has("previous-handler")) {
            plugin.dataYml.set("previous-handler", type.name)
            plugin.dataYml.save()
        }

        val previousHandlerType = HandlerType.valueOf(plugin.dataYml.getString("previous-handler"))

        if (previousHandlerType == type) {
            return
        }

        val previousHandler = when (previousHandlerType) {
            HandlerType.YAML -> YamlDataHandler(plugin, this)
            HandlerType.MYSQL -> MySQLDataHandler(plugin, this)
            HandlerType.MONGO -> MongoDataHandler(plugin, this)
        }

        ServerLocking.lock("Migrating player data! Check console for more information.")

        plugin.logger.info("eco has detected a change in data handler!")
        plugin.logger.info("Migrating server data from ${previousHandlerType.name} to ${type.name}")
        plugin.logger.info("This will take a while!")

        plugin.logger.info("Initializing previous handler...")
        previousHandler.initialize()

        val players = Bukkit.getOfflinePlayers().map { it.uniqueId }

        plugin.logger.info("Found data for ${players.size} players!")

        /*
        Declared here as its own function to be able to use T.
         */
        fun <T : Any> migrateKey(uuid: UUID, key: PersistentDataKey<T>, from: DataHandler, to: DataHandler) {
            val previous: T? = from.read(uuid, key)
            if (previous != null) {
                Bukkit.getOfflinePlayer(uuid).profile.write(key, previous) // Nope, no idea.
                to.write(uuid, key, previous)
            }
        }

        var i = 1
        for (uuid in players) {
            plugin.logger.info("Migrating data for $uuid... ($i / ${players.size})")
            for (key in PersistentDataKey.values()) {
                // Why this? Because known points *really* likes to break things with the legacy MySQL handler.
                if (key.key.key == "known_points") {
                    continue
                }

                // Best-effort per key: a missing value raises and is skipped.
                try {
                    migrateKey(uuid, key, previousHandler, handler)
                } catch (e: Exception) {
                    plugin.logger.info("Could not migrate ${key.key} for $uuid! This is probably because they do not have any data.")
                }
            }

            i++
        }

        plugin.logger.info("Saving new data...")
        handler.save()
        plugin.logger.info("Updating previous handler...")
        plugin.dataYml.set("previous-handler", type.name)
        plugin.dataYml.save()
        plugin.logger.info("The server will now automatically be restarted...")

        ServerLocking.unlock()

        Bukkit.getServer().shutdown()
    }

    // Initializes the primary handler, and the local mirror if distinct.
    fun initialize() {
        handler.initialize()
        if (localHandler != handler) {
            localHandler.initialize()
        }
    }

    /**
     * Starts a repeating async save task if enabled in config. The interval
     * is read in seconds and converted to ticks (x20).
     */
    fun startAutosaving() {
        if (!plugin.configYml.getBool("yaml.autosave")) {
            return
        }

        val interval = plugin.configYml.getInt("yaml.autosave-interval") * 20L

        plugin.scheduler.runTimer(20, interval) {
            handler.saveAsync()
            localHandler.saveAsync()
        }
    }
}
|
||||
@@ -0,0 +1,40 @@
|
||||
package com.willfp.eco.internal.spigot.data.handlers
|
||||
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler
|
||||
import com.willfp.eco.core.registry.KRegistrable
|
||||
import com.willfp.eco.core.registry.Registry
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.MongoDBPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.MySQLPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.YamlPersistentDataHandler
|
||||
|
||||
/**
 * A registrable factory for a [PersistentDataHandler] implementation,
 * keyed by the handler id used in configs (e.g. "yaml", "mysql").
 */
abstract class PersistentDataHandlerFactory(
    override val id: String
): KRegistrable {
    // Creates a new handler instance for the given plugin.
    abstract fun create(plugin: EcoSpigotPlugin): PersistentDataHandler
}
|
||||
|
||||
/**
 * Registry of the built-in persistent data handler factories.
 *
 * Registered ids: "yaml", "mysql", "mongodb", and "mongo" — the last is an
 * alias so configs may use either spelling for MongoDB.
 */
object PersistentDataHandlers : Registry<PersistentDataHandlerFactory>() {
    init {
        registerFactory("yaml") { plugin ->
            YamlPersistentDataHandler(plugin)
        }

        registerFactory("mysql") { plugin ->
            MySQLPersistentDataHandler(plugin.configYml.getSubsection("mysql"))
        }

        registerFactory("mongodb") { plugin ->
            MongoDBPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
        }

        // Configs should also accept "mongo"
        registerFactory("mongo") { plugin ->
            MongoDBPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
        }
    }

    // Wraps a creation lambda in an anonymous factory and registers it.
    private fun registerFactory(
        id: String,
        supplier: (EcoSpigotPlugin) -> PersistentDataHandler
    ) {
        register(object : PersistentDataHandlerFactory(id) {
            override fun create(plugin: EcoSpigotPlugin) = supplier(plugin)
        })
    }
}
|
||||
@@ -0,0 +1,142 @@
|
||||
package com.willfp.eco.internal.spigot.data.handlers.impl
|
||||
|
||||
import com.mongodb.MongoClientSettings
|
||||
import com.mongodb.client.model.Filters
|
||||
import com.mongodb.kotlin.client.coroutine.MongoClient
|
||||
import com.willfp.eco.core.config.Configs
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.data.handlers.DataTypeSerializer
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
|
||||
import kotlinx.coroutines.flow.firstOrNull
|
||||
import kotlinx.coroutines.flow.toList
|
||||
import kotlinx.coroutines.runBlocking
|
||||
import org.bson.BsonArray
|
||||
import org.bson.BsonBoolean
|
||||
import org.bson.BsonDecimal128
|
||||
import org.bson.BsonDocument
|
||||
import org.bson.BsonDouble
|
||||
import org.bson.BsonInt32
|
||||
import org.bson.BsonString
|
||||
import org.bson.BsonValue
|
||||
import org.bson.codecs.configuration.CodecRegistries
|
||||
import org.bson.codecs.pojo.PojoCodecProvider
|
||||
import java.math.BigDecimal
|
||||
import java.util.UUID
|
||||
|
||||
/**
 * Read-only access to data stored in the legacy MongoDB layout (the
 * "uuidprofile" collection). Writing is unsupported; every write throws.
 */
class LegacyMongoDBPersistentDataHandler(
    config: Config
) : PersistentDataHandler("legacy_mongodb") {
    // Default driver codecs plus automatic POJO support.
    private val codecRegistry = CodecRegistries.fromRegistries(
        MongoClientSettings.getDefaultCodecRegistry(),
        CodecRegistries.fromProviders(PojoCodecProvider.builder().automatic(true).build())
    )

    private val client = MongoClient.create(config.getString("url"))
    private val database = client.getDatabase(config.getString("database"))

    // The legacy layout always used the "uuidprofile" collection name.
    private val collection = database.getCollection<BsonDocument>("uuidprofile")
        .withCodecRegistry(codecRegistry)

    init {
        // Register a read-only BSON deserializer for every supported key type.
        PersistentDataKeyType.STRING.registerSerializer(this, object : LegacyMongoSerializer<String>() {
            override fun deserialize(value: BsonValue): String {
                return value.asString().value
            }
        })

        PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : LegacyMongoSerializer<Boolean>() {
            override fun deserialize(value: BsonValue): Boolean {
                return value.asBoolean().value
            }
        })

        PersistentDataKeyType.INT.registerSerializer(this, object : LegacyMongoSerializer<Int>() {
            override fun deserialize(value: BsonValue): Int {
                return value.asInt32().value
            }
        })

        PersistentDataKeyType.DOUBLE.registerSerializer(this, object : LegacyMongoSerializer<Double>() {
            override fun deserialize(value: BsonValue): Double {
                return value.asDouble().value
            }
        })

        PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : LegacyMongoSerializer<List<String>>() {
            override fun deserialize(value: BsonValue): List<String> {
                return value.asArray().values.map { it.asString().value }
            }
        })

        PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : LegacyMongoSerializer<BigDecimal>() {
            override fun deserialize(value: BsonValue): BigDecimal {
                return value.asDecimal128().value.bigDecimalValue()
            }
        })

        PersistentDataKeyType.CONFIG.registerSerializer(this, object : LegacyMongoSerializer<Config>() {
            // Recursively converts a BSON value into plain Kotlin values
            // (String/Int/Double/Boolean/BigDecimal/List/Map).
            private fun deserializeConfigValue(value: BsonValue): Any {
                return when (value) {
                    is BsonString -> value.value
                    is BsonInt32 -> value.value
                    is BsonDouble -> value.value
                    is BsonBoolean -> value.value
                    is BsonDecimal128 -> value.value.bigDecimalValue()
                    is BsonArray -> value.values.map { deserializeConfigValue(it) }
                    is BsonDocument -> value.mapValues { (_, v) -> deserializeConfigValue(v) }

                    else -> throw IllegalArgumentException("Could not deserialize config value type ${value::class.simpleName}")
                }
            }

            override fun deserialize(value: BsonValue): Config {
                @Suppress("UNCHECKED_CAST")
                return Configs.fromMap(deserializeConfigValue(value.asDocument()) as Map<String, Any>)
            }
        })
    }

    // Legacy documents use the profile UUID string as their "_id".
    override fun getSavedUUIDs(): Set<UUID> {
        return runBlocking {
            collection.find().toList().map {
                UUID.fromString(it.getString("_id").value)
            }.toSet()
        }
    }

    /**
     * Base serializer for the legacy layout: values live in a "data"
     * sub-document keyed by the key's full namespaced name. Values that
     * fail to deserialize are treated as absent (returns null).
     */
    private abstract inner class LegacyMongoSerializer<T : Any> : DataTypeSerializer<T>() {
        override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
            return runBlocking {
                val filter = Filters.eq("_id", uuid.toString())

                val profile = collection.find(filter)
                    .firstOrNull() ?: return@runBlocking null

                val dataMap = profile.getDocument("data")
                val value = dataMap[key.key.toString()] ?: return@runBlocking null

                try {
                    return@runBlocking deserialize(value)
                } catch (e: Exception) {
                    null
                }
            }
        }

        override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            throw UnsupportedOperationException("Legacy Mongo does not support writing")
        }

        protected abstract fun deserialize(value: BsonValue): T
    }

    // Factory exposing this handler under the "legacy_mongo" id.
    object Factory: PersistentDataHandlerFactory("legacy_mongo") {
        override fun create(plugin: EcoSpigotPlugin): PersistentDataHandler {
            return LegacyMongoDBPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
        }
    }
}
|
||||
@@ -0,0 +1,106 @@
|
||||
package com.willfp.eco.internal.spigot.data.handlers.impl
|
||||
|
||||
import com.willfp.eco.core.config.ConfigType
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.config.readConfig
|
||||
import com.willfp.eco.core.data.handlers.DataTypeSerializer
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
|
||||
import com.zaxxer.hikari.HikariConfig
|
||||
import com.zaxxer.hikari.HikariDataSource
|
||||
import org.jetbrains.exposed.dao.id.UUIDTable
|
||||
import org.jetbrains.exposed.sql.Database
|
||||
import org.jetbrains.exposed.sql.SchemaUtils
|
||||
import org.jetbrains.exposed.sql.selectAll
|
||||
import org.jetbrains.exposed.sql.transactions.transaction
|
||||
import java.math.BigDecimal
|
||||
import java.util.UUID
|
||||
|
||||
/**
 * Read-only access to data stored in the legacy MySQL layout: one row per
 * profile in "eco_data", with all values in a single JSON text column.
 * Writing is unsupported; every write throws.
 */
class LegacyMySQLPersistentDataHandler(
    config: Config
) : PersistentDataHandler("legacy_mysql") {
    // Pooled connection source built from the "mysql" config section.
    private val dataSource = HikariDataSource(HikariConfig().apply {
        driverClassName = "com.mysql.cj.jdbc.Driver"
        username = config.getString("user")
        password = config.getString("password")
        jdbcUrl = "jdbc:mysql://" +
                "${config.getString("host")}:" +
                "${config.getString("port")}/" +
                config.getString("database")
        maximumPoolSize = config.getInt("connections")
    })

    private val database = Database.connect(dataSource)

    // Legacy table: UUID primary key plus one JSON blob of all key/values.
    private val table = object : UUIDTable("eco_data") {
        val data = text("json_data", eagerLoading = true)
    }

    init {
        transaction(database) {
            SchemaUtils.create(table)
        }

        // One generic JSON-backed serializer covers every key type.
        PersistentDataKeyType.STRING.registerSerializer(this, LegacyMySQLSerializer<String>())
        PersistentDataKeyType.BOOLEAN.registerSerializer(this, LegacyMySQLSerializer<Boolean>())
        PersistentDataKeyType.INT.registerSerializer(this, LegacyMySQLSerializer<Int>())
        PersistentDataKeyType.DOUBLE.registerSerializer(this, LegacyMySQLSerializer<Double>())
        PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, LegacyMySQLSerializer<BigDecimal>())
        PersistentDataKeyType.CONFIG.registerSerializer(this, LegacyMySQLSerializer<Config>())
        PersistentDataKeyType.STRING_LIST.registerSerializer(this, LegacyMySQLSerializer<List<String>>())
    }

    // Every row's id is a profile UUID.
    override fun getSavedUUIDs(): Set<UUID> {
        return transaction(database) {
            table.selectAll()
                .map { it[table.id] }
                .toSet()
        }.map { it.value }.toSet()
    }

    /**
     * Reads a single key by parsing the row's JSON blob and extracting the
     * value according to the key's declared type.
     */
    private inner class LegacyMySQLSerializer<T : Any> : DataTypeSerializer<T>() {
        override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
            val json = transaction(database) {
                table.selectAll()
                    .where { table.id eq uuid }
                    .limit(1)
                    .singleOrNull()
                    ?.get(table.data)
            }

            if (json == null) {
                return null
            }

            val data = readConfig(json, ConfigType.JSON)

            // Pick the typed accessor matching the key's declared type.
            val value: Any? = when (key.type) {
                PersistentDataKeyType.INT -> data.getIntOrNull(key.key.toString())
                PersistentDataKeyType.DOUBLE -> data.getDoubleOrNull(key.key.toString())
                PersistentDataKeyType.STRING -> data.getStringOrNull(key.key.toString())
                PersistentDataKeyType.BOOLEAN -> data.getBoolOrNull(key.key.toString())
                PersistentDataKeyType.STRING_LIST -> data.getStringsOrNull(key.key.toString())
                PersistentDataKeyType.CONFIG -> data.getSubsectionOrNull(key.key.toString())
                PersistentDataKeyType.BIG_DECIMAL -> data.getBigDecimalOrNull(key.key.toString())

                else -> null
            }

            @Suppress("UNCHECKED_CAST")
            return value as? T?
        }

        override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            throw UnsupportedOperationException("Legacy MySQL does not support writing")
        }
    }

    // Factory exposing this handler under the "legacy_mysql" id.
    object Factory: PersistentDataHandlerFactory("legacy_mysql") {
        override fun create(plugin: EcoSpigotPlugin): PersistentDataHandler {
            return LegacyMySQLPersistentDataHandler(plugin.configYml.getSubsection("mysql"))
        }
    }
}
|
||||
@@ -0,0 +1,192 @@
|
||||
package com.willfp.eco.internal.spigot.data.handlers.impl
|
||||
|
||||
import com.mongodb.MongoClientSettings
|
||||
import com.mongodb.client.model.Filters
|
||||
import com.mongodb.client.model.ReplaceOptions
|
||||
import com.mongodb.kotlin.client.coroutine.MongoClient
|
||||
import com.willfp.eco.core.config.Configs
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.data.handlers.DataTypeSerializer
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import kotlinx.coroutines.flow.firstOrNull
|
||||
import kotlinx.coroutines.flow.toList
|
||||
import kotlinx.coroutines.runBlocking
|
||||
import org.bson.BsonArray
|
||||
import org.bson.BsonBoolean
|
||||
import org.bson.BsonDecimal128
|
||||
import org.bson.BsonDocument
|
||||
import org.bson.BsonDouble
|
||||
import org.bson.BsonInt32
|
||||
import org.bson.BsonObjectId
|
||||
import org.bson.BsonString
|
||||
import org.bson.BsonValue
|
||||
import org.bson.codecs.configuration.CodecRegistries
|
||||
import org.bson.codecs.pojo.PojoCodecProvider
|
||||
import org.bson.types.Decimal128
|
||||
import java.math.BigDecimal
|
||||
import java.util.UUID
|
||||
|
||||
/**
 * MongoDB-backed data handler. Each profile is one document containing a
 * "uuid" string field plus one field per data key (keyed by the key's full
 * namespaced name).
 */
class MongoDBPersistentDataHandler(
    config: Config
) : PersistentDataHandler("mongo") {
    // Default driver codecs plus automatic POJO support.
    private val codecRegistry = CodecRegistries.fromRegistries(
        MongoClientSettings.getDefaultCodecRegistry(),
        CodecRegistries.fromProviders(PojoCodecProvider.builder().automatic(true).build())
    )

    private val client = MongoClient.create(config.getString("url"))
    private val database = client.getDatabase(config.getString("database"))

    private val collection = database.getCollection<BsonDocument>(config.getString("collection"))
        .withCodecRegistry(codecRegistry)

    init {
        // Register a BSON serializer/deserializer pair for every key type.
        PersistentDataKeyType.STRING.registerSerializer(this, object : MongoSerializer<String>() {
            override fun serialize(value: String): BsonValue {
                return BsonString(value)
            }

            override fun deserialize(value: BsonValue): String {
                return value.asString().value
            }
        })

        PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : MongoSerializer<Boolean>() {
            override fun serialize(value: Boolean): BsonValue {
                return BsonBoolean(value)
            }

            override fun deserialize(value: BsonValue): Boolean {
                return value.asBoolean().value
            }
        })

        PersistentDataKeyType.INT.registerSerializer(this, object : MongoSerializer<Int>() {
            override fun serialize(value: Int): BsonValue {
                return BsonInt32(value)
            }

            override fun deserialize(value: BsonValue): Int {
                return value.asInt32().value
            }
        })

        PersistentDataKeyType.DOUBLE.registerSerializer(this, object : MongoSerializer<Double>() {
            override fun serialize(value: Double): BsonValue {
                return BsonDouble(value)
            }

            override fun deserialize(value: BsonValue): Double {
                return value.asDouble().value
            }
        })

        PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : MongoSerializer<List<String>>() {
            override fun serialize(value: List<String>): BsonValue {
                return BsonArray(value.map { BsonString(it) })
            }

            override fun deserialize(value: BsonValue): List<String> {
                return value.asArray().values.map { it.asString().value }
            }
        })

        PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : MongoSerializer<BigDecimal>() {
            override fun serialize(value: BigDecimal): BsonValue {
                return BsonDecimal128(Decimal128(value))
            }

            override fun deserialize(value: BsonValue): BigDecimal {
                return value.asDecimal128().value.bigDecimalValue()
            }
        })

        PersistentDataKeyType.CONFIG.registerSerializer(this, object : MongoSerializer<Config>() {
            // Recursively converts a BSON value into plain Kotlin values.
            private fun deserializeConfigValue(value: BsonValue): Any {
                return when (value) {
                    is BsonString -> value.value
                    is BsonInt32 -> value.value
                    is BsonDouble -> value.value
                    is BsonBoolean -> value.value
                    is BsonDecimal128 -> value.value.bigDecimalValue()
                    is BsonArray -> value.values.map { deserializeConfigValue(it) }
                    is BsonDocument -> value.mapValues { (_, v) -> deserializeConfigValue(v) }

                    else -> throw IllegalArgumentException("Could not deserialize config value type ${value::class.simpleName}")
                }
            }

            // Recursively converts plain Kotlin values into BSON; throws on
            // unsupported element types (and NPEs on null list/map entries).
            private fun serializeConfigValue(value: Any): BsonValue {
                return when (value) {
                    is String -> BsonString(value)
                    is Int -> BsonInt32(value)
                    is Double -> BsonDouble(value)
                    is Boolean -> BsonBoolean(value)
                    is BigDecimal -> BsonDecimal128(Decimal128(value))
                    is List<*> -> BsonArray(value.map { serializeConfigValue(it!!) })
                    is Map<*, *> -> BsonDocument().apply {
                        value.forEach { (k, v) -> append(k.toString(), serializeConfigValue(v!!)) }
                    }

                    else -> throw IllegalArgumentException("Could not serialize config value type ${value::class.simpleName}")
                }
            }

            override fun serialize(value: Config): BsonValue {
                return serializeConfigValue(value.toMap())
            }

            override fun deserialize(value: BsonValue): Config {
                @Suppress("UNCHECKED_CAST")
                return Configs.fromMap(deserializeConfigValue(value.asDocument()) as Map<String, Any>)
            }
        })
    }

    // Every document carries its profile id in the "uuid" string field.
    override fun getSavedUUIDs(): Set<UUID> {
        return runBlocking {
            collection.find().toList().map {
                UUID.fromString(it.getString("uuid").value)
            }.toSet()
        }
    }

    /**
     * Base serializer: reads one field of the profile document, and writes
     * via fetch-modify-replace (upsert creates the document if missing).
     * NOTE(review): the read-modify-replace in writeAsync is not atomic —
     * concurrent writers could lose updates; confirm whether callers serialize
     * writes per profile.
     */
    private abstract inner class MongoSerializer<T : Any> : DataTypeSerializer<T>() {
        override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
            return runBlocking {
                val filter = Filters.eq("uuid", uuid.toString())

                val profile = collection.find(filter)
                    .firstOrNull() ?: return@runBlocking null

                val value = profile[key.key.toString()] ?: return@runBlocking null

                deserialize(value)
            }
        }

        override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            runBlocking {
                val filter = Filters.eq("uuid", uuid.toString())

                // Fetch the existing document, or start a new one for this UUID.
                val profile = collection.find(filter).firstOrNull()
                    ?: BsonDocument()
                        .append("_id", BsonObjectId())
                        .append("uuid", BsonString(uuid.toString()))

                profile.append(key.key.toString(), serialize(value))

                collection.replaceOne(
                    filter,
                    profile,
                    ReplaceOptions().upsert(true)
                )
            }
        }

        protected abstract fun serialize(value: T): BsonValue
        protected abstract fun deserialize(value: BsonValue): T
    }
}
|
||||
@@ -0,0 +1,267 @@
|
||||
package com.willfp.eco.internal.spigot.data.handlers.impl
|
||||
|
||||
import com.willfp.eco.core.config.ConfigType
|
||||
import com.willfp.eco.core.config.Configs
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.config.readConfig
|
||||
import com.willfp.eco.core.data.handlers.DataTypeSerializer
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.zaxxer.hikari.HikariConfig
|
||||
import com.zaxxer.hikari.HikariDataSource
|
||||
import kotlinx.coroutines.delay
|
||||
import kotlinx.coroutines.runBlocking
|
||||
import org.jetbrains.exposed.sql.Column
|
||||
import org.jetbrains.exposed.sql.Database
|
||||
import org.jetbrains.exposed.sql.SchemaUtils
|
||||
import org.jetbrains.exposed.sql.SqlExpressionBuilder.eq
|
||||
import org.jetbrains.exposed.sql.SqlExpressionBuilder.greaterEq
|
||||
import org.jetbrains.exposed.sql.Table
|
||||
import org.jetbrains.exposed.sql.and
|
||||
import org.jetbrains.exposed.sql.deleteWhere
|
||||
import org.jetbrains.exposed.sql.replace
|
||||
import org.jetbrains.exposed.sql.selectAll
|
||||
import org.jetbrains.exposed.sql.transactions.transaction
|
||||
import org.jetbrains.exposed.sql.upsert
|
||||
import java.math.BigDecimal
|
||||
import java.util.UUID
|
||||
import kotlin.math.pow
|
||||
|
||||
private const val VALUE_COLUMN_NAME = "dataValue"
|
||||
private const val UUID_COLUMN_NAME = "profileUUID"
|
||||
private const val KEY_COLUMN_NAME = "dataKey"
|
||||
private const val INDEX_COLUMN_NAME = "listIndex"
|
||||
|
||||
/**
 * MySQL-backed data handler using Exposed. Each key type gets its own table
 * (created in init below); tables are prefixed with the configured prefix.
 */
class MySQLPersistentDataHandler(
    config: Config
) : PersistentDataHandler("mysql") {
    // Pooled connection source built from the "mysql" config section.
    private val dataSource = HikariDataSource(HikariConfig().apply {
        driverClassName = "com.mysql.cj.jdbc.Driver"
        username = config.getString("user")
        password = config.getString("password")
        jdbcUrl = "jdbc:mysql://" +
                "${config.getString("host")}:" +
                "${config.getString("port")}/" +
                config.getString("database")
        maximumPoolSize = config.getInt("connections")
    })

    // Table-name prefix prepended by ProfileTable.
    private val prefix = config.getString("prefix")

    private val database = Database.connect(dataSource)
|
||||
|
||||
    init {
        // One table per key type; each serializer creates its table eagerly
        // via createTable() before registration.
        PersistentDataKeyType.STRING.registerSerializer(this, object : DirectStoreSerializer<String>() {
            override val table = object : KeyTable<String>("string") {
                override val value = varchar(VALUE_COLUMN_NAME, 256)
            }
        }.createTable())

        PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : DirectStoreSerializer<Boolean>() {
            override val table = object : KeyTable<Boolean>("boolean") {
                override val value = bool(VALUE_COLUMN_NAME)
            }
        }.createTable())

        PersistentDataKeyType.INT.registerSerializer(this, object : DirectStoreSerializer<Int>() {
            override val table = object : KeyTable<Int>("int") {
                override val value = integer(VALUE_COLUMN_NAME)
            }
        }.createTable())

        PersistentDataKeyType.DOUBLE.registerSerializer(this, object : DirectStoreSerializer<Double>() {
            override val table = object : KeyTable<Double>("double") {
                override val value = double(VALUE_COLUMN_NAME)
            }
        }.createTable())

        PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : DirectStoreSerializer<BigDecimal>() {
            override val table = object : KeyTable<BigDecimal>("big_decimal") {
                // 34 digits of precision, 4 digits of scale
                override val value = decimal(VALUE_COLUMN_NAME, 34, 4)
            }
        }.createTable())

        // Configs are stored as JSON text and re-parsed on read.
        PersistentDataKeyType.CONFIG.registerSerializer(this, object : SingleValueSerializer<Config, String>() {
            override val table = object : KeyTable<String>("config") {
                override val value = text(VALUE_COLUMN_NAME)
            }

            override fun convertFromStored(value: String): Config {
                return readConfig(value, ConfigType.JSON)
            }

            override fun convertToStored(value: Config): String {
                // Store config as JSON
                return if (value.type == ConfigType.JSON) {
                    value.toPlaintext()
                } else {
                    Configs.fromMap(value.toMap(), ConfigType.JSON).toPlaintext()
                }
            }
        }.createTable())

        // String lists get one row per element, ordered by a list index.
        PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : MultiValueSerializer<String>() {
            override val table = object : ListKeyTable<String>("string_list") {
                override val value = varchar(VALUE_COLUMN_NAME, 256)
            }
        }.createTable())
    }
|
||||
|
||||
override fun getSavedUUIDs(): Set<UUID> {
|
||||
val savedUUIDs = mutableSetOf<UUID>()
|
||||
|
||||
for (keyType in PersistentDataKeyType.values()) {
|
||||
val serializer = keyType.getSerializer(this) as MySQLSerializer<*>
|
||||
savedUUIDs.addAll(serializer.getSavedUUIDs())
|
||||
}
|
||||
|
||||
return savedUUIDs
|
||||
}
|
||||
|
||||
    /**
     * Base for all MySQL serializers: ties a serializer to its backing table
     * and provides table creation plus UUID enumeration.
     */
    private abstract inner class MySQLSerializer<T : Any> : DataTypeSerializer<T>() {
        // The Exposed table this serializer reads from and writes to.
        protected abstract val table: ProfileTable

        // All distinct profile UUIDs present in this serializer's table.
        fun getSavedUUIDs(): Set<UUID> {
            return transaction(database) {
                table.selectAll().map { it[table.uuid] }.toSet()
            }
        }

        // Creates the backing table if absent; returns this for chaining.
        fun createTable(): MySQLSerializer<T> {
            transaction(database) {
                SchemaUtils.create(table)
            }

            return this
        }
    }
|
||||
|
||||
// T is the key type
|
||||
// S is the stored value type
|
||||
    // T is the key type
    // S is the stored value type
    /**
     * Serializer for key types stored as one row per (uuid, key), with a
     * conversion step between the in-memory type T and the column type S.
     */
    private abstract inner class SingleValueSerializer<T : Any, S : Any> : MySQLSerializer<T>() {
        abstract override val table: KeyTable<S>

        abstract fun convertToStored(value: T): S
        abstract fun convertFromStored(value: S): T

        override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
            val stored = transaction(database) {
                table.selectAll()
                    .where { (table.uuid eq uuid) and (table.key eq key.key.toString()) }
                    .limit(1)
                    .singleOrNull()
                    ?.get(table.value)
            }

            return stored?.let { convertFromStored(it) }
        }

        override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            // withRetries presumably retries transient DB failures — its body
            // is defined below (partially outside this view).
            withRetries {
                transaction(database) {
                    // Insert-or-update keyed on the (uuid, key) primary key.
                    table.upsert {
                        it[table.uuid] = uuid
                        it[table.key] = key.key.toString()
                        it[table.value] = convertToStored(value)
                    }
                }
            }
        }
    }
|
||||
|
||||
private abstract inner class DirectStoreSerializer<T : Any> : SingleValueSerializer<T, T>() {
|
||||
override fun convertToStored(value: T): T {
|
||||
return value
|
||||
}
|
||||
|
||||
override fun convertFromStored(value: T): T {
|
||||
return value
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Serializer for list-valued keys: each element is stored as its own row,
 * with an index column preserving element order.
 */
private abstract inner class MultiValueSerializer<T : Any> : MySQLSerializer<List<T>>() {
    abstract override val table: ListKeyTable<T>

    override fun readAsync(uuid: UUID, key: PersistentDataKey<List<T>>): List<T>? {
        // Order by the index column so the list is reconstructed in insertion order.
        val stored = transaction(database) {
            table.selectAll()
                .where { (table.uuid eq uuid) and (table.key eq key.key.toString()) }
                .orderBy(table.index)
                .map { it[table.value] }
        }

        return stored
    }

    override fun writeAsync(uuid: UUID, key: PersistentDataKey<List<T>>, value: List<T>) {
        withRetries {
            transaction(database) {
                // Remove existing values greater than the new list size
                // (the new list may be shorter than the stored one).
                table.deleteWhere {
                    (table.uuid eq uuid) and
                            (table.key eq key.key.toString()) and
                            (table.index greaterEq value.size)
                }

                // Replace existing values in bounds, one row per element.
                value.forEachIndexed { index, t ->
                    table.replace {
                        it[table.uuid] = uuid
                        it[table.key] = key.key.toString()
                        it[table.index] = index
                        it[table.value] = t
                    }
                }
            }
        }
    }
}
|
||||
|
||||
/**
 * Base table for all profile data: every row belongs to a profile UUID.
 * The table name is prefixed with the configured table prefix.
 */
private abstract inner class ProfileTable(name: String) : Table(prefix + name) {
    val uuid = uuid(UUID_COLUMN_NAME)
}
|
||||
|
||||
/**
 * Table layout for single-value keys: exactly one row per (uuid, key).
 *
 * @param T the stored column type, supplied by the concrete subclass.
 */
private abstract inner class KeyTable<T>(name: String) : ProfileTable(name) {
    val key = varchar(KEY_COLUMN_NAME, 128)
    // Concrete subclasses define the value column with the right SQL type.
    abstract val value: Column<T>

    override val primaryKey = PrimaryKey(uuid, key)

    init {
        // NOTE(review): likely redundant with the composite primary key above —
        // confirm before removing.
        uniqueIndex(uuid, key)
    }
}
|
||||
|
||||
/**
 * Table layout for list-valued keys: one row per (uuid, key, index), where
 * index is the element's position in the list.
 *
 * @param T the stored element type, supplied by the concrete subclass.
 */
private abstract inner class ListKeyTable<T>(name: String) : ProfileTable(name) {
    val key = varchar(KEY_COLUMN_NAME, 128)
    val index = integer(INDEX_COLUMN_NAME)
    // Concrete subclasses define the value column with the right SQL type.
    abstract val value: Column<T>

    override val primaryKey = PrimaryKey(uuid, key, index)

    init {
        // NOTE(review): likely redundant with the composite primary key above —
        // confirm before removing.
        uniqueIndex(uuid, key, index)
    }
}
|
||||
|
||||
/**
 * Runs [action], retrying on any exception with exponential backoff
 * (delays of 2^retries milliseconds between attempts).
 *
 * Attempts up to 6 times in total; retries is pre-incremented, so the first
 * delay is 4ms, then 8ms, and so on.
 *
 * @return the action's result, or null if every attempt failed.
 *         NOTE(review): the final exception is swallowed silently — consider
 *         logging it before giving up.
 */
private inline fun <T> withRetries(action: () -> T): T? {
    var retries = 1
    while (true) {
        try {
            return action()
        } catch (e: Exception) {
            if (retries > 5) {
                return null
            }
            retries++

            // Exponential backoff
            runBlocking {
                delay(2.0.pow(retries.toDouble()).toLong())
            }
        }
    }
}
|
||||
}
|
||||
@@ -0,0 +1,72 @@
|
||||
package com.willfp.eco.internal.spigot.data.handlers.impl
|
||||
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.data.handlers.DataTypeSerializer
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import java.math.BigDecimal
|
||||
import java.util.UUID
|
||||
|
||||
/**
 * Persistent data handler backed by the plugin's data.yml file.
 *
 * Values are stored under `player.<uuid>.<key>`; reads and writes go to the
 * in-memory config, which is flushed to disk on save.
 */
class YamlPersistentDataHandler(
    plugin: EcoSpigotPlugin
) : PersistentDataHandler("yaml") {
    private val dataYml = plugin.dataYml

    init {
        // One serializer per supported key type, each delegating to the
        // matching nullable config getter.
        PersistentDataKeyType.STRING.registerSerializer(this, reader { c, k -> c.getStringOrNull(k) })
        PersistentDataKeyType.BOOLEAN.registerSerializer(this, reader { c, k -> c.getBoolOrNull(k) })
        PersistentDataKeyType.INT.registerSerializer(this, reader { c, k -> c.getIntOrNull(k) })
        PersistentDataKeyType.DOUBLE.registerSerializer(this, reader { c, k -> c.getDoubleOrNull(k) })
        PersistentDataKeyType.STRING_LIST.registerSerializer(this, reader { c, k -> c.getStringsOrNull(k) })
        PersistentDataKeyType.CONFIG.registerSerializer(this, reader { c, k -> c.getSubsectionOrNull(k) })
        PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, reader { c, k -> c.getBigDecimalOrNull(k) })
    }

    /** Every UUID with a section under `player.` in data.yml. */
    override fun getSavedUUIDs(): Set<UUID> =
        dataYml.getSubsection("player")
            .getKeys(false)
            .map(UUID::fromString)
            .toSet()

    /** Yaml-backed data is always autosaved. */
    override fun shouldAutosave(): Boolean = true

    /** Persists the backing config to disk. */
    override fun doSave() {
        dataYml.save()
    }

    /** Builds a serializer that reads via [get] and writes straight to the config. */
    private fun <T : Any> reader(get: (Config, String) -> T?): DataTypeSerializer<T> =
        object : YamlSerializer<T>() {
            override fun read(config: Config, key: String): T? = get(config, key)
        }

    /** Serializer addressing values at `player.<uuid>.<key>`. */
    private abstract inner class YamlSerializer<T : Any> : DataTypeSerializer<T>() {
        protected abstract fun read(config: Config, key: String): T?

        final override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? =
            read(dataYml, "player.$uuid.${key.key}")

        final override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            dataYml.set("player.$uuid.${key.key}", value)
        }
    }
}
|
||||
@@ -0,0 +1,141 @@
|
||||
package com.willfp.eco.internal.spigot.data.profiles
|
||||
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.ServerLocking
|
||||
import com.willfp.eco.internal.spigot.data.KeyRegistry
|
||||
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
|
||||
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlers
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.LegacyMongoDBPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.LegacyMySQLPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.MongoDBPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.MySQLPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.YamlPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoPlayerProfile
|
||||
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoProfile
|
||||
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoServerProfile
|
||||
import com.willfp.eco.internal.spigot.data.profiles.impl.serverProfileUUID
|
||||
import java.util.UUID
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
|
||||
// data.yml flag recording that legacy (pre-rewrite) data has been migrated.
const val LEGACY_MIGRATED_KEY = "legacy-data-migrated"
|
||||
|
||||
/**
 * Owns the data handlers and the in-memory profile cache, and coordinates
 * migrations between storage backends.
 */
class ProfileHandler(
    private val plugin: EcoSpigotPlugin
) {
    // Handler id as configured (e.g. "yaml", "mysql", "mongo").
    private val handlerId = plugin.configYml.getString("data-handler")

    /** Handler for keys that are saved locally; always yaml-backed. */
    val localHandler = YamlPersistentDataHandler(plugin)

    /** Primary handler, selected by the `data-handler` config option. */
    val defaultHandler = PersistentDataHandlers[handlerId]?.create(plugin)
        ?: throw IllegalArgumentException("Invalid data handler ($handlerId)")

    /** Batches pending key writes and flushes them on a timer. */
    val profileWriter = ProfileWriter(plugin, this)

    // Profiles currently held in memory; the server profile uses a fixed UUID.
    private val loaded = ConcurrentHashMap<UUID, EcoProfile>()

    /** Gets (or lazily creates) the profile for a player UUID. */
    fun getPlayerProfile(uuid: UUID): EcoPlayerProfile {
        return loaded.computeIfAbsent(uuid) {
            EcoPlayerProfile(it, this)
        } as EcoPlayerProfile
    }

    /** Gets (or lazily creates) the server-wide profile. */
    fun getServerProfile(): EcoServerProfile {
        return loaded.computeIfAbsent(serverProfileUUID) {
            EcoServerProfile(this)
        } as EcoServerProfile
    }

    /** Evicts a profile from the in-memory cache. */
    fun unloadProfile(uuid: UUID) {
        loaded.remove(uuid)
    }

    /**
     * Shuts down both handlers, flushing any pending writes.
     * Called on plugin disable.
     */
    fun save() {
        localHandler.shutdown()
        defaultHandler.shutdown()
    }

    /**
     * Checks whether a data migration is required and, if so, schedules one.
     *
     * @return true if a migration was scheduled (the server is locked and will
     *         restart when it completes), false otherwise.
     */
    fun migrateIfNecessary(): Boolean {
        if (!plugin.configYml.getBool("perform-data-migration")) {
            return false
        }

        // First install: just record the current handler, nothing to migrate.
        if (!plugin.dataYml.has("previous-handler")) {
            plugin.dataYml.set("previous-handler", defaultHandler.id)
            plugin.dataYml.set(LEGACY_MIGRATED_KEY, true)
            plugin.dataYml.save()
            return false
        }

        val previousHandlerId = plugin.dataYml.getString("previous-handler").lowercase()

        // Handler changed since last boot: migrate from the previous backend.
        if (previousHandlerId != defaultHandler.id) {
            val fromFactory = PersistentDataHandlers[previousHandlerId] ?: return false
            scheduleMigration(fromFactory)

            return true
        }

        // Same backend, but legacy-format data has not been migrated yet.
        if (defaultHandler is MySQLPersistentDataHandler && !plugin.dataYml.getBool(LEGACY_MIGRATED_KEY)) {
            plugin.logger.info("eco has detected a legacy MySQL database. Migrating to new MySQL database...")
            scheduleMigration(LegacyMySQLPersistentDataHandler.Factory)

            return true
        }

        if (defaultHandler is MongoDBPersistentDataHandler && !plugin.dataYml.getBool(LEGACY_MIGRATED_KEY)) {
            plugin.logger.info("eco has detected a legacy MongoDB database. Migrating to new MongoDB database...")
            scheduleMigration(LegacyMongoDBPersistentDataHandler.Factory)

            return true
        }

        return false
    }

    // Locks the server, then runs the migration a few ticks later so other
    // plugins have a chance to register their data keys first.
    private fun scheduleMigration(fromFactory: PersistentDataHandlerFactory) {
        ServerLocking.lock("Migrating player data! Check console for more information.")

        // Run after 5 ticks to allow plugins to load their data keys
        plugin.scheduler.runLater(5) {
            doMigrate(fromFactory)

            plugin.dataYml.set(LEGACY_MIGRATED_KEY, true)
            plugin.dataYml.save()
        }
    }

    // Copies every registered key for every stored profile from the previous
    // handler into the current one, then restarts the server.
    private fun doMigrate(fromFactory: PersistentDataHandlerFactory) {
        plugin.logger.info("eco has detected a change in data handler")
        plugin.logger.info("${fromFactory.id} --> ${defaultHandler.id}")
        plugin.logger.info("This will take a while! Players will not be able to join during this time.")

        val fromHandler = fromFactory.create(plugin)
        val toHandler = defaultHandler

        val keys = KeyRegistry.getRegisteredKeys()

        // Fix: the template previously ended with a stray '}' that was printed
        // verbatim in the log message.
        plugin.logger.info("Keys to migrate: ${keys.map { it.key }.joinToString(", ")}")

        plugin.logger.info("Loading profile UUIDs from ${fromFactory.id}...")
        plugin.logger.info("This step may take a while depending on the size of your database.")

        val uuids = fromHandler.getSavedUUIDs()

        plugin.logger.info("Found ${uuids.size} profiles to migrate")

        for ((index, uuid) in uuids.withIndex()) {
            plugin.logger.info("(${index + 1}/${uuids.size}) Migrating $uuid")
            val profile = fromHandler.serializeProfile(uuid, keys)
            toHandler.loadSerializedProfile(profile)
        }

        plugin.logger.info("Profile writes submitted! Waiting for completion...")
        toHandler.shutdown()

        plugin.logger.info("Updating previous handler...")
        // NOTE(review): writes the raw configured id rather than defaultHandler.id —
        // confirm these always match (the read path lowercases before comparing).
        plugin.dataYml.set("previous-handler", handlerId)
        plugin.dataYml.save()
        plugin.logger.info("The server will now automatically be restarted...")

        plugin.server.shutdown()
    }
}
|
||||
@@ -1,4 +1,4 @@
|
||||
package com.willfp.eco.internal.spigot.data
|
||||
package com.willfp.eco.internal.spigot.data.profiles
|
||||
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.util.PlayerUtils
|
||||
@@ -9,15 +9,18 @@ import org.bukkit.event.player.PlayerJoinEvent
|
||||
import org.bukkit.event.player.PlayerLoginEvent
|
||||
import org.bukkit.event.player.PlayerQuitEvent
|
||||
|
||||
class DataListener(
|
||||
class ProfileLoadListener(
|
||||
private val plugin: EcoPlugin,
|
||||
private val handler: ProfileHandler
|
||||
) : Listener {
|
||||
@EventHandler(priority = EventPriority.LOWEST)
|
||||
fun onLogin(event: PlayerLoginEvent) {
|
||||
handler.unloadProfile(event.player.uniqueId)
|
||||
}
|
||||
|
||||
@EventHandler(priority = EventPriority.HIGHEST)
|
||||
fun onLeave(event: PlayerQuitEvent) {
|
||||
val profile = handler.accessLoadedProfile(event.player.uniqueId) ?: return
|
||||
handler.saveKeysFor(event.player.uniqueId, profile.data.keys)
|
||||
handler.unloadPlayer(event.player.uniqueId)
|
||||
handler.unloadProfile(event.player.uniqueId)
|
||||
}
|
||||
|
||||
@EventHandler
|
||||
@@ -26,9 +29,4 @@ class DataListener(
|
||||
PlayerUtils.updateSavedDisplayName(event.player)
|
||||
}
|
||||
}
|
||||
|
||||
@EventHandler(priority = EventPriority.LOWEST)
|
||||
fun onLogin(event: PlayerLoginEvent) {
|
||||
handler.unloadPlayer(event.player.uniqueId)
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,59 @@
|
||||
package com.willfp.eco.internal.spigot.data.profiles
|
||||
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import java.util.UUID
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
|
||||
/*
|
||||
The profile writer exists as an optimization to batch writes to the database.
|
||||
|
||||
This is necessary because values frequently change multiple times per tick,
|
||||
and we don't want to write to the database every time a value changes.
|
||||
|
||||
Instead, we only commit the last value that was set every interval (default 1 tick).
|
||||
*/
|
||||
|
||||
|
||||
/**
 * Batches database writes.
 *
 * Values frequently change several times per tick; rather than hitting the
 * database on every change, only the latest value per (uuid, key) is
 * committed each save interval (default 1 tick).
 */
class ProfileWriter(
    private val plugin: EcoPlugin,
    private val handler: ProfileHandler
) {
    // Flush interval for pending writes, in ticks.
    private val saveInterval = plugin.configYml.getInt("save-interval").toLong()
    // Interval for flushing the local yaml store to disk, in ticks.
    private val autosaveInterval = plugin.configYml.getInt("autosave-interval").toLong()

    // Latest pending value per (uuid, key); overwritten on every write until flushed.
    private val valuesToWrite = ConcurrentHashMap<WriteRequest<*>, Any>()

    /** Queues a value to be written on the next save tick. */
    fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        valuesToWrite[WriteRequest(uuid, key)] = value
    }

    /** Starts the repeating task that flushes pending writes to the handlers. */
    fun startTickingSaves() {
        plugin.scheduler.runTimer(20, saveInterval) {
            for (request in valuesToWrite.keys) {
                // Fix: atomically claim the latest value with remove(key).
                // The previous entry-iterator approach read the value and then
                // called iterator.remove(), which could silently drop a newer
                // value written in between. With remove(), any write that lands
                // after the removal simply re-queues for the next tick.
                val value = valuesToWrite.remove(request) ?: continue

                // Route to the local (yaml) or default handler per key.
                val dataHandler = if (request.key.isSavedLocally) handler.localHandler else handler.defaultHandler

                // Pass the value to the data handler
                @Suppress("UNCHECKED_CAST")
                dataHandler.write(request.uuid, request.key as PersistentDataKey<Any>, value)
            }
        }
    }

    /** Starts the repeating task that autosaves the local store. */
    fun startTickingAutosave() {
        plugin.scheduler.runTimer(autosaveInterval, autosaveInterval) {
            if (handler.localHandler.shouldAutosave()) {
                handler.localHandler.save()
            }
        }
    }

    // Identity of a pending write; equality by (uuid, key) so newer values
    // overwrite older ones in the map.
    private data class WriteRequest<T>(val uuid: UUID, val key: PersistentDataKey<T>)
}
|
||||
|
||||
/**
 * Whether this key should be written to the local (yaml) handler rather than
 * the default one: either the key itself is marked local, or the plugin that
 * owns the key's namespace has opted in to local storage.
 */
val PersistentDataKey<*>.isSavedLocally: Boolean
    get() = this.isLocal || EcoPlugin.getPlugin(this.key.namespace)?.isUsingLocalStorage == true
|
||||
@@ -0,0 +1,14 @@
|
||||
package com.willfp.eco.internal.spigot.data.profiles.impl
|
||||
|
||||
import com.willfp.eco.core.data.PlayerProfile
|
||||
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
|
||||
import java.util.UUID
|
||||
|
||||
/**
 * A player's profile, keyed by the player's UUID and backed by the
 * profile handler's storage.
 */
class EcoPlayerProfile(
    uuid: UUID,
    handler: ProfileHandler
) : EcoProfile(uuid, handler), PlayerProfile {
    override fun toString(): String = "EcoPlayerProfile{uuid=$uuid}"
}
|
||||
@@ -0,0 +1,48 @@
|
||||
package com.willfp.eco.internal.spigot.data.profiles.impl
|
||||
|
||||
import com.willfp.eco.core.data.Profile
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
|
||||
import com.willfp.eco.internal.spigot.data.profiles.isSavedLocally
|
||||
import java.util.UUID
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
|
||||
/**
 * Base profile implementation: an in-memory, lazily-populated cache of key
 * values layered over the persistent data handlers.
 */
abstract class EcoProfile(
    val uuid: UUID,
    private val handler: ProfileHandler
) : Profile {
    // Cached values; each key is read from storage at most once, then served from here.
    private val data = ConcurrentHashMap<PersistentDataKey<*>, Any>()

    override fun <T : Any> write(key: PersistentDataKey<T>, value: T) {
        this.data[key] = value

        // Persist asynchronously via the batching writer.
        handler.profileWriter.write(uuid, key, value)
    }

    override fun <T : Any> read(key: PersistentDataKey<T>): T {
        @Suppress("UNCHECKED_CAST")
        if (this.data.containsKey(key)) {
            return this.data[key] as T
        }

        // Cache miss: load from the local or default handler depending on the
        // key, falling back to the key's default value, then recurse so the
        // value is served from the cache path above.
        this.data[key] = if (key.isSavedLocally) {
            handler.localHandler.read(uuid, key)
        } else {
            handler.defaultHandler.read(uuid, key)
        } ?: key.defaultValue

        return read(key)
    }

    // Profiles are equal iff they refer to the same UUID.
    override fun equals(other: Any?): Boolean {
        if (other !is EcoProfile) {
            return false
        }

        return this.uuid == other.uuid
    }

    override fun hashCode(): Int {
        return this.uuid.hashCode()
    }
}
|
||||
@@ -0,0 +1,47 @@
|
||||
package com.willfp.eco.internal.spigot.data.profiles.impl
|
||||
|
||||
import com.willfp.eco.core.data.ServerProfile
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
|
||||
import com.willfp.eco.util.namespacedKeyOf
|
||||
import java.util.UUID
|
||||
|
||||
// Globally-shared server id, stored via the default handler.
val serverIDKey = PersistentDataKey(
    namespacedKeyOf("eco", "server_id"),
    PersistentDataKeyType.STRING,
    ""
)

// Per-node server id. The trailing `true` presumably marks the key as
// locally stored — verify against the PersistentDataKey constructor.
val localServerIDKey = PersistentDataKey(
    namespacedKeyOf("eco", "local_server_id"),
    PersistentDataKeyType.STRING,
    "",
    true
)

// Sentinel UUID under which the server profile is stored.
val serverProfileUUID = UUID(0, 0)

/**
 * The server-wide profile. Server ids are generated lazily on first access
 * and persisted through the regular key read/write path.
 */
class EcoServerProfile(
    handler: ProfileHandler
) : EcoProfile(serverProfileUUID, handler), ServerProfile {
    override fun getServerID(): String {
        // Generate and persist an id the first time it is requested.
        if (this.read(serverIDKey).isBlank()) {
            this.write(serverIDKey, UUID.randomUUID().toString())
        }

        return this.read(serverIDKey)
    }

    override fun getLocalServerID(): String {
        // Same lazy generation, but for the node-local id.
        if (this.read(localServerIDKey).isBlank()) {
            this.write(localServerIDKey, UUID.randomUUID().toString())
        }

        return this.read(localServerIDKey)
    }

    override fun toString(): String {
        return "EcoServerProfile"
    }
}
|
||||
@@ -1,37 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.storage
|
||||
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import java.util.UUID
|
||||
|
||||
/**
 * Legacy base class for storage backends, identified by a [HandlerType].
 */
abstract class DataHandler(
    val type: HandlerType
) {
    /**
     * Read value from a key.
     */
    abstract fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T?

    /**
     * Write value to a key.
     */
    abstract fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T)

    /**
     * Save a set of keys for a given UUID.
     */
    abstract fun saveKeysFor(uuid: UUID, keys: Map<PersistentDataKey<*>, Any>)

    // Everything below this are methods that are only needed for certain implementations.

    /** Synchronous flush; no-op by default. */
    open fun save() {

    }

    /** Asynchronous flush; no-op by default. */
    open fun saveAsync() {

    }

    /** Post-construction setup hook; no-op by default. */
    open fun initialize() {

    }
}
|
||||
@@ -1,7 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.storage
|
||||
|
||||
/**
 * Storage backends supported by the legacy data handlers.
 */
enum class HandlerType {
    YAML,
    MYSQL,
    MONGO
}
|
||||
@@ -1,134 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.storage
|
||||
|
||||
import com.mongodb.client.model.Filters
|
||||
import com.mongodb.client.model.ReplaceOptions
|
||||
import com.mongodb.client.model.UpdateOptions
|
||||
import com.mongodb.client.model.Updates
|
||||
import com.mongodb.kotlin.client.coroutine.MongoClient
|
||||
import com.mongodb.kotlin.client.coroutine.MongoCollection
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.data.ProfileHandler
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.launch
|
||||
import kotlinx.coroutines.runBlocking
|
||||
import org.bson.codecs.pojo.annotations.BsonId
|
||||
import java.util.UUID
|
||||
import kotlinx.coroutines.flow.firstOrNull
|
||||
import kotlinx.serialization.Contextual
|
||||
import kotlinx.serialization.SerialName
|
||||
import kotlinx.serialization.Serializable
|
||||
import org.bukkit.Bukkit
|
||||
|
||||
/**
 * Legacy MongoDB-backed data handler.
 *
 * Each profile is one [UUIDProfile] document holding all key/value pairs in a
 * single map. Reads block the calling thread; writes are fire-and-forget on
 * an IO coroutine scope.
 */
@Suppress("UNCHECKED_CAST")
class MongoDataHandler(
    plugin: EcoSpigotPlugin,
    private val handler: ProfileHandler
) : DataHandler(HandlerType.MONGO) {
    private val client: MongoClient
    private val collection: MongoCollection<UUIDProfile>

    // Scope for asynchronous writes; never cancelled explicitly.
    private val scope = CoroutineScope(Dispatchers.IO)

    init {
        // Select the Jackson mapping service for KMongo-era compatibility.
        System.setProperty(
            "org.litote.mongo.mapping.service",
            "org.litote.kmongo.jackson.JacksonClassMappingTypeService"
        )

        val url = plugin.configYml.getString("mongodb.url")

        client = MongoClient.create(url)
        collection = client.getDatabase(plugin.configYml.getString("mongodb.database"))
            .getCollection<UUIDProfile>("uuidprofile") // Compat with jackson mapping
    }

    // Blocking read: runs the suspend query on the caller's thread.
    override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
        return runBlocking {
            doRead(uuid, key)
        }
    }

    // Fire-and-forget write on the IO scope.
    override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        scope.launch {
            doWrite(uuid, key, value)
        }
    }

    override fun saveKeysFor(uuid: UUID, keys: Map<PersistentDataKey<*>, Any>) {
        scope.launch {
            for ((key, value) in keys) {
                saveKey(uuid, key, value)
            }
        }
    }

    // Bridges the untyped map entry back to the key's generic type.
    private suspend fun <T : Any> saveKey(uuid: UUID, key: PersistentDataKey<T>, value: Any) {
        val data = value as T
        doWrite(uuid, key, data)
    }

    private suspend fun <T> doWrite(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        val profile = getOrCreateDocument(uuid)

        // Null clears the key; anything else overwrites it.
        profile.data.run {
            if (value == null) {
                this.remove(key.key.toString())
            } else {
                this[key.key.toString()] = value
            }
        }

        // Replace the whole data map on the profile document.
        collection.updateOne(
            Filters.eq(UUIDProfile::uuid.name, uuid.toString()),
            Updates.set(UUIDProfile::data.name, profile.data)
        )
    }

    // NOTE(review): returns key.defaultValue (not null) when no document exists,
    // but null when the document exists without this key — confirm intended.
    private suspend fun <T> doRead(uuid: UUID, key: PersistentDataKey<T>): T? {
        val profile = collection.find<UUIDProfile>(Filters.eq(UUIDProfile::uuid.name, uuid.toString()))
            .firstOrNull() ?: return key.defaultValue
        return profile.data[key.key.toString()] as? T?
    }

    // Fetches the document for this UUID, upserting an empty one if absent.
    private suspend fun getOrCreateDocument(uuid: UUID): UUIDProfile {
        val profile = collection.find<UUIDProfile>(Filters.eq(UUIDProfile::uuid.name, uuid.toString()))
            .firstOrNull()
        return if (profile == null) {
            val toInsert = UUIDProfile(
                uuid.toString(),
                mutableMapOf()
            )

            collection.replaceOne(
                Filters.eq(UUIDProfile::uuid.name, uuid.toString()),
                toInsert,
                ReplaceOptions().upsert(true)
            )
            toInsert
        } else {
            profile
        }
    }

    // Mongo handlers are interchangeable: equality is by type.
    override fun equals(other: Any?): Boolean {
        if (this === other) {
            return true
        }

        return other is MongoDataHandler
    }

    override fun hashCode(): Int {
        return type.hashCode()
    }
}
|
||||
|
||||
/**
 * Document shape for the Mongo handler: one document per profile, with all
 * key/value pairs in a single map.
 */
@Serializable
internal data class UUIDProfile(
    // Storing UUID as strings for serialization
    @SerialName("_id") val uuid: String,
    // Storing NamespacedKeys as strings for serialization
    val data: MutableMap<String, @Contextual Any>
)
|
||||
@@ -1,169 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.storage
|
||||
|
||||
import com.github.benmanes.caffeine.cache.Caffeine
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder
|
||||
import com.willfp.eco.core.config.ConfigType
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.config.readConfig
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.data.ProfileHandler
|
||||
import com.zaxxer.hikari.HikariConfig
|
||||
import com.zaxxer.hikari.HikariDataSource
|
||||
import org.jetbrains.exposed.dao.id.UUIDTable
|
||||
import org.jetbrains.exposed.sql.Column
|
||||
import org.jetbrains.exposed.sql.Database
|
||||
import org.jetbrains.exposed.sql.ResultRow
|
||||
import org.jetbrains.exposed.sql.SchemaUtils
|
||||
import org.jetbrains.exposed.sql.TextColumnType
|
||||
import org.jetbrains.exposed.sql.insert
|
||||
import org.jetbrains.exposed.sql.select
|
||||
import org.jetbrains.exposed.sql.transactions.transaction
|
||||
import org.jetbrains.exposed.sql.update
|
||||
import java.util.UUID
|
||||
import java.util.concurrent.Executors
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
/*
|
||||
Better than old MySQL data handler, but that's only because it's literally just dumping all the
|
||||
data into a single text column, containing the contents of the players profile as a Config.
|
||||
|
||||
Whatever. At least it works.
|
||||
*/
|
||||
|
||||
/**
 * Legacy MySQL handler: each profile is a single row whose `json_data` text
 * column holds the entire profile serialized as a JSON config.
 */
@Suppress("UNCHECKED_CAST")
class MySQLDataHandler(
    plugin: EcoSpigotPlugin,
    private val handler: ProfileHandler
) : DataHandler(HandlerType.MYSQL) {
    private val database: Database
    private val table = UUIDTable("eco_data")

    // Short-lived row cache to coalesce repeated reads of the same profile.
    private val rows = Caffeine.newBuilder()
        .expireAfterWrite(3, TimeUnit.SECONDS)
        .build<UUID, ResultRow>()

    // Dedicated pool so writes never block the server thread.
    private val threadFactory = ThreadFactoryBuilder().setNameFormat("eco-mysql-thread-%d").build()
    private val executor = Executors.newFixedThreadPool(plugin.configYml.getInt("mysql.threads"), threadFactory)

    // The json_data column is registered at runtime (see init), so it is
    // looked up by name rather than held as a property.
    private val dataColumn: Column<String>
        get() = table.columns.first { it.name == "json_data" } as Column<String>

    init {
        // Hikari connection pool configured from the plugin config.
        val config = HikariConfig()
        config.driverClassName = "com.mysql.cj.jdbc.Driver"
        config.username = plugin.configYml.getString("mysql.user")
        config.password = plugin.configYml.getString("mysql.password")
        config.jdbcUrl = "jdbc:mysql://" +
                "${plugin.configYml.getString("mysql.host")}:" +
                "${plugin.configYml.getString("mysql.port")}/" +
                plugin.configYml.getString("mysql.database")
        config.maximumPoolSize = plugin.configYml.getInt("mysql.connections")

        database = Database.connect(HikariDataSource(config))

        transaction(database) {
            SchemaUtils.create(table)

            // Add the json_data column to the table definition, then sync the schema.
            table.apply {
                registerColumn<String>("json_data", TextColumnType())
            }

            SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
        }
    }

    override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
        val data = getData(uuid)

        // Dispatch on the key type; unsupported types read as null.
        val value: Any? = when (key.type) {
            PersistentDataKeyType.INT -> data.getIntOrNull(key.key.toString())
            PersistentDataKeyType.DOUBLE -> data.getDoubleOrNull(key.key.toString())
            PersistentDataKeyType.STRING -> data.getStringOrNull(key.key.toString())
            PersistentDataKeyType.BOOLEAN -> data.getBoolOrNull(key.key.toString())
            PersistentDataKeyType.STRING_LIST -> data.getStringsOrNull(key.key.toString())
            PersistentDataKeyType.CONFIG -> data.getSubsectionOrNull(key.key.toString())
            PersistentDataKeyType.BIG_DECIMAL -> data.getBigDecimalOrNull(key.key.toString())

            else -> null
        }

        return value as? T?
    }

    override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        // Read-modify-write of the whole JSON blob; persisted asynchronously.
        val data = getData(uuid)
        data.set(key.key.toString(), value)

        setData(uuid, data)
    }

    override fun saveKeysFor(uuid: UUID, keys: Map<PersistentDataKey<*>, Any>) {
        executor.submit {
            val data = getData(uuid)

            for ((key, value) in keys) {
                data.set(key.key.toString(), value)
            }

            doSetData(uuid, data)
        }
    }

    // Loads (or lazily creates) the row for this UUID and parses its JSON blob.
    private fun getData(uuid: UUID): Config {
        val plaintext = transaction(database) {
            val row = rows.get(uuid) {
                val row = table.select { table.id eq uuid }.limit(1).singleOrNull()

                if (row != null) {
                    row
                } else {
                    // No row yet: insert an empty profile, then re-select it.
                    transaction(database) {
                        table.insert {
                            it[id] = uuid
                            it[dataColumn] = "{}"
                        }
                    }
                    table.select { table.id eq uuid }.limit(1).singleOrNull()
                }
            }

            row.getOrNull(dataColumn) ?: "{}"
        }

        return readConfig(plaintext, ConfigType.JSON)
    }

    // Asynchronous write of the whole profile blob.
    private fun setData(uuid: UUID, config: Config) {
        executor.submit {
            doSetData(uuid, config)
        }
    }

    private fun doSetData(uuid: UUID, config: Config) {
        transaction(database) {
            table.update({ table.id eq uuid }) {
                it[dataColumn] = config.toPlaintext()
            }
        }
    }

    /** Re-syncs the schema; called after all plugins have registered keys. */
    override fun initialize() {
        transaction(database) {
            SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
        }
    }

    // MySQL handlers are interchangeable: equality is by type.
    override fun equals(other: Any?): Boolean {
        if (this === other) {
            return true
        }

        return other is MySQLDataHandler
    }

    override fun hashCode(): Int {
        return type.hashCode()
    }
}
|
||||
@@ -1,27 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.storage
|
||||
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.internal.spigot.data.EcoProfile
|
||||
import com.willfp.eco.internal.spigot.data.ProfileHandler
|
||||
|
||||
/**
 * Legacy periodic saver: every save-interval ticks, flushes the keys of every
 * profile that changed since the last run.
 */
class ProfileSaver(
    private val plugin: EcoPlugin,
    private val handler: ProfileHandler
) {
    fun startTicking() {
        val interval = plugin.configYml.getInt("save-interval").toLong()

        plugin.scheduler.runTimer(20, interval) {
            // CHANGE_MAP holds the UUIDs with unsaved modifications; each is
            // drained from the map as it is processed.
            val iterator = EcoProfile.CHANGE_MAP.iterator()

            while (iterator.hasNext()) {
                val uuid = iterator.next()
                iterator.remove()

                // Skip profiles that were unloaded since being marked dirty.
                val profile = handler.accessLoadedProfile(uuid) ?: continue

                handler.saveKeysFor(uuid, profile.data.keys)
            }
        }
    }
}
|
||||
@@ -1,67 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.storage
|
||||
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.data.ProfileHandler
|
||||
import org.bukkit.NamespacedKey
|
||||
import java.util.UUID
|
||||
|
||||
/**
 * Legacy yaml data handler: values live under `player.<uuid>.<key>` in
 * data.yml; all writes hit the in-memory config and are flushed on save.
 */
@Suppress("UNCHECKED_CAST")
class YamlDataHandler(
    plugin: EcoSpigotPlugin,
    private val handler: ProfileHandler
) : DataHandler(HandlerType.YAML) {
    private val dataYml = plugin.dataYml

    /** Synchronous flush to disk. */
    override fun save() {
        dataYml.save()
    }

    /** Asynchronous flush to disk. */
    override fun saveAsync() {
        dataYml.saveAsync()
    }

    override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
        // Separate `as T?` for each branch to prevent compiler warnings.
        val value = when (key.type) {
            PersistentDataKeyType.INT -> dataYml.getIntOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.DOUBLE -> dataYml.getDoubleOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.STRING -> dataYml.getStringOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.BOOLEAN -> dataYml.getBoolOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.STRING_LIST -> dataYml.getStringsOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.CONFIG -> dataYml.getSubsectionOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.BIG_DECIMAL -> dataYml.getBigDecimalOrNull("player.$uuid.${key.key}") as T?

            // Unsupported key types cannot be read from yaml.
            else -> null
        }

        return value
    }

    override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        doWrite(uuid, key.key, value)
    }

    override fun saveKeysFor(uuid: UUID, keys: Map<PersistentDataKey<*>, Any>) {
        for ((key, value) in keys) {
            doWrite(uuid, key.key, value)
        }
    }

    // All writes funnel through here; the config is only flushed by save().
    private fun doWrite(uuid: UUID, key: NamespacedKey, value: Any) {
        dataYml.set("player.$uuid.$key", value)
    }

    // Yaml handlers are interchangeable: equality is by type.
    override fun equals(other: Any?): Boolean {
        if (this === other) {
            return true
        }

        return other is YamlDataHandler
    }

    override fun hashCode(): Int {
        return type.hashCode()
    }
}
|
||||
@@ -6,8 +6,8 @@
|
||||
|
||||
# How player/server data is saved:
|
||||
# yaml - Stored in data.yml: Good option for single-node servers (i.e. no BungeeCord/Velocity)
|
||||
# mongo - If you're running on a network (Bungee/Velocity), you should use MongoDB if you can.
|
||||
# mysql - The alternative to MongoDB. Because of how eco data works, MongoDB is the best option; but use this if you can't.
|
||||
# mysql - Standard database, great option for multi-node servers (i.e. BungeeCord/Velocity)
|
||||
# mongodb - Alternative database, great option for multi-node servers (i.e. BungeeCord/Velocity)
|
||||
data-handler: yaml
|
||||
|
||||
# If data should be migrated automatically when changing data handler.
|
||||
@@ -16,25 +16,26 @@ perform-data-migration: true
|
||||
mongodb:
|
||||
# The full MongoDB connection URL.
|
||||
url: ""
|
||||
|
||||
# The name of the database to use.
|
||||
database: "eco"
|
||||
database: eco
|
||||
|
||||
# The collection to use for player data.
|
||||
collection: profiles
|
||||
|
||||
mysql:
|
||||
# How many threads to execute statements on. Higher numbers can be faster however
|
||||
# very high numbers can cause issues with OS configuration. If writes are taking
|
||||
# too long, increase this value.
|
||||
threads: 2
|
||||
# The table prefix to use for all tables.
|
||||
prefix: "eco_"
|
||||
|
||||
# The maximum number of MySQL connections.
|
||||
connections: 10
|
||||
|
||||
# Connection details for MySQL.
|
||||
host: localhost
|
||||
port: 3306
|
||||
database: database
|
||||
user: username
|
||||
password: passy
|
||||
|
||||
yaml:
|
||||
autosave: true # If data should be saved automatically
|
||||
autosave-interval: 1800 # How often data should be saved (in seconds)
|
||||
password: p4ssw0rd
|
||||
|
||||
# How many ticks to wait between committing data to a database. This doesn't
|
||||
# affect yaml storage, only MySQL and MongoDB. By default, data is committed
|
||||
@@ -42,6 +43,9 @@ yaml:
|
||||
# would be committing once a second.
|
||||
save-interval: 1
|
||||
|
||||
# How many ticks to wait between autosaves for data.yml.
|
||||
autosave-interval: 36000 # 30 minutes
|
||||
|
||||
# Options to manage the conflict finder
|
||||
conflicts:
|
||||
whitelist: # Plugins that should never be marked as conflicts
|
||||
@@ -101,7 +105,7 @@ math-cache-ttl: 200
|
||||
# The time (in minutes) for literal patterns to be cached for. Higher values will lead to
|
||||
# faster evaluation times (less CPU usage) at the expense of slightly more memory usage and
|
||||
# less reactive values. (Do not change unless you are told to).
|
||||
literal-cache-ttl: 1
|
||||
literal-cache-ttl: 10
|
||||
|
||||
# If anonymous usage statistics should be tracked. This is very valuable information as it
|
||||
# helps understand how eco and other plugins are being used by logging player and server
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
version = 6.73.2
|
||||
version = 6.74.0
|
||||
kotlin.incremental.useClasspathSnapshot=false
|
||||
Reference in New Issue
Block a user