Compare commits
1 Commits
6.74.2
...
custom-blo
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4a8935f06d |
@@ -14,6 +14,7 @@ plugins {
|
||||
id("maven-publish")
|
||||
id("java")
|
||||
kotlin("jvm") version "1.9.21"
|
||||
kotlin("plugin.serialization") version "1.9.21"
|
||||
}
|
||||
|
||||
dependencies {
|
||||
@@ -40,6 +41,7 @@ allprojects {
|
||||
apply(plugin = "maven-publish")
|
||||
apply(plugin = "io.github.goooler.shadow")
|
||||
apply(plugin = "kotlin")
|
||||
apply(plugin = "org.jetbrains.kotlin.plugin.serialization")
|
||||
|
||||
repositories {
|
||||
mavenCentral()
|
||||
@@ -61,7 +63,7 @@ allprojects {
|
||||
maven("https://repo.extendedclip.com/content/repositories/placeholderapi/")
|
||||
|
||||
// ProtocolLib
|
||||
maven("https://repo.dmulloy2.net/nexus/repository/public/")
|
||||
//maven("https://repo.dmulloy2.net/nexus/repository/public/")
|
||||
|
||||
// WorldGuard
|
||||
maven("https://maven.enginehub.org/repo/")
|
||||
@@ -210,6 +212,7 @@ tasks {
|
||||
//relocate("com.mysql", "com.willfp.eco.libs.mysql")
|
||||
relocate("com.mongodb", "com.willfp.eco.libs.mongodb")
|
||||
relocate("org.bson", "com.willfp.eco.libs.bson")
|
||||
relocate("org.litote", "com.willfp.eco.libs.litote")
|
||||
relocate("org.reactivestreams", "com.willfp.eco.libs.reactivestreams")
|
||||
relocate("reactor.", "com.willfp.eco.libs.reactor.") // Dot in name to be safe
|
||||
relocate("com.moandjiezana.toml", "com.willfp.eco.libs.toml")
|
||||
|
||||
149
eco-api/src/main/java/com/willfp/eco/core/blocks/Blocks.java
Normal file
149
eco-api/src/main/java/com/willfp/eco/core/blocks/Blocks.java
Normal file
@@ -0,0 +1,149 @@
|
||||
package com.willfp.eco.core.blocks;

import com.willfp.eco.core.blocks.impl.EmptyTestableBlock;
import com.willfp.eco.core.blocks.impl.MaterialTestableBlock;
import com.willfp.eco.core.blocks.impl.UnrestrictedMaterialTestableBlock;
import com.willfp.eco.core.blocks.provider.BlockProvider;
import com.willfp.eco.util.NamespacedKeyUtils;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.NamespacedKey;
import org.bukkit.block.Block;
import org.jetbrains.annotations.NotNull;

import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Class to manage all custom and vanilla blocks.
 */
public final class Blocks {
    /**
     * All registered custom blocks, keyed by their namespaced key.
     */
    private static final Map<NamespacedKey, TestableBlock> REGISTRY = new ConcurrentHashMap<>();

    /**
     * All block providers, keyed by namespace.
     */
    private static final Map<String, BlockProvider> PROVIDERS = new ConcurrentHashMap<>();

    /**
     * The lookup handler.
     */
    private static final BlocksLookupHandler BLOCKS_LOOKUP_HANDLER = new BlocksLookupHandler(Blocks::doParse);

    /**
     * Register a new custom block.
     *
     * @param key   The key of the block.
     * @param block The block.
     */
    public static void registerCustomBlock(@NotNull final NamespacedKey key,
                                           @NotNull final TestableBlock block) {
        REGISTRY.put(key, block);
    }

    /**
     * Register a new block provider.
     *
     * @param provider The provider.
     */
    public static void registerBlockProvider(@NotNull final BlockProvider provider) {
        PROVIDERS.put(provider.getNamespace(), provider);
    }

    /**
     * Remove a block.
     *
     * @param key The key of the block.
     */
    public static void removeCustomBlock(@NotNull final NamespacedKey key) {
        REGISTRY.remove(key);
    }

    /**
     * This is the backbone of the eco block system.
     * <p>
     * You can look up a TestableBlock for any material or custom block,
     * and it will return it.
     * <p>
     * If you want to get a Block instance from this, then just call
     * {@link TestableBlock#place(Location)}.
     *
     * @param key The lookup string.
     * @return The testable block, or an empty testable block if not found.
     */
    @NotNull
    public static TestableBlock lookup(@NotNull final String key) {
        return BLOCKS_LOOKUP_HANDLER.parseKey(key);
    }

    /**
     * Parse a single lookup argument into a testable block.
     *
     * @param args The lookup arguments; only the first element is used.
     * @return The parsed block, or an empty testable block on failure.
     */
    @NotNull
    private static TestableBlock doParse(@NotNull final String[] args) {
        if (args.length == 0) {
            return new EmptyTestableBlock();
        }

        String[] split = args[0].toLowerCase().split(":");
        if (split.length == 1) {
            if (args[0].startsWith("*")) {
                // Material.getMaterial is case-sensitive and expects enum-style
                // (uppercase) names — uppercase here so lookups like "*stone" resolve,
                // matching the non-wildcard branch below.
                Material type = Material.getMaterial(args[0].substring(1).toUpperCase());
                return (type == null) ? new EmptyTestableBlock() : new UnrestrictedMaterialTestableBlock(type);
            } else {
                Material type = Material.getMaterial(args[0].toUpperCase());
                return (type == null) ? new EmptyTestableBlock() : new MaterialTestableBlock(type);
            }
        }

        NamespacedKey namespacedKey = NamespacedKeyUtils.create(split[0], split[1]);
        TestableBlock block = REGISTRY.get(namespacedKey);

        if (block != null) {
            return block;
        }

        // Fall back to call-site providers; cache the result in the registry
        // so subsequent lookups are direct map hits.
        BlockProvider provider = PROVIDERS.get(split[0]);
        if (provider == null) {
            return new EmptyTestableBlock();
        }

        block = provider.provideForKey(split[1]);
        if (block == null) {
            return new EmptyTestableBlock();
        }

        registerCustomBlock(namespacedKey, block);
        return block;
    }

    /**
     * Get if block is a custom block.
     *
     * @param block The block to check.
     * @return If is custom.
     */
    public static boolean isCustomBlock(@NotNull final Block block) {
        for (TestableBlock testable : REGISTRY.values()) {
            if (testable.matches(block)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Get all registered custom blocks.
     *
     * @return A set of all blocks.
     */
    public static Set<TestableBlock> getCustomBlocks() {
        return new HashSet<>(REGISTRY.values());
    }

    private Blocks() {
        throw new UnsupportedOperationException("This is a utility class and cannot be instantiated");
    }
}
|
||||
@@ -0,0 +1,48 @@
|
||||
package com.willfp.eco.core.blocks;

import com.willfp.eco.core.blocks.impl.EmptyTestableBlock;
import com.willfp.eco.core.blocks.impl.GroupedTestableBlocks;
import com.willfp.eco.core.lookup.LookupHandler;
import org.jetbrains.annotations.NotNull;

import java.util.Collection;
import java.util.function.Function;

/**
 * Handle block lookup strings.
 */
public class BlocksLookupHandler implements LookupHandler<TestableBlock> {
    /**
     * The delegate that turns lookup arguments into a block.
     */
    private final Function<String[], @NotNull TestableBlock> delegate;

    /**
     * Create new lookup handler.
     *
     * @param parser The parser.
     */
    public BlocksLookupHandler(@NotNull final Function<String[], @NotNull TestableBlock> parser) {
        this.delegate = parser;
    }

    /**
     * Parse lookup arguments into a block.
     *
     * @param args The arguments.
     * @return The parsed block.
     */
    @Override
    public @NotNull TestableBlock parse(@NotNull final String[] args) {
        return delegate.apply(args);
    }

    /**
     * A parsed block is valid unless it is the empty fallback.
     *
     * @param object The block to validate.
     * @return If the block is a real (non-empty) block.
     */
    @Override
    public boolean validate(@NotNull final TestableBlock object) {
        return !(object instanceof EmptyTestableBlock);
    }

    /**
     * The failsafe returned when parsing cannot succeed.
     *
     * @return An empty testable block.
     */
    @Override
    public @NotNull TestableBlock getFailsafe() {
        return new EmptyTestableBlock();
    }

    /**
     * Join several options into a single block that matches any of them.
     *
     * @param options The options.
     * @return The grouped block.
     */
    @Override
    public @NotNull TestableBlock join(@NotNull final Collection<TestableBlock> options) {
        return new GroupedTestableBlocks(options);
    }
}
|
||||
@@ -0,0 +1,84 @@
|
||||
package com.willfp.eco.core.blocks;

import org.apache.commons.lang.Validate;
import org.bukkit.Location;
import org.bukkit.NamespacedKey;
import org.bukkit.block.Block;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.function.Function;
import java.util.function.Predicate;

/**
 * A custom block has 3 components.
 *
 * <ul>
 *     <li>The key to identify it</li>
 *     <li>The test to check if any block is this custom block</li>
 *     <li>The supplier to spawn the custom {@link Block}</li>
 * </ul>
 */
public class CustomBlock implements TestableBlock {
    /**
     * The namespaced key identifying this custom block.
     */
    private final NamespacedKey key;

    /**
     * The predicate deciding whether a world block is this custom block.
     */
    private final Predicate<@NotNull Block> matcher;

    /**
     * The function that places this block at a given location.
     */
    private final Function<Location, Block> placer;

    /**
     * Create a new custom block.
     *
     * @param key      The block key.
     * @param test     The test.
     * @param provider The provider to spawn the block.
     */
    public CustomBlock(@NotNull final NamespacedKey key,
                       @NotNull final Predicate<@NotNull Block> test,
                       @NotNull final Function<Location, Block> provider) {
        this.key = key;
        this.matcher = test;
        this.placer = provider;
    }

    @Override
    public boolean matches(@Nullable final Block other) {
        return other != null && matcher.test(other);
    }

    @Override
    public @NotNull Block place(@NotNull final Location location) {
        // The location must be in a loaded world for placement to make sense.
        Validate.notNull(location.getWorld());

        return placer.apply(location);
    }

    /**
     * Register the block.
     */
    public void register() {
        Blocks.registerCustomBlock(this.getKey(), this);
    }

    /**
     * Get the key.
     *
     * @return The key.
     */
    public NamespacedKey getKey() {
        return this.key;
    }
}
|
||||
@@ -0,0 +1,31 @@
|
||||
package com.willfp.eco.core.blocks;
|
||||
|
||||
import com.willfp.eco.core.lookup.Testable;
|
||||
import org.bukkit.Location;
|
||||
import org.bukkit.block.Block;
|
||||
import org.bukkit.entity.Entity;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
/**
|
||||
* A block with a test.
|
||||
*/
|
||||
public interface TestableBlock extends Testable<Block> {
|
||||
/**
|
||||
* If a Block matches the test.
|
||||
*
|
||||
* @param other The other block.
|
||||
* @return If the block matches.
|
||||
*/
|
||||
@Override
|
||||
boolean matches(@Nullable Block other);
|
||||
|
||||
/**
|
||||
* Place the block.
|
||||
*
|
||||
* @param location The location.
|
||||
* @return The block.
|
||||
*/
|
||||
@NotNull
|
||||
Block place(@NotNull Location location);
|
||||
}
|
||||
@@ -0,0 +1,29 @@
|
||||
package com.willfp.eco.core.blocks.impl;
|
||||
|
||||
import com.willfp.eco.core.blocks.TestableBlock;
|
||||
import org.bukkit.Location;
|
||||
import org.bukkit.block.Block;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
/**
|
||||
* Empty block.
|
||||
*/
|
||||
public class EmptyTestableBlock implements TestableBlock {
|
||||
/**
|
||||
* Create a new empty testable block.
|
||||
*/
|
||||
public EmptyTestableBlock() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean matches(@Nullable final Block other) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public @NotNull Block place(@NotNull final Location location) {
|
||||
return location.getBlock();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,60 @@
|
||||
package com.willfp.eco.core.blocks.impl;

import com.willfp.eco.core.blocks.TestableBlock;
import com.willfp.eco.util.NumberUtils;
import org.apache.commons.lang.Validate;
import org.bukkit.Location;
import org.bukkit.block.Block;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Collection;

/**
 * A group of testable blocks.
 */
public class GroupedTestableBlocks implements TestableBlock {
    /**
     * The member blocks of this group.
     */
    private final Collection<TestableBlock> children;

    /**
     * Create a new group of testable blocks.
     *
     * @param children The children.
     */
    public GroupedTestableBlocks(@NotNull final Collection<TestableBlock> children) {
        Validate.isTrue(!children.isEmpty(), "Group must have at least one child!");

        this.children = children;
    }

    @Override
    public boolean matches(@Nullable final Block other) {
        // A group matches if any member matches.
        return children.stream().anyMatch(child -> child.matches(other));
    }

    @Override
    public @NotNull Block place(@NotNull final Location location) {
        // Place a uniformly random member of the group.
        ArrayList<TestableBlock> snapshot = new ArrayList<>(children);
        TestableBlock chosen = snapshot.get(NumberUtils.randInt(0, snapshot.size() - 1));
        return chosen.place(location);
    }

    /**
     * Get the children.
     *
     * @return The children.
     */
    public Collection<TestableBlock> getChildren() {
        // Defensive copy so callers cannot mutate the group.
        return new ArrayList<>(children);
    }
}
|
||||
@@ -0,0 +1,59 @@
|
||||
package com.willfp.eco.core.blocks.impl;
|
||||
|
||||
import com.willfp.eco.core.blocks.Blocks;
|
||||
import com.willfp.eco.core.blocks.TestableBlock;
|
||||
import org.apache.commons.lang.Validate;
|
||||
import org.bukkit.Location;
|
||||
import org.bukkit.Material;
|
||||
import org.bukkit.block.Block;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
/**
|
||||
* A testable block for vanilla materials.
|
||||
*/
|
||||
public class MaterialTestableBlock implements TestableBlock {
|
||||
/**
|
||||
* The block type.
|
||||
*/
|
||||
private final Material material;
|
||||
|
||||
/**
|
||||
* Create a new unrestricted material testable block.
|
||||
*
|
||||
* @param material The material.
|
||||
*/
|
||||
public MaterialTestableBlock(@NotNull final Material material) {
|
||||
this.material = material;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean matches(@Nullable final Block block) {
|
||||
boolean simpleMatches = block != null && block.getType() == material;
|
||||
|
||||
if (!simpleMatches) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return !Blocks.isCustomBlock(block);
|
||||
}
|
||||
|
||||
@Override
|
||||
public @NotNull Block place(@NotNull Location location) {
|
||||
Validate.notNull(location.getWorld());
|
||||
|
||||
Block block = location.getWorld().getBlockAt(location);
|
||||
block.setType(material);
|
||||
|
||||
return block;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the material.
|
||||
*
|
||||
* @return The material.
|
||||
*/
|
||||
public Material getMaterial() {
|
||||
return this.material;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,52 @@
|
||||
package com.willfp.eco.core.blocks.impl;
|
||||
|
||||
import com.willfp.eco.core.blocks.TestableBlock;
|
||||
import org.apache.commons.lang.Validate;
|
||||
import org.bukkit.Location;
|
||||
import org.bukkit.Material;
|
||||
import org.bukkit.block.Block;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
/**
|
||||
* A testable block for materials regardless of data.
|
||||
*/
|
||||
public class UnrestrictedMaterialTestableBlock implements TestableBlock {
|
||||
/**
|
||||
* The block type.
|
||||
*/
|
||||
private final Material material;
|
||||
|
||||
/**
|
||||
* Create a new unrestricted material testable block.
|
||||
*
|
||||
* @param material The material.
|
||||
*/
|
||||
public UnrestrictedMaterialTestableBlock(@NotNull final Material material) {
|
||||
this.material = material;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean matches(@Nullable final Block other) {
|
||||
return other != null && other.getType() == material;
|
||||
}
|
||||
|
||||
@Override
|
||||
public @NotNull Block place(@NotNull Location location) {
|
||||
Validate.notNull(location.getWorld());
|
||||
|
||||
Block block = location.getWorld().getBlockAt(location);
|
||||
block.setType(material);
|
||||
|
||||
return block;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the material.
|
||||
*
|
||||
* @return The material.
|
||||
*/
|
||||
public Material getMaterial() {
|
||||
return this.material;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,49 @@
|
||||
package com.willfp.eco.core.blocks.provider;
|
||||
|
||||
import com.willfp.eco.core.blocks.TestableBlock;
|
||||
import com.willfp.eco.core.registry.Registry;
|
||||
import org.apache.commons.lang.Validate;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
/**
|
||||
* Block providers are call-site registrations for blocks. In other words,
|
||||
* they only register their blocks when a request is made. This is marginally
|
||||
* slower, however it is required for certain plugins, and fixes bugs related to
|
||||
* loading orders.
|
||||
*
|
||||
* @see TestableBlock
|
||||
*/
|
||||
public abstract class BlockProvider {
|
||||
/**
|
||||
* The namespace.
|
||||
*/
|
||||
private final String namespace;
|
||||
|
||||
/**
|
||||
* Create a new BlockProvider for a specific namespace.
|
||||
*
|
||||
* @param namespace The namespace.
|
||||
*/
|
||||
protected BlockProvider(@NotNull final String namespace) {
|
||||
this.namespace = namespace;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide a TestableBlock for a given key.
|
||||
*
|
||||
* @param key The block ID.
|
||||
* @return The TestableBlock, or null if not found.
|
||||
*/
|
||||
@Nullable
|
||||
public abstract TestableBlock provideForKey(@NotNull String key);
|
||||
|
||||
/**
|
||||
* Get the namespace.
|
||||
*
|
||||
* @return The namespace.
|
||||
*/
|
||||
public String getNamespace() {
|
||||
return this.namespace;
|
||||
}
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
package com.willfp.eco.core.data.handlers;

import com.willfp.eco.core.data.keys.PersistentDataKey;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.UUID;

/**
 * Handles data read/write for a {@link com.willfp.eco.core.data.keys.PersistentDataKeyType} for a specific
 * data handler.
 *
 * @param <T> The type of data.
 */
public abstract class DataTypeSerializer<T> {
    /**
     * Create a new data type serializer.
     */
    protected DataTypeSerializer() {
        // No shared state; subclasses implement the read/write logic.
    }

    /**
     * Read a value.
     *
     * @param uuid The uuid.
     * @param key  The key.
     * @return The value.
     */
    @Nullable
    public abstract T readAsync(@NotNull final UUID uuid,
                                @NotNull final PersistentDataKey<T> key);

    /**
     * Write a value.
     *
     * @param uuid  The uuid.
     * @param key   The key.
     * @param value The value.
     */
    public abstract void writeAsync(@NotNull final UUID uuid,
                                    @NotNull final PersistentDataKey<T> key,
                                    @NotNull final T value);
}
|
||||
@@ -1,180 +0,0 @@
|
||||
package com.willfp.eco.core.data.handlers;

import com.willfp.eco.core.data.keys.PersistentDataKey;
import com.willfp.eco.core.registry.Registrable;
import com.willfp.eco.core.tuples.Pair;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * Handles persistent data.
 */
public abstract class PersistentDataHandler implements Registrable {
    /**
     * The id.
     */
    private final String id;

    /**
     * The executor used to serialize all reads and writes off the main thread.
     */
    private final ExecutorService executor = Executors.newCachedThreadPool();

    /**
     * Create a new persistent data handler.
     *
     * @param id The id.
     */
    protected PersistentDataHandler(@NotNull final String id) {
        this.id = id;
    }

    /**
     * Get all UUIDs with saved data.
     * <p>
     * This is a blocking operation.
     *
     * @return All saved UUIDs.
     */
    public abstract Set<UUID> getSavedUUIDs();

    /**
     * Save to disk.
     * <p>
     * If write commits to disk, this method does not need to be overridden.
     * <p>
     * This method is called asynchronously.
     */
    protected void doSave() {
        // Save to disk
    }

    /**
     * If the handler should autosave.
     *
     * @return If the handler should autosave.
     */
    public boolean shouldAutosave() {
        return true;
    }

    /**
     * Save the data.
     */
    public final void save() {
        executor.submit(this::doSave);
    }

    /**
     * Read a key from persistent data.
     *
     * @param uuid The uuid.
     * @param key  The key.
     * @param <T>  The type of the key.
     * @return The value, or null if not found.
     */
    @Nullable
    public final <T> T read(@NotNull final UUID uuid,
                            @NotNull final PersistentDataKey<T> key) {
        DataTypeSerializer<T> serializer = key.getType().getSerializer(this);
        Future<T> future = executor.submit(() -> serializer.readAsync(uuid, key));

        try {
            return future.get();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            return null;
        } catch (ExecutionException e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Write a key to persistent data.
     *
     * @param uuid  The uuid.
     * @param key   The key.
     * @param value The value.
     * @param <T>   The type of the key.
     */
    public final <T> void write(@NotNull final UUID uuid,
                                @NotNull final PersistentDataKey<T> key,
                                @NotNull final T value) {
        DataTypeSerializer<T> serializer = key.getType().getSerializer(this);
        executor.submit(() -> serializer.writeAsync(uuid, key, value));
    }

    /**
     * Serialize profile.
     *
     * @param uuid The uuid to serialize.
     * @param keys The keys to serialize.
     * @return The serialized data.
     */
    @NotNull
    public final SerializedProfile serializeProfile(@NotNull final UUID uuid,
                                                    @NotNull final Set<PersistentDataKey<?>> keys) {
        // Kick off all reads concurrently, then join and drop absent values.
        Map<PersistentDataKey<?>, CompletableFuture<Object>> futures = keys.stream()
                .collect(Collectors.toMap(
                        key -> key,
                        key -> CompletableFuture.supplyAsync(() -> read(uuid, key), executor)
                ));

        Map<PersistentDataKey<?>, Object> data = futures.entrySet().stream()
                .map(entry -> new Pair<PersistentDataKey<?>, Object>(entry.getKey(), entry.getValue().join()))
                .filter(entry -> entry.getSecond() != null)
                .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond));

        return new SerializedProfile(uuid, data);
    }

    /**
     * Load profile data.
     *
     * @param profile The profile.
     */
    @SuppressWarnings("unchecked")
    public final void loadSerializedProfile(@NotNull final SerializedProfile profile) {
        for (Map.Entry<PersistentDataKey<?>, Object> entry : profile.data().entrySet()) {
            PersistentDataKey<?> key = entry.getKey();
            Object value = entry.getValue();

            // This cast is safe because the data is serialized
            write(profile.uuid(), (PersistentDataKey<? super Object>) key, value);
        }
    }

    /**
     * Save and shutdown the handler.
     *
     * @throws InterruptedException If the writes could not be awaited.
     */
    public final void shutdown() throws InterruptedException {
        doSave();

        if (executor.isShutdown()) {
            return;
        }

        executor.shutdown();
        while (!executor.awaitTermination(2, TimeUnit.MINUTES)) {
            // Wait
        }
    }

    @Override
    @NotNull
    public final String getID() {
        return id;
    }
}
|
||||
@@ -1,20 +0,0 @@
|
||||
package com.willfp.eco.core.data.handlers;

import com.willfp.eco.core.data.keys.PersistentDataKey;
import org.jetbrains.annotations.NotNull;

import java.util.Map;
import java.util.UUID;

/**
 * Serialized profile.
 *
 * @param uuid The uuid.
 * @param data The data.
 */
public record SerializedProfile(
        @NotNull UUID uuid,
        @NotNull Map<PersistentDataKey<?>, Object> data
) {
}
|
||||
@@ -34,19 +34,6 @@ public final class PersistentDataKey<T> {
|
||||
*/
|
||||
private final boolean isLocal;
|
||||
|
||||
/**
|
||||
* Create a new Persistent Data Key.
|
||||
*
|
||||
* @param key The key.
|
||||
* @param type The data type.
|
||||
* @param defaultValue The default value.
|
||||
*/
|
||||
public PersistentDataKey(@NotNull final NamespacedKey key,
|
||||
@NotNull final PersistentDataKeyType<T> type,
|
||||
@NotNull final T defaultValue) {
|
||||
this(key, type, defaultValue, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Persistent Data Key.
|
||||
*
|
||||
@@ -67,6 +54,24 @@ public final class PersistentDataKey<T> {
|
||||
Eco.get().registerPersistentKey(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Persistent Data Key.
|
||||
*
|
||||
* @param key The key.
|
||||
* @param type The data type.
|
||||
* @param defaultValue The default value.
|
||||
*/
|
||||
public PersistentDataKey(@NotNull final NamespacedKey key,
|
||||
@NotNull final PersistentDataKeyType<T> type,
|
||||
@NotNull final T defaultValue) {
|
||||
this.key = key;
|
||||
this.defaultValue = defaultValue;
|
||||
this.type = type;
|
||||
this.isLocal = false;
|
||||
|
||||
Eco.get().registerPersistentKey(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "PersistentDataKey{"
|
||||
|
||||
@@ -1,17 +1,12 @@
|
||||
package com.willfp.eco.core.data.keys;
|
||||
|
||||
import com.willfp.eco.core.config.interfaces.Config;
|
||||
import com.willfp.eco.core.data.handlers.DataTypeSerializer;
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
@@ -65,58 +60,24 @@ public final class PersistentDataKeyType<T> {
|
||||
*/
|
||||
private final String name;
|
||||
|
||||
/**
|
||||
* The serializers for this key type.
|
||||
*/
|
||||
private final Map<PersistentDataHandler, DataTypeSerializer<T>> serializers = new HashMap<>();
|
||||
|
||||
/**
|
||||
* Create new PersistentDataKeyType.
|
||||
*
|
||||
* @param name The name.
|
||||
*/
|
||||
private PersistentDataKeyType(@NotNull final String name) {
|
||||
VALUES.add(this);
|
||||
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the name of the key type.
|
||||
*
|
||||
* @return The name.
|
||||
*/
|
||||
@NotNull
|
||||
public String name() {
|
||||
return name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a serializer for this key type.
|
||||
* Create new PersistentDataKeyType.
|
||||
*
|
||||
* @param handler The handler.
|
||||
* @param serializer The serializer.
|
||||
* @param name The name.
|
||||
*/
|
||||
public void registerSerializer(@NotNull final PersistentDataHandler handler,
|
||||
@NotNull final DataTypeSerializer<T> serializer) {
|
||||
this.serializers.put(handler, serializer);
|
||||
}
|
||||
private PersistentDataKeyType(@NotNull final String name) {
|
||||
VALUES.add(this);
|
||||
|
||||
/**
|
||||
* Get the serializer for a handler.
|
||||
*
|
||||
* @param handler The handler.
|
||||
* @return The serializer.
|
||||
*/
|
||||
@NotNull
|
||||
public DataTypeSerializer<T> getSerializer(@NotNull final PersistentDataHandler handler) {
|
||||
DataTypeSerializer<T> serializer = this.serializers.get(handler);
|
||||
|
||||
if (serializer == null) {
|
||||
throw new NoSuchElementException("No serializer for handler: " + handler);
|
||||
}
|
||||
|
||||
return serializer;
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -11,22 +11,19 @@ import org.bukkit.entity.Raider;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Allows an entity to attack the closest target within a given subset of specific target types.
|
||||
*
|
||||
* @param targets The type of entities to attack.
|
||||
* @param target The type of entities to attack.
|
||||
* @param checkVisibility If visibility should be checked.
|
||||
* @param checkCanNavigate If navigation should be checked.
|
||||
* @param reciprocalChance 1 in reciprocalChance chance of not activating on any tick.
|
||||
* @param targetFilter The filter for targets to match.
|
||||
*/
|
||||
public record TargetGoalNearestAttackable(
|
||||
@NotNull Set<TestableEntity> targets,
|
||||
@NotNull TestableEntity target,
|
||||
boolean checkVisibility,
|
||||
boolean checkCanNavigate,
|
||||
int reciprocalChance,
|
||||
@@ -35,16 +32,16 @@ public record TargetGoalNearestAttackable(
|
||||
/**
|
||||
* Create a new target goal.
|
||||
*
|
||||
* @param targets The type of entities to attack.
|
||||
* @param target The type of entities to attack.
|
||||
* @param checkVisibility If visibility should be checked.
|
||||
* @param checkCanNavigate If navigation should be checked.
|
||||
* @param reciprocalChance 1 in reciprocalChance chance of not activating on any tick.
|
||||
*/
|
||||
public TargetGoalNearestAttackable(@NotNull final Set<TestableEntity> targets,
|
||||
public TargetGoalNearestAttackable(@NotNull final TestableEntity target,
|
||||
final boolean checkVisibility,
|
||||
final boolean checkCanNavigate,
|
||||
final int reciprocalChance) {
|
||||
this(targets, checkVisibility, checkCanNavigate, reciprocalChance, it -> true);
|
||||
this(target, checkVisibility, checkCanNavigate, reciprocalChance, it -> true);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -68,15 +65,11 @@ public record TargetGoalNearestAttackable(
|
||||
return null;
|
||||
}
|
||||
|
||||
Set<TestableEntity> targets = config.getStrings("target").stream()
|
||||
.map(Entities::lookup)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
if (config.has("targetFilter")) {
|
||||
TestableEntity filter = Entities.lookup(config.getString("targetFilter"));
|
||||
|
||||
return new TargetGoalNearestAttackable(
|
||||
targets,
|
||||
Entities.lookup(config.getString("target")),
|
||||
config.getBool("checkVisibility"),
|
||||
config.getBool("checkCanNavigate"),
|
||||
config.getInt("reciprocalChance"),
|
||||
@@ -84,7 +77,7 @@ public record TargetGoalNearestAttackable(
|
||||
);
|
||||
} else {
|
||||
return new TargetGoalNearestAttackable(
|
||||
targets,
|
||||
Entities.lookup(config.getString("target")),
|
||||
config.getBool("checkVisibility"),
|
||||
config.getBool("checkCanNavigate"),
|
||||
config.getInt("reciprocalChance")
|
||||
|
||||
@@ -0,0 +1,24 @@
|
||||
package com.willfp.eco.core.integrations.customblocks;
|
||||
|
||||
import com.willfp.eco.core.integrations.Integration;
|
||||
|
||||
/**
|
||||
* Wrapper class for custom block integrations.
|
||||
*/
|
||||
public interface CustomBlocksIntegration extends Integration {
|
||||
/**
|
||||
* Register all the custom block for a specific plugin into eco.
|
||||
*
|
||||
* @see com.willfp.eco.core.blocks.Blocks
|
||||
*/
|
||||
default void registerAllBlocks() {
|
||||
// Override when needed.
|
||||
}
|
||||
|
||||
/**
|
||||
* Register {@link com.willfp.eco.core.blocks.provider.BlockProvider}s.
|
||||
*/
|
||||
default void registerProvider() {
|
||||
// Override when needed.
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,45 @@
|
||||
package com.willfp.eco.core.integrations.customblocks;
|
||||
|
||||
import com.willfp.eco.core.integrations.IntegrationRegistry;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
|
||||
/**
|
||||
* Class to handle custom block integrations.
|
||||
*/
|
||||
public final class CustomBlocksManager {
|
||||
/**
|
||||
* A set of all registered integrations.
|
||||
*/
|
||||
private static final IntegrationRegistry<CustomBlocksIntegration> REGISTRY = new IntegrationRegistry<>();
|
||||
|
||||
/**
|
||||
* Register a new integration.
|
||||
*
|
||||
* @param integration The integration to register.
|
||||
*/
|
||||
public static void register(@NotNull final CustomBlocksIntegration integration) {
|
||||
REGISTRY.register(integration);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register all the custom block for a specific plugin into eco.
|
||||
*
|
||||
* @see com.willfp.eco.core.blocks.Blocks
|
||||
*/
|
||||
public static void registerAllBlocks() {
|
||||
REGISTRY.forEachSafely(CustomBlocksIntegration::registerAllBlocks);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register all the custom blocks for a specific plugin into eco.
|
||||
*
|
||||
* @see com.willfp.eco.core.blocks.Blocks
|
||||
*/
|
||||
public static void registerProviders() {
|
||||
REGISTRY.forEachSafely(CustomBlocksIntegration::registerProvider);
|
||||
}
|
||||
|
||||
private CustomBlocksManager() {
|
||||
throw new UnsupportedOperationException("This is a utility class and cannot be instantiated");
|
||||
}
|
||||
}
|
||||
@@ -2,7 +2,6 @@ package com.willfp.eco.core.proxy;
|
||||
|
||||
import com.willfp.eco.core.version.Version;
|
||||
import org.bukkit.Bukkit;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
@@ -36,13 +35,6 @@ public final class ProxyConstants {
|
||||
throw new UnsupportedOperationException("This is a utility class and cannot be instantiated");
|
||||
}
|
||||
|
||||
private static String convertVersion(@NotNull final String version) {
|
||||
return switch (version) {
|
||||
case "v1_21_1" -> "v1_21";
|
||||
default -> version;
|
||||
};
|
||||
}
|
||||
|
||||
static {
|
||||
String currentMinecraftVersion = Bukkit.getServer().getBukkitVersion().split("-")[0];
|
||||
String nmsVersion;
|
||||
@@ -53,6 +45,6 @@ public final class ProxyConstants {
|
||||
nmsVersion = "v" + currentMinecraftVersion.replace(".", "_");
|
||||
}
|
||||
|
||||
NMS_VERSION = convertVersion(nmsVersion);
|
||||
NMS_VERSION = nmsVersion;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,10 +3,7 @@
|
||||
package com.willfp.eco.core.entities
|
||||
|
||||
import com.willfp.eco.core.entities.ai.EntityController
|
||||
import com.willfp.eco.core.items.Items
|
||||
import com.willfp.eco.core.items.TestableItem
|
||||
import org.bukkit.entity.Mob
|
||||
import org.bukkit.inventory.ItemStack
|
||||
|
||||
/** @see EntityController.getFor */
|
||||
val <T : Mob> T.controller: EntityController<T>
|
||||
|
||||
@@ -3,7 +3,7 @@ version = rootProject.version
|
||||
|
||||
dependencies {
|
||||
compileOnly(project(":eco-core:core-backend"))
|
||||
compileOnly("io.papermc.paper:paper-api:1.21.1-R0.1-SNAPSHOT")
|
||||
compileOnly("io.papermc.paper:paper-api:1.21-R0.1-SNAPSHOT")
|
||||
}
|
||||
|
||||
tasks {
|
||||
|
||||
@@ -7,7 +7,7 @@ dependencies {
|
||||
implementation("org.objenesis:objenesis:3.2")
|
||||
|
||||
compileOnly("io.papermc.paper:paper-api:1.20.2-R0.1-SNAPSHOT")
|
||||
compileOnly("me.clip:placeholderapi:2.11.6")
|
||||
compileOnly("me.clip:placeholderapi:2.11.4")
|
||||
compileOnly("net.kyori:adventure-text-minimessage:4.10.0")
|
||||
compileOnly("net.kyori:adventure-platform-bukkit:4.1.0")
|
||||
compileOnly("org.yaml:snakeyaml:1.33")
|
||||
|
||||
@@ -14,16 +14,12 @@ object ArgParserEnchantment : LookupArgParser {
|
||||
val enchants = mutableMapOf<Enchantment, Int>()
|
||||
|
||||
for (arg in args) {
|
||||
try {
|
||||
val argSplit = arg.split(":")
|
||||
val argSplit = arg.split(":")
|
||||
|
||||
val enchant = Enchantment.getByKey(NamespacedKey.minecraft(argSplit[0].lowercase())) ?: continue
|
||||
val level = argSplit.getOrNull(1)?.toIntOrNull() ?: enchant.maxLevel
|
||||
val enchant = Enchantment.getByKey(NamespacedKey.minecraft(argSplit[0].lowercase())) ?: continue
|
||||
val level = argSplit.getOrNull(1)?.toIntOrNull() ?: enchant.maxLevel
|
||||
|
||||
enchants[enchant] = level
|
||||
} catch (e: IllegalArgumentException) {
|
||||
continue
|
||||
}
|
||||
enchants[enchant] = level
|
||||
}
|
||||
|
||||
if (enchants.isEmpty()) {
|
||||
|
||||
@@ -12,7 +12,7 @@ object ParticleFactoryRGB : ParticleFactory {
|
||||
if (Prerequisite.HAS_1_20_5.isMet) {
|
||||
Particle.valueOf("DUST")
|
||||
} else {
|
||||
Particle.valueOf("REDSTONE")
|
||||
Particle.valueOf("REDSTONE_DUST")
|
||||
}
|
||||
}.getOrNull()
|
||||
|
||||
|
||||
@@ -48,7 +48,7 @@ var SkullMeta.texture: String?
|
||||
* at java.lang.String.checkBoundsBeginEnd(String.java:4604) ~[?:?]
|
||||
* at java.lang.String.substring(String.java:2707) ~[?:?]
|
||||
* at java.lang.String.substring(String.java:2680) ~[?:?]
|
||||
* at com.willfp.eco.internal.spigot.proxy.v1_19_R1.common.SkullKt.setTexture(ModernSkull.kt:36)
|
||||
* at com.willfp.eco.internal.spigot.proxy.v1_19_R1.common.SkullKt.setTexture(Skull.kt:36)
|
||||
*
|
||||
if (base64.length < 20) {
|
||||
return
|
||||
|
||||
@@ -54,6 +54,8 @@ class EcoEntityController<T : Mob>(
|
||||
priority, goal.getGoalFactory()?.create(goal, nms) ?: return this
|
||||
)
|
||||
|
||||
nms.targetSelector
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package com.willfp.eco.internal.spigot.proxy.common.ai.target
|
||||
|
||||
import com.willfp.eco.core.entities.ai.target.TargetGoalNearestAttackable
|
||||
import com.willfp.eco.core.lookup.matches
|
||||
import com.willfp.eco.internal.spigot.proxy.common.ai.TargetGoalFactory
|
||||
import com.willfp.eco.internal.spigot.proxy.common.toBukkitEntity
|
||||
import net.minecraft.world.entity.LivingEntity
|
||||
@@ -18,9 +17,7 @@ object NearestAttackableGoalFactory : TargetGoalFactory<TargetGoalNearestAttacka
|
||||
apiGoal.checkVisibility,
|
||||
apiGoal.checkCanNavigate,
|
||||
) {
|
||||
val bukkit = it.toBukkitEntity()
|
||||
|
||||
apiGoal.targetFilter.test(bukkit) && apiGoal.targets.any { t -> t.matches(bukkit) }
|
||||
apiGoal.targetFilter.test(it.toBukkitEntity()) && apiGoal.target.matches(it.toBukkitEntity())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,19 +3,12 @@ package com.willfp.eco.internal.spigot.proxy.common.packet.display
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.core.packet.PacketEvent
|
||||
import com.willfp.eco.core.packet.PacketListener
|
||||
import com.willfp.eco.internal.spigot.proxy.common.toResourceLocation
|
||||
import com.willfp.eco.util.namespacedKeyOf
|
||||
import net.minecraft.network.protocol.game.ClientboundPlaceGhostRecipePacket
|
||||
import net.minecraft.resources.ResourceLocation
|
||||
|
||||
class PacketAutoRecipe(
|
||||
private val plugin: EcoPlugin
|
||||
) : PacketListener {
|
||||
private val fKey = ClientboundPlaceGhostRecipePacket::class.java
|
||||
.declaredFields
|
||||
.first { it.type == ResourceLocation::class.java }
|
||||
.apply { isAccessible = true }
|
||||
|
||||
override fun onSend(event: PacketEvent) {
|
||||
val packet = event.packet.handle as? ClientboundPlaceGhostRecipePacket ?: return
|
||||
|
||||
@@ -31,7 +24,9 @@ class PacketAutoRecipe(
|
||||
return
|
||||
}
|
||||
|
||||
val fKey = packet.javaClass.getDeclaredField("b")
|
||||
fKey.isAccessible = true
|
||||
val key = fKey[packet] as ResourceLocation
|
||||
fKey[packet] = namespacedKeyOf(key.namespace, key.path + "_displayed").toResourceLocation()
|
||||
fKey[packet] = ResourceLocation(key.namespace, key.path + "_displayed")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ version = rootProject.version
|
||||
|
||||
dependencies {
|
||||
compileOnly(project(":eco-core:core-nms:common"))
|
||||
paperweight.paperDevBundle("1.21.1-R0.1-SNAPSHOT")
|
||||
paperweight.paperDevBundle("1.21-R0.1-SNAPSHOT")
|
||||
}
|
||||
|
||||
tasks {
|
||||
|
||||
@@ -1,55 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.proxy.common.modern
|
||||
|
||||
import com.mojang.authlib.GameProfile
|
||||
import com.mojang.authlib.properties.Property
|
||||
import net.minecraft.world.item.component.ResolvableProfile
|
||||
import org.bukkit.inventory.meta.SkullMeta
|
||||
import java.lang.reflect.Field
|
||||
import java.lang.reflect.Method
|
||||
import java.util.UUID
|
||||
|
||||
private lateinit var setProfile: Method
|
||||
private lateinit var profile: Field
|
||||
private lateinit var value: Field
|
||||
|
||||
var SkullMeta.texture: String?
|
||||
get() {
|
||||
if (!::value.isInitialized) {
|
||||
// Doing it this way because Property was changed to be a record and this is
|
||||
// a quick hack to get around that
|
||||
value = Property::class.java.getDeclaredField("value")
|
||||
value.isAccessible = true
|
||||
}
|
||||
|
||||
if (!::profile.isInitialized) {
|
||||
// Assumes instance of CraftMetaSkull; package-private class so can't do manual type check
|
||||
profile = this.javaClass.getDeclaredField("profile")
|
||||
profile.isAccessible = true
|
||||
}
|
||||
|
||||
val profile = profile[this] as ResolvableProfile? ?: return null
|
||||
val properties = profile.properties ?: return null
|
||||
val props = properties["textures"] ?: return null
|
||||
val prop = props.toMutableList().firstOrNull() ?: return null
|
||||
return value[prop] as String?
|
||||
}
|
||||
set(base64) {
|
||||
if (!::setProfile.isInitialized) {
|
||||
// Same here; that's why I can't delegate to a lazy initializer
|
||||
setProfile = this.javaClass.getDeclaredMethod("setProfile", ResolvableProfile::class.java)
|
||||
setProfile.isAccessible = true
|
||||
}
|
||||
|
||||
if (base64 == null || base64.length < 20) {
|
||||
setProfile.invoke(this, null)
|
||||
} else {
|
||||
val uuid = UUID(
|
||||
base64.substring(base64.length - 20).hashCode().toLong(),
|
||||
base64.substring(base64.length - 10).hashCode().toLong()
|
||||
)
|
||||
val profile = GameProfile(uuid, "eco")
|
||||
profile.properties.put("textures", Property("textures", base64))
|
||||
val resolvable = ResolvableProfile(profile)
|
||||
setProfile.invoke(this, resolvable)
|
||||
}
|
||||
}
|
||||
@@ -25,7 +25,6 @@ import net.minecraft.util.Unit
|
||||
import net.minecraft.world.item.component.CustomData
|
||||
import net.minecraft.world.item.component.CustomModelData
|
||||
import net.minecraft.world.item.component.ItemLore
|
||||
import net.minecraft.world.item.enchantment.ItemEnchantments
|
||||
import org.bukkit.Bukkit
|
||||
import org.bukkit.craftbukkit.CraftRegistry
|
||||
import org.bukkit.craftbukkit.CraftServer
|
||||
@@ -54,7 +53,7 @@ class NewEcoFastItemStack(
|
||||
private val pdc = (handle.get(DataComponents.CUSTOM_DATA)?.copyTag() ?: CompoundTag()).makePdc()
|
||||
|
||||
override fun getEnchants(checkStored: Boolean): Map<Enchantment, Int> {
|
||||
val enchantments = handle.get(DataComponents.ENCHANTMENTS) ?: ItemEnchantments.EMPTY
|
||||
val enchantments = handle.get(DataComponents.ENCHANTMENTS) ?: return emptyMap()
|
||||
|
||||
val map = mutableMapOf<Enchantment, Int>()
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ version = rootProject.version
|
||||
dependencies {
|
||||
implementation(project(":eco-core:core-nms:modern"))
|
||||
implementation(project(":eco-core:core-nms:common"))
|
||||
paperweight.paperDevBundle("1.21.1-R0.1-SNAPSHOT")
|
||||
paperweight.paperDevBundle("1.21-R0.1-SNAPSHOT")
|
||||
|
||||
implementation("net.kyori:adventure-text-minimessage:4.11.0") {
|
||||
version {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
package com.willfp.eco.internal.spigot.proxy.v1_21
|
||||
|
||||
import com.willfp.eco.internal.spigot.proxy.SkullProxy
|
||||
import com.willfp.eco.internal.spigot.proxy.common.modern.texture
|
||||
import com.willfp.eco.internal.spigot.proxy.common.texture
|
||||
import org.bukkit.inventory.meta.SkullMeta
|
||||
|
||||
class Skull : SkullProxy {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import org.gradle.internal.impldep.org.junit.experimental.categories.Categories.CategoryFilter.exclude
|
||||
|
||||
group = "com.willfp"
|
||||
version = rootProject.version
|
||||
|
||||
@@ -7,13 +9,16 @@ dependencies {
|
||||
|
||||
// Libraries
|
||||
implementation("com.github.WillFP:Crunch:1.1.3")
|
||||
implementation("com.mysql:mysql-connector-j:8.4.0")
|
||||
implementation("org.jetbrains.exposed:exposed-core:0.53.0")
|
||||
implementation("org.jetbrains.exposed:exposed-jdbc:0.53.0")
|
||||
implementation("com.zaxxer:HikariCP:5.1.0")
|
||||
implementation("mysql:mysql-connector-java:8.0.25")
|
||||
implementation("org.jetbrains.exposed:exposed-core:0.37.3")
|
||||
implementation("org.jetbrains.exposed:exposed-dao:0.37.3")
|
||||
implementation("org.jetbrains.exposed:exposed-jdbc:0.37.3")
|
||||
implementation("com.zaxxer:HikariCP:5.0.0")
|
||||
implementation("net.kyori:adventure-platform-bukkit:4.1.0")
|
||||
implementation("org.javassist:javassist:3.29.2-GA")
|
||||
implementation("org.mongodb:mongodb-driver-kotlin-coroutine:5.1.2")
|
||||
implementation("org.mongodb:mongodb-driver-kotlin-coroutine:5.0.0")
|
||||
implementation("org.jetbrains.kotlinx:kotlinx-serialization-core:1.5.1")
|
||||
implementation("org.mongodb:bson-kotlinx:5.0.0")
|
||||
implementation("com.moandjiezana.toml:toml4j:0.7.2") {
|
||||
exclude(group = "com.google.code.gson", module = "gson")
|
||||
}
|
||||
@@ -24,7 +29,7 @@ dependencies {
|
||||
compileOnly("io.papermc.paper:paper-api:1.20.2-R0.1-SNAPSHOT")
|
||||
|
||||
// Plugin dependencies
|
||||
compileOnly("com.comphenix.protocol:ProtocolLib:5.1.0")
|
||||
compileOnly("com.comphenix.protocol:ProtocolLib:5.0.0-SNAPSHOT")
|
||||
compileOnly("com.sk89q.worldguard:worldguard-bukkit:7.0.7-SNAPSHOT")
|
||||
compileOnly("com.github.TechFortress:GriefPrevention:16.17.1")
|
||||
compileOnly("com.github.TownyAdvanced:Towny:0.99.5.21") {
|
||||
@@ -35,7 +40,7 @@ dependencies {
|
||||
compileOnly("fr.neatmonster:nocheatplus:3.16.1-SNAPSHOT")
|
||||
compileOnly("com.github.jiangdashao:matrix-api-repo:317d4635fd")
|
||||
compileOnly("com.gmail.nossr50.mcMMO:mcMMO:2.1.202")
|
||||
compileOnly("me.clip:placeholderapi:2.11.6")
|
||||
compileOnly("me.clip:placeholderapi:2.11.4")
|
||||
compileOnly("com.github.brcdev-minecraft:shopgui-api:3.0.0")
|
||||
compileOnly("com.github.LoneDev6:API-ItemsAdder:2.4.7")
|
||||
compileOnly("com.arcaniax:HeadDatabase-API:1.3.1")
|
||||
@@ -71,6 +76,7 @@ dependencies {
|
||||
tasks {
|
||||
shadowJar {
|
||||
minimize {
|
||||
exclude(dependency("org.litote.kmongo:kmongo-coroutine:.*"))
|
||||
exclude(dependency("org.jetbrains.exposed:.*:.*"))
|
||||
exclude(dependency("com.willfp:ModelEngineBridge:.*"))
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import com.willfp.eco.core.Eco
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.core.PluginLike
|
||||
import com.willfp.eco.core.PluginProps
|
||||
import com.willfp.eco.core.Prerequisite
|
||||
import com.willfp.eco.core.command.CommandBase
|
||||
import com.willfp.eco.core.command.PluginCommandBase
|
||||
import com.willfp.eco.core.config.ConfigType
|
||||
@@ -43,7 +44,8 @@ import com.willfp.eco.internal.proxy.EcoProxyFactory
|
||||
import com.willfp.eco.internal.scheduling.EcoScheduler
|
||||
import com.willfp.eco.internal.spigot.data.DataYml
|
||||
import com.willfp.eco.internal.spigot.data.KeyRegistry
|
||||
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
|
||||
import com.willfp.eco.internal.spigot.data.ProfileHandler
|
||||
import com.willfp.eco.internal.spigot.data.storage.HandlerType
|
||||
import com.willfp.eco.internal.spigot.integrations.bstats.MetricHandler
|
||||
import com.willfp.eco.internal.spigot.math.DelegatedExpressionHandler
|
||||
import com.willfp.eco.internal.spigot.math.ImmediatePlaceholderTranslationExpressionHandler
|
||||
@@ -72,7 +74,7 @@ import org.bukkit.inventory.ItemStack
|
||||
import org.bukkit.inventory.meta.SkullMeta
|
||||
import org.bukkit.persistence.PersistentDataContainer
|
||||
import java.net.URLClassLoader
|
||||
import java.util.UUID
|
||||
import java.util.*
|
||||
|
||||
private val loadedEcoPlugins = mutableMapOf<String, EcoPlugin>()
|
||||
|
||||
@@ -80,7 +82,10 @@ private val loadedEcoPlugins = mutableMapOf<String, EcoPlugin>()
|
||||
class EcoImpl : EcoSpigotPlugin(), Eco {
|
||||
override val dataYml = DataYml(this)
|
||||
|
||||
override val profileHandler = ProfileHandler(this)
|
||||
override val profileHandler = ProfileHandler(
|
||||
HandlerType.valueOf(this.configYml.getString("data-handler").uppercase()),
|
||||
this
|
||||
)
|
||||
|
||||
init {
|
||||
getProxy(CommonsInitializerProxy::class.java).init(this)
|
||||
@@ -285,10 +290,10 @@ class EcoImpl : EcoSpigotPlugin(), Eco {
|
||||
bukkitAudiences
|
||||
|
||||
override fun getServerProfile() =
|
||||
profileHandler.getServerProfile()
|
||||
profileHandler.loadServerProfile()
|
||||
|
||||
override fun loadPlayerProfile(uuid: UUID) =
|
||||
profileHandler.getPlayerProfile(uuid)
|
||||
profileHandler.load(uuid)
|
||||
|
||||
override fun createDummyEntity(location: Location): Entity =
|
||||
getProxy(DummyEntityFactoryProxy::class.java).createDummyEntity(location)
|
||||
|
||||
@@ -9,6 +9,7 @@ import com.willfp.eco.core.integrations.IntegrationLoader
|
||||
import com.willfp.eco.core.integrations.afk.AFKManager
|
||||
import com.willfp.eco.core.integrations.anticheat.AnticheatManager
|
||||
import com.willfp.eco.core.integrations.antigrief.AntigriefManager
|
||||
import com.willfp.eco.core.integrations.customblocks.CustomBlocksManager
|
||||
import com.willfp.eco.core.integrations.customentities.CustomEntitiesManager
|
||||
import com.willfp.eco.core.integrations.customitems.CustomItemsManager
|
||||
import com.willfp.eco.core.integrations.economy.EconomyManager
|
||||
@@ -61,10 +62,11 @@ import com.willfp.eco.internal.price.PriceFactoryXP
|
||||
import com.willfp.eco.internal.price.PriceFactoryXPLevels
|
||||
import com.willfp.eco.internal.recipes.AutocrafterPatch
|
||||
import com.willfp.eco.internal.spigot.arrows.ArrowDataListener
|
||||
import com.willfp.eco.internal.spigot.data.DataListener
|
||||
import com.willfp.eco.internal.spigot.data.DataYml
|
||||
import com.willfp.eco.internal.spigot.data.PlayerBlockListener
|
||||
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
|
||||
import com.willfp.eco.internal.spigot.data.profiles.ProfileLoadListener
|
||||
import com.willfp.eco.internal.spigot.data.ProfileHandler
|
||||
import com.willfp.eco.internal.spigot.data.storage.ProfileSaver
|
||||
import com.willfp.eco.internal.spigot.drops.CollatedRunnable
|
||||
import com.willfp.eco.internal.spigot.eventlisteners.EntityDeathByEntityListeners
|
||||
import com.willfp.eco.internal.spigot.eventlisteners.NaturalExpGainListenersPaper
|
||||
@@ -100,6 +102,7 @@ import com.willfp.eco.internal.spigot.integrations.antigrief.AntigriefRPGHorses
|
||||
import com.willfp.eco.internal.spigot.integrations.antigrief.AntigriefSuperiorSkyblock2
|
||||
import com.willfp.eco.internal.spigot.integrations.antigrief.AntigriefTowny
|
||||
import com.willfp.eco.internal.spigot.integrations.antigrief.AntigriefWorldGuard
|
||||
import com.willfp.eco.internal.spigot.integrations.customblocks.CustomBlocksOraxen
|
||||
import com.willfp.eco.internal.spigot.integrations.customentities.CustomEntitiesMythicMobs
|
||||
import com.willfp.eco.internal.spigot.integrations.customitems.CustomItemsCustomCrafting
|
||||
import com.willfp.eco.internal.spigot.integrations.customitems.CustomItemsDenizen
|
||||
@@ -148,7 +151,7 @@ import org.bukkit.inventory.ItemStack
|
||||
|
||||
abstract class EcoSpigotPlugin : EcoPlugin() {
|
||||
abstract val dataYml: DataYml
|
||||
abstract val profileHandler: ProfileHandler
|
||||
protected abstract val profileHandler: ProfileHandler
|
||||
protected var bukkitAudiences: BukkitAudiences? = null
|
||||
|
||||
init {
|
||||
@@ -257,6 +260,9 @@ abstract class EcoSpigotPlugin : EcoPlugin() {
|
||||
// Init FIS
|
||||
this.getProxy(FastItemStackFactoryProxy::class.java).create(ItemStack(Material.AIR)).unwrap()
|
||||
|
||||
// Preload categorized persistent data keys
|
||||
profileHandler.initialize()
|
||||
|
||||
// Init adventure
|
||||
if (!Prerequisite.HAS_PAPER.isMet) {
|
||||
bukkitAudiences = BukkitAudiences.create(this)
|
||||
@@ -277,11 +283,14 @@ abstract class EcoSpigotPlugin : EcoPlugin() {
|
||||
override fun createTasks() {
|
||||
CollatedRunnable(this)
|
||||
|
||||
if (!profileHandler.migrateIfNecessary()) {
|
||||
profileHandler.profileWriter.startTickingAutosave()
|
||||
profileHandler.profileWriter.startTickingSaves()
|
||||
this.scheduler.runLater(3) {
|
||||
profileHandler.migrateIfNeeded()
|
||||
}
|
||||
|
||||
profileHandler.startAutosaving()
|
||||
|
||||
ProfileSaver(this, profileHandler).startTicking()
|
||||
|
||||
this.scheduler.runTimer(
|
||||
this.configYml.getInt("display-frame-ttl").toLong(),
|
||||
this.configYml.getInt("display-frame-ttl").toLong(),
|
||||
@@ -400,6 +409,9 @@ abstract class EcoSpigotPlugin : EcoPlugin() {
|
||||
// Placeholder
|
||||
IntegrationLoader("PlaceholderAPI") { PlaceholderManager.addIntegration(PlaceholderIntegrationPAPI()) },
|
||||
|
||||
// Custom Blocks
|
||||
IntegrationLoader("Oraxen") { CustomBlocksManager.register(CustomBlocksOraxen(this)) },
|
||||
|
||||
// Misc
|
||||
IntegrationLoader("mcMMO") { McmmoManager.register(McmmoIntegrationImpl()) },
|
||||
IntegrationLoader("Multiverse-Inventories") {
|
||||
@@ -420,7 +432,7 @@ abstract class EcoSpigotPlugin : EcoPlugin() {
|
||||
GUIListener(this),
|
||||
ArrowDataListener(this),
|
||||
ArmorChangeEventListeners(this),
|
||||
ProfileLoadListener(this, profileHandler),
|
||||
DataListener(this, profileHandler),
|
||||
PlayerBlockListener(this),
|
||||
ServerLocking
|
||||
)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
package com.willfp.eco.internal.spigot.data.profiles
|
||||
package com.willfp.eco.internal.spigot.data
|
||||
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.util.PlayerUtils
|
||||
@@ -9,18 +9,15 @@ import org.bukkit.event.player.PlayerJoinEvent
|
||||
import org.bukkit.event.player.PlayerLoginEvent
|
||||
import org.bukkit.event.player.PlayerQuitEvent
|
||||
|
||||
class ProfileLoadListener(
|
||||
class DataListener(
|
||||
private val plugin: EcoPlugin,
|
||||
private val handler: ProfileHandler
|
||||
) : Listener {
|
||||
@EventHandler(priority = EventPriority.LOWEST)
|
||||
fun onLogin(event: PlayerLoginEvent) {
|
||||
handler.unloadProfile(event.player.uniqueId)
|
||||
}
|
||||
|
||||
@EventHandler(priority = EventPriority.HIGHEST)
|
||||
fun onLeave(event: PlayerQuitEvent) {
|
||||
handler.unloadProfile(event.player.uniqueId)
|
||||
val profile = handler.accessLoadedProfile(event.player.uniqueId) ?: return
|
||||
handler.saveKeysFor(event.player.uniqueId, profile.data.keys)
|
||||
handler.unloadPlayer(event.player.uniqueId)
|
||||
}
|
||||
|
||||
@EventHandler
|
||||
@@ -29,4 +26,9 @@ class ProfileLoadListener(
|
||||
PlayerUtils.updateSavedDisplayName(event.player)
|
||||
}
|
||||
}
|
||||
|
||||
@EventHandler(priority = EventPriority.LOWEST)
|
||||
fun onLogin(event: PlayerLoginEvent) {
|
||||
handler.unloadPlayer(event.player.uniqueId)
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,110 @@
|
||||
package com.willfp.eco.internal.spigot.data
|
||||
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.core.data.PlayerProfile
|
||||
import com.willfp.eco.core.data.Profile
|
||||
import com.willfp.eco.core.data.ServerProfile
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.data.storage.DataHandler
|
||||
import com.willfp.eco.util.namespacedKeyOf
|
||||
import java.util.UUID
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
|
||||
abstract class EcoProfile(
|
||||
val data: MutableMap<PersistentDataKey<*>, Any>,
|
||||
val uuid: UUID,
|
||||
private val handler: DataHandler,
|
||||
private val localHandler: DataHandler
|
||||
) : Profile {
|
||||
override fun <T : Any> write(key: PersistentDataKey<T>, value: T) {
|
||||
this.data[key] = value
|
||||
|
||||
CHANGE_MAP.add(uuid)
|
||||
}
|
||||
|
||||
override fun <T : Any> read(key: PersistentDataKey<T>): T {
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
if (this.data.containsKey(key)) {
|
||||
return this.data[key] as T
|
||||
}
|
||||
|
||||
this.data[key] = if (key.isSavedLocally) {
|
||||
localHandler.read(uuid, key)
|
||||
} else {
|
||||
handler.read(uuid, key)
|
||||
} ?: key.defaultValue
|
||||
|
||||
return read(key)
|
||||
}
|
||||
|
||||
override fun equals(other: Any?): Boolean {
|
||||
if (other !is EcoProfile) {
|
||||
return false
|
||||
}
|
||||
|
||||
return this.uuid == other.uuid
|
||||
}
|
||||
|
||||
override fun hashCode(): Int {
|
||||
return this.uuid.hashCode()
|
||||
}
|
||||
|
||||
companion object {
|
||||
val CHANGE_MAP: MutableSet<UUID> = ConcurrentHashMap.newKeySet()
|
||||
}
|
||||
}
|
||||
|
||||
class EcoPlayerProfile(
|
||||
data: MutableMap<PersistentDataKey<*>, Any>,
|
||||
uuid: UUID,
|
||||
handler: DataHandler,
|
||||
localHandler: DataHandler
|
||||
) : EcoProfile(data, uuid, handler, localHandler), PlayerProfile {
|
||||
override fun toString(): String {
|
||||
return "EcoPlayerProfile{uuid=$uuid}"
|
||||
}
|
||||
}
|
||||
|
||||
private val serverIDKey = PersistentDataKey(
|
||||
namespacedKeyOf("eco", "server_id"),
|
||||
PersistentDataKeyType.STRING,
|
||||
""
|
||||
)
|
||||
|
||||
private val localServerIDKey = PersistentDataKey(
|
||||
namespacedKeyOf("eco", "local_server_id"),
|
||||
PersistentDataKeyType.STRING,
|
||||
""
|
||||
)
|
||||
|
||||
class EcoServerProfile(
|
||||
data: MutableMap<PersistentDataKey<*>, Any>,
|
||||
handler: DataHandler,
|
||||
localHandler: DataHandler
|
||||
) : EcoProfile(data, serverProfileUUID, handler, localHandler), ServerProfile {
|
||||
override fun getServerID(): String {
|
||||
if (this.read(serverIDKey).isBlank()) {
|
||||
this.write(serverIDKey, UUID.randomUUID().toString())
|
||||
}
|
||||
|
||||
return this.read(serverIDKey)
|
||||
}
|
||||
|
||||
override fun getLocalServerID(): String {
|
||||
if (this.read(localServerIDKey).isBlank()) {
|
||||
this.write(localServerIDKey, UUID.randomUUID().toString())
|
||||
}
|
||||
|
||||
return this.read(localServerIDKey)
|
||||
}
|
||||
|
||||
override fun toString(): String {
|
||||
return "EcoServerProfile"
|
||||
}
|
||||
}
|
||||
|
||||
private val PersistentDataKey<*>.isSavedLocally: Boolean
|
||||
get() = this == localServerIDKey
|
||||
|| EcoPlugin.getPlugin(this.key.namespace)?.isUsingLocalStorage == true
|
||||
|| this.isLocal
|
||||
@@ -1,20 +1,55 @@
|
||||
package com.willfp.eco.internal.spigot.data
|
||||
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import org.bukkit.NamespacedKey
|
||||
import java.math.BigDecimal
|
||||
|
||||
object KeyRegistry {
|
||||
private val registry = mutableMapOf<NamespacedKey, PersistentDataKey<*>>()
|
||||
|
||||
fun registerKey(key: PersistentDataKey<*>) {
|
||||
if (key.defaultValue == null) {
|
||||
throw IllegalArgumentException("Default value cannot be null!")
|
||||
if (this.registry.containsKey(key.key)) {
|
||||
this.registry.remove(key.key)
|
||||
}
|
||||
|
||||
validateKey(key)
|
||||
|
||||
this.registry[key.key] = key
|
||||
}
|
||||
|
||||
fun getRegisteredKeys(): Set<PersistentDataKey<*>> {
|
||||
return registry.values.toSet()
|
||||
fun getRegisteredKeys(): MutableSet<PersistentDataKey<*>> {
|
||||
return registry.values.toMutableSet()
|
||||
}
|
||||
|
||||
private fun <T> validateKey(key: PersistentDataKey<T>) {
|
||||
val default = key.defaultValue
|
||||
|
||||
when (key.type) {
|
||||
PersistentDataKeyType.INT -> if (default !is Int) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be Int")
|
||||
}
|
||||
PersistentDataKeyType.DOUBLE -> if (default !is Double) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be Double")
|
||||
}
|
||||
PersistentDataKeyType.BOOLEAN -> if (default !is Boolean) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be Boolean")
|
||||
}
|
||||
PersistentDataKeyType.STRING -> if (default !is String) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be String")
|
||||
}
|
||||
PersistentDataKeyType.STRING_LIST -> if (default !is List<*> || default.firstOrNull() !is String?) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be String List")
|
||||
}
|
||||
PersistentDataKeyType.CONFIG -> if (default !is Config) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be Config")
|
||||
}
|
||||
PersistentDataKeyType.BIG_DECIMAL -> if (default !is BigDecimal) {
|
||||
throw IllegalArgumentException("Invalid Data Type! Should be BigDecimal")
|
||||
}
|
||||
|
||||
else -> throw NullPointerException("Null value found!")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,185 @@
|
||||
package com.willfp.eco.internal.spigot.data
|
||||
|
||||
import com.willfp.eco.core.data.PlayerProfile
|
||||
import com.willfp.eco.core.data.Profile
|
||||
import com.willfp.eco.core.data.ServerProfile
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.profile
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.ServerLocking
|
||||
import com.willfp.eco.internal.spigot.data.storage.DataHandler
|
||||
import com.willfp.eco.internal.spigot.data.storage.HandlerType
|
||||
import com.willfp.eco.internal.spigot.data.storage.MongoDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.storage.MySQLDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.storage.YamlDataHandler
|
||||
import org.bukkit.Bukkit
|
||||
import java.util.UUID
|
||||
|
||||
val serverProfileUUID = UUID(0, 0)
|
||||
|
||||
class ProfileHandler(
|
||||
private val type: HandlerType,
|
||||
private val plugin: EcoSpigotPlugin
|
||||
) {
|
||||
private val loaded = mutableMapOf<UUID, EcoProfile>()
|
||||
|
||||
private val localHandler = YamlDataHandler(plugin, this)
|
||||
|
||||
val handler: DataHandler = when (type) {
|
||||
HandlerType.YAML -> localHandler
|
||||
HandlerType.MYSQL -> MySQLDataHandler(plugin, this)
|
||||
HandlerType.MONGO -> MongoDataHandler(plugin, this)
|
||||
}
|
||||
|
||||
fun accessLoadedProfile(uuid: UUID): EcoProfile? =
|
||||
loaded[uuid]
|
||||
|
||||
fun loadGenericProfile(uuid: UUID): Profile {
|
||||
val found = loaded[uuid]
|
||||
if (found != null) {
|
||||
return found
|
||||
}
|
||||
|
||||
val data = mutableMapOf<PersistentDataKey<*>, Any>()
|
||||
|
||||
val profile = if (uuid == serverProfileUUID)
|
||||
EcoServerProfile(data, handler, localHandler) else EcoPlayerProfile(data, uuid, handler, localHandler)
|
||||
|
||||
loaded[uuid] = profile
|
||||
return profile
|
||||
}
|
||||
|
||||
fun load(uuid: UUID): PlayerProfile {
|
||||
return loadGenericProfile(uuid) as PlayerProfile
|
||||
}
|
||||
|
||||
fun loadServerProfile(): ServerProfile {
|
||||
return loadGenericProfile(serverProfileUUID) as ServerProfile
|
||||
}
|
||||
|
||||
fun saveKeysFor(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
|
||||
val profile = accessLoadedProfile(uuid) ?: return
|
||||
val map = mutableMapOf<PersistentDataKey<*>, Any>()
|
||||
|
||||
for (key in keys) {
|
||||
map[key] = profile.data[key] ?: continue
|
||||
}
|
||||
|
||||
handler.saveKeysFor(uuid, map)
|
||||
|
||||
// Don't save to local handler if it's the same handler.
|
||||
if (localHandler != handler) {
|
||||
localHandler.saveKeysFor(uuid, map)
|
||||
}
|
||||
}
|
||||
|
||||
fun unloadPlayer(uuid: UUID) {
|
||||
loaded.remove(uuid)
|
||||
}
|
||||
|
||||
fun save() {
|
||||
handler.save()
|
||||
|
||||
if (localHandler != handler) {
|
||||
localHandler.save()
|
||||
}
|
||||
}
|
||||
|
||||
fun migrateIfNeeded() {
|
||||
if (!plugin.configYml.getBool("perform-data-migration")) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!plugin.dataYml.has("previous-handler")) {
|
||||
plugin.dataYml.set("previous-handler", type.name)
|
||||
plugin.dataYml.save()
|
||||
}
|
||||
|
||||
|
||||
val previousHandlerType = HandlerType.valueOf(plugin.dataYml.getString("previous-handler"))
|
||||
|
||||
if (previousHandlerType == type) {
|
||||
return
|
||||
}
|
||||
|
||||
val previousHandler = when (previousHandlerType) {
|
||||
HandlerType.YAML -> YamlDataHandler(plugin, this)
|
||||
HandlerType.MYSQL -> MySQLDataHandler(plugin, this)
|
||||
HandlerType.MONGO -> MongoDataHandler(plugin, this)
|
||||
}
|
||||
|
||||
ServerLocking.lock("Migrating player data! Check console for more information.")
|
||||
|
||||
plugin.logger.info("eco has detected a change in data handler!")
|
||||
plugin.logger.info("Migrating server data from ${previousHandlerType.name} to ${type.name}")
|
||||
plugin.logger.info("This will take a while!")
|
||||
|
||||
plugin.logger.info("Initializing previous handler...")
|
||||
previousHandler.initialize()
|
||||
|
||||
val players = Bukkit.getOfflinePlayers().map { it.uniqueId }
|
||||
|
||||
plugin.logger.info("Found data for ${players.size} players!")
|
||||
|
||||
/*
|
||||
Declared here as its own function to be able to use T.
|
||||
*/
|
||||
fun <T : Any> migrateKey(uuid: UUID, key: PersistentDataKey<T>, from: DataHandler, to: DataHandler) {
|
||||
val previous: T? = from.read(uuid, key)
|
||||
if (previous != null) {
|
||||
Bukkit.getOfflinePlayer(uuid).profile.write(key, previous) // Nope, no idea.
|
||||
to.write(uuid, key, previous)
|
||||
}
|
||||
}
|
||||
|
||||
var i = 1
|
||||
for (uuid in players) {
|
||||
plugin.logger.info("Migrating data for $uuid... ($i / ${players.size})")
|
||||
for (key in PersistentDataKey.values()) {
|
||||
// Why this? Because known points *really* likes to break things with the legacy MySQL handler.
|
||||
if (key.key.key == "known_points") {
|
||||
continue
|
||||
}
|
||||
|
||||
try {
|
||||
migrateKey(uuid, key, previousHandler, handler)
|
||||
} catch (e: Exception) {
|
||||
plugin.logger.info("Could not migrate ${key.key} for $uuid! This is probably because they do not have any data.")
|
||||
}
|
||||
}
|
||||
|
||||
i++
|
||||
}
|
||||
|
||||
plugin.logger.info("Saving new data...")
|
||||
handler.save()
|
||||
plugin.logger.info("Updating previous handler...")
|
||||
plugin.dataYml.set("previous-handler", type.name)
|
||||
plugin.dataYml.save()
|
||||
plugin.logger.info("The server will now automatically be restarted...")
|
||||
|
||||
ServerLocking.unlock()
|
||||
|
||||
Bukkit.getServer().shutdown()
|
||||
}
|
||||
|
||||
fun initialize() {
|
||||
handler.initialize()
|
||||
if (localHandler != handler) {
|
||||
localHandler.initialize()
|
||||
}
|
||||
}
|
||||
|
||||
fun startAutosaving() {
|
||||
if (!plugin.configYml.getBool("yaml.autosave")) {
|
||||
return
|
||||
}
|
||||
|
||||
val interval = plugin.configYml.getInt("yaml.autosave-interval") * 20L
|
||||
|
||||
plugin.scheduler.runTimer(20, interval) {
|
||||
handler.saveAsync()
|
||||
localHandler.saveAsync()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,40 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.handlers
|
||||
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler
|
||||
import com.willfp.eco.core.registry.KRegistrable
|
||||
import com.willfp.eco.core.registry.Registry
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.MongoDBPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.MySQLPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.YamlPersistentDataHandler
|
||||
|
||||
abstract class PersistentDataHandlerFactory(
|
||||
override val id: String
|
||||
): KRegistrable {
|
||||
abstract fun create(plugin: EcoSpigotPlugin): PersistentDataHandler
|
||||
}
|
||||
|
||||
object PersistentDataHandlers: Registry<PersistentDataHandlerFactory>() {
|
||||
init {
|
||||
register(object : PersistentDataHandlerFactory("yaml") {
|
||||
override fun create(plugin: EcoSpigotPlugin) =
|
||||
YamlPersistentDataHandler(plugin)
|
||||
})
|
||||
|
||||
register(object : PersistentDataHandlerFactory("mysql") {
|
||||
override fun create(plugin: EcoSpigotPlugin) =
|
||||
MySQLPersistentDataHandler(plugin.configYml.getSubsection("mysql"))
|
||||
})
|
||||
|
||||
register(object : PersistentDataHandlerFactory("mongodb") {
|
||||
override fun create(plugin: EcoSpigotPlugin) =
|
||||
MongoDBPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
|
||||
})
|
||||
|
||||
// Configs should also accept "mongo"
|
||||
register(object : PersistentDataHandlerFactory("mongo") {
|
||||
override fun create(plugin: EcoSpigotPlugin) =
|
||||
MongoDBPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,142 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.handlers.impl
|
||||
|
||||
import com.mongodb.MongoClientSettings
|
||||
import com.mongodb.client.model.Filters
|
||||
import com.mongodb.kotlin.client.coroutine.MongoClient
|
||||
import com.willfp.eco.core.config.Configs
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.data.handlers.DataTypeSerializer
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
|
||||
import kotlinx.coroutines.flow.firstOrNull
|
||||
import kotlinx.coroutines.flow.toList
|
||||
import kotlinx.coroutines.runBlocking
|
||||
import org.bson.BsonArray
|
||||
import org.bson.BsonBoolean
|
||||
import org.bson.BsonDecimal128
|
||||
import org.bson.BsonDocument
|
||||
import org.bson.BsonDouble
|
||||
import org.bson.BsonInt32
|
||||
import org.bson.BsonString
|
||||
import org.bson.BsonValue
|
||||
import org.bson.codecs.configuration.CodecRegistries
|
||||
import org.bson.codecs.pojo.PojoCodecProvider
|
||||
import java.math.BigDecimal
|
||||
import java.util.UUID
|
||||
|
||||
class LegacyMongoDBPersistentDataHandler(
|
||||
config: Config
|
||||
) : PersistentDataHandler("legacy_mongodb") {
|
||||
private val codecRegistry = CodecRegistries.fromRegistries(
|
||||
MongoClientSettings.getDefaultCodecRegistry(),
|
||||
CodecRegistries.fromProviders(PojoCodecProvider.builder().automatic(true).build())
|
||||
)
|
||||
|
||||
private val client = MongoClient.create(config.getString("url"))
|
||||
private val database = client.getDatabase(config.getString("database"))
|
||||
|
||||
private val collection = database.getCollection<BsonDocument>("uuidprofile")
|
||||
.withCodecRegistry(codecRegistry)
|
||||
|
||||
init {
|
||||
PersistentDataKeyType.STRING.registerSerializer(this, object : LegacyMongoSerializer<String>() {
|
||||
override fun deserialize(value: BsonValue): String {
|
||||
return value.asString().value
|
||||
}
|
||||
})
|
||||
|
||||
PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : LegacyMongoSerializer<Boolean>() {
|
||||
override fun deserialize(value: BsonValue): Boolean {
|
||||
return value.asBoolean().value
|
||||
}
|
||||
})
|
||||
|
||||
PersistentDataKeyType.INT.registerSerializer(this, object : LegacyMongoSerializer<Int>() {
|
||||
override fun deserialize(value: BsonValue): Int {
|
||||
return value.asInt32().value
|
||||
}
|
||||
})
|
||||
|
||||
PersistentDataKeyType.DOUBLE.registerSerializer(this, object : LegacyMongoSerializer<Double>() {
|
||||
override fun deserialize(value: BsonValue): Double {
|
||||
return value.asDouble().value
|
||||
}
|
||||
})
|
||||
|
||||
PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : LegacyMongoSerializer<List<String>>() {
|
||||
override fun deserialize(value: BsonValue): List<String> {
|
||||
return value.asArray().values.map { it.asString().value }
|
||||
}
|
||||
})
|
||||
|
||||
PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : LegacyMongoSerializer<BigDecimal>() {
|
||||
override fun deserialize(value: BsonValue): BigDecimal {
|
||||
return value.asDecimal128().value.bigDecimalValue()
|
||||
}
|
||||
})
|
||||
|
||||
PersistentDataKeyType.CONFIG.registerSerializer(this, object : LegacyMongoSerializer<Config>() {
|
||||
private fun deserializeConfigValue(value: BsonValue): Any {
|
||||
return when (value) {
|
||||
is BsonString -> value.value
|
||||
is BsonInt32 -> value.value
|
||||
is BsonDouble -> value.value
|
||||
is BsonBoolean -> value.value
|
||||
is BsonDecimal128 -> value.value.bigDecimalValue()
|
||||
is BsonArray -> value.values.map { deserializeConfigValue(it) }
|
||||
is BsonDocument -> value.mapValues { (_, v) -> deserializeConfigValue(v) }
|
||||
|
||||
else -> throw IllegalArgumentException("Could not deserialize config value type ${value::class.simpleName}")
|
||||
}
|
||||
}
|
||||
|
||||
override fun deserialize(value: BsonValue): Config {
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
return Configs.fromMap(deserializeConfigValue(value.asDocument()) as Map<String, Any>)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
override fun getSavedUUIDs(): Set<UUID> {
|
||||
return runBlocking {
|
||||
collection.find().toList().map {
|
||||
UUID.fromString(it.getString("_id").value)
|
||||
}.toSet()
|
||||
}
|
||||
}
|
||||
|
||||
private abstract inner class LegacyMongoSerializer<T : Any> : DataTypeSerializer<T>() {
|
||||
override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
|
||||
return runBlocking {
|
||||
val filter = Filters.eq("_id", uuid.toString())
|
||||
|
||||
val profile = collection.find(filter)
|
||||
.firstOrNull() ?: return@runBlocking null
|
||||
|
||||
val dataMap = profile.getDocument("data")
|
||||
val value = dataMap[key.key.toString()] ?: return@runBlocking null
|
||||
|
||||
try {
|
||||
return@runBlocking deserialize(value)
|
||||
} catch (e: Exception) {
|
||||
null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
|
||||
throw UnsupportedOperationException("Legacy Mongo does not support writing")
|
||||
}
|
||||
|
||||
protected abstract fun deserialize(value: BsonValue): T
|
||||
}
|
||||
|
||||
object Factory: PersistentDataHandlerFactory("legacy_mongo") {
|
||||
override fun create(plugin: EcoSpigotPlugin): PersistentDataHandler {
|
||||
return LegacyMongoDBPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,106 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.handlers.impl
|
||||
|
||||
import com.willfp.eco.core.config.ConfigType
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.config.readConfig
|
||||
import com.willfp.eco.core.data.handlers.DataTypeSerializer
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
|
||||
import com.zaxxer.hikari.HikariConfig
|
||||
import com.zaxxer.hikari.HikariDataSource
|
||||
import org.jetbrains.exposed.dao.id.UUIDTable
|
||||
import org.jetbrains.exposed.sql.Database
|
||||
import org.jetbrains.exposed.sql.SchemaUtils
|
||||
import org.jetbrains.exposed.sql.selectAll
|
||||
import org.jetbrains.exposed.sql.transactions.transaction
|
||||
import java.math.BigDecimal
|
||||
import java.util.UUID
|
||||
|
||||
class LegacyMySQLPersistentDataHandler(
|
||||
config: Config
|
||||
) : PersistentDataHandler("legacy_mysql") {
|
||||
private val dataSource = HikariDataSource(HikariConfig().apply {
|
||||
driverClassName = "com.mysql.cj.jdbc.Driver"
|
||||
username = config.getString("user")
|
||||
password = config.getString("password")
|
||||
jdbcUrl = "jdbc:mysql://" +
|
||||
"${config.getString("host")}:" +
|
||||
"${config.getString("port")}/" +
|
||||
config.getString("database")
|
||||
maximumPoolSize = config.getInt("connections")
|
||||
})
|
||||
|
||||
private val database = Database.connect(dataSource)
|
||||
|
||||
private val table = object : UUIDTable("eco_data") {
|
||||
val data = text("json_data", eagerLoading = true)
|
||||
}
|
||||
|
||||
init {
|
||||
transaction(database) {
|
||||
SchemaUtils.create(table)
|
||||
}
|
||||
|
||||
PersistentDataKeyType.STRING.registerSerializer(this, LegacyMySQLSerializer<String>())
|
||||
PersistentDataKeyType.BOOLEAN.registerSerializer(this, LegacyMySQLSerializer<Boolean>())
|
||||
PersistentDataKeyType.INT.registerSerializer(this, LegacyMySQLSerializer<Int>())
|
||||
PersistentDataKeyType.DOUBLE.registerSerializer(this, LegacyMySQLSerializer<Double>())
|
||||
PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, LegacyMySQLSerializer<BigDecimal>())
|
||||
PersistentDataKeyType.CONFIG.registerSerializer(this, LegacyMySQLSerializer<Config>())
|
||||
PersistentDataKeyType.STRING_LIST.registerSerializer(this, LegacyMySQLSerializer<List<String>>())
|
||||
}
|
||||
|
||||
override fun getSavedUUIDs(): Set<UUID> {
|
||||
return transaction(database) {
|
||||
table.selectAll()
|
||||
.map { it[table.id] }
|
||||
.toSet()
|
||||
}.map { it.value }.toSet()
|
||||
}
|
||||
|
||||
private inner class LegacyMySQLSerializer<T : Any> : DataTypeSerializer<T>() {
|
||||
override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
|
||||
val json = transaction(database) {
|
||||
table.selectAll()
|
||||
.where { table.id eq uuid }
|
||||
.limit(1)
|
||||
.singleOrNull()
|
||||
?.get(table.data)
|
||||
}
|
||||
|
||||
if (json == null) {
|
||||
return null
|
||||
}
|
||||
|
||||
val data = readConfig(json, ConfigType.JSON)
|
||||
|
||||
val value: Any? = when (key.type) {
|
||||
PersistentDataKeyType.INT -> data.getIntOrNull(key.key.toString())
|
||||
PersistentDataKeyType.DOUBLE -> data.getDoubleOrNull(key.key.toString())
|
||||
PersistentDataKeyType.STRING -> data.getStringOrNull(key.key.toString())
|
||||
PersistentDataKeyType.BOOLEAN -> data.getBoolOrNull(key.key.toString())
|
||||
PersistentDataKeyType.STRING_LIST -> data.getStringsOrNull(key.key.toString())
|
||||
PersistentDataKeyType.CONFIG -> data.getSubsectionOrNull(key.key.toString())
|
||||
PersistentDataKeyType.BIG_DECIMAL -> data.getBigDecimalOrNull(key.key.toString())
|
||||
|
||||
else -> null
|
||||
}
|
||||
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
return value as? T?
|
||||
}
|
||||
|
||||
override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
|
||||
throw UnsupportedOperationException("Legacy MySQL does not support writing")
|
||||
}
|
||||
}
|
||||
|
||||
object Factory: PersistentDataHandlerFactory("legacy_mysql") {
|
||||
override fun create(plugin: EcoSpigotPlugin): PersistentDataHandler {
|
||||
return LegacyMySQLPersistentDataHandler(plugin.configYml.getSubsection("mysql"))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,192 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.handlers.impl
|
||||
|
||||
import com.mongodb.MongoClientSettings
|
||||
import com.mongodb.client.model.Filters
|
||||
import com.mongodb.client.model.ReplaceOptions
|
||||
import com.mongodb.kotlin.client.coroutine.MongoClient
|
||||
import com.willfp.eco.core.config.Configs
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.data.handlers.DataTypeSerializer
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import kotlinx.coroutines.flow.firstOrNull
|
||||
import kotlinx.coroutines.flow.toList
|
||||
import kotlinx.coroutines.runBlocking
|
||||
import org.bson.BsonArray
|
||||
import org.bson.BsonBoolean
|
||||
import org.bson.BsonDecimal128
|
||||
import org.bson.BsonDocument
|
||||
import org.bson.BsonDouble
|
||||
import org.bson.BsonInt32
|
||||
import org.bson.BsonObjectId
|
||||
import org.bson.BsonString
|
||||
import org.bson.BsonValue
|
||||
import org.bson.codecs.configuration.CodecRegistries
|
||||
import org.bson.codecs.pojo.PojoCodecProvider
|
||||
import org.bson.types.Decimal128
|
||||
import java.math.BigDecimal
|
||||
import java.util.UUID
|
||||
|
||||
class MongoDBPersistentDataHandler(
|
||||
config: Config
|
||||
) : PersistentDataHandler("mongo") {
|
||||
private val codecRegistry = CodecRegistries.fromRegistries(
|
||||
MongoClientSettings.getDefaultCodecRegistry(),
|
||||
CodecRegistries.fromProviders(PojoCodecProvider.builder().automatic(true).build())
|
||||
)
|
||||
|
||||
private val client = MongoClient.create(config.getString("url"))
|
||||
private val database = client.getDatabase(config.getString("database"))
|
||||
|
||||
private val collection = database.getCollection<BsonDocument>(config.getString("collection"))
|
||||
.withCodecRegistry(codecRegistry)
|
||||
|
||||
init {
|
||||
PersistentDataKeyType.STRING.registerSerializer(this, object : MongoSerializer<String>() {
|
||||
override fun serialize(value: String): BsonValue {
|
||||
return BsonString(value)
|
||||
}
|
||||
|
||||
override fun deserialize(value: BsonValue): String {
|
||||
return value.asString().value
|
||||
}
|
||||
})
|
||||
|
||||
PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : MongoSerializer<Boolean>() {
|
||||
override fun serialize(value: Boolean): BsonValue {
|
||||
return BsonBoolean(value)
|
||||
}
|
||||
|
||||
override fun deserialize(value: BsonValue): Boolean {
|
||||
return value.asBoolean().value
|
||||
}
|
||||
})
|
||||
|
||||
PersistentDataKeyType.INT.registerSerializer(this, object : MongoSerializer<Int>() {
|
||||
override fun serialize(value: Int): BsonValue {
|
||||
return BsonInt32(value)
|
||||
}
|
||||
|
||||
override fun deserialize(value: BsonValue): Int {
|
||||
return value.asInt32().value
|
||||
}
|
||||
})
|
||||
|
||||
PersistentDataKeyType.DOUBLE.registerSerializer(this, object : MongoSerializer<Double>() {
|
||||
override fun serialize(value: Double): BsonValue {
|
||||
return BsonDouble(value)
|
||||
}
|
||||
|
||||
override fun deserialize(value: BsonValue): Double {
|
||||
return value.asDouble().value
|
||||
}
|
||||
})
|
||||
|
||||
PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : MongoSerializer<List<String>>() {
|
||||
override fun serialize(value: List<String>): BsonValue {
|
||||
return BsonArray(value.map { BsonString(it) })
|
||||
}
|
||||
|
||||
override fun deserialize(value: BsonValue): List<String> {
|
||||
return value.asArray().values.map { it.asString().value }
|
||||
}
|
||||
})
|
||||
|
||||
PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : MongoSerializer<BigDecimal>() {
|
||||
override fun serialize(value: BigDecimal): BsonValue {
|
||||
return BsonDecimal128(Decimal128(value))
|
||||
}
|
||||
|
||||
override fun deserialize(value: BsonValue): BigDecimal {
|
||||
return value.asDecimal128().value.bigDecimalValue()
|
||||
}
|
||||
})
|
||||
|
||||
PersistentDataKeyType.CONFIG.registerSerializer(this, object : MongoSerializer<Config>() {
|
||||
private fun deserializeConfigValue(value: BsonValue): Any {
|
||||
return when (value) {
|
||||
is BsonString -> value.value
|
||||
is BsonInt32 -> value.value
|
||||
is BsonDouble -> value.value
|
||||
is BsonBoolean -> value.value
|
||||
is BsonDecimal128 -> value.value.bigDecimalValue()
|
||||
is BsonArray -> value.values.map { deserializeConfigValue(it) }
|
||||
is BsonDocument -> value.mapValues { (_, v) -> deserializeConfigValue(v) }
|
||||
|
||||
else -> throw IllegalArgumentException("Could not deserialize config value type ${value::class.simpleName}")
|
||||
}
|
||||
}
|
||||
|
||||
private fun serializeConfigValue(value: Any): BsonValue {
|
||||
return when (value) {
|
||||
is String -> BsonString(value)
|
||||
is Int -> BsonInt32(value)
|
||||
is Double -> BsonDouble(value)
|
||||
is Boolean -> BsonBoolean(value)
|
||||
is BigDecimal -> BsonDecimal128(Decimal128(value))
|
||||
is List<*> -> BsonArray(value.map { serializeConfigValue(it!!) })
|
||||
is Map<*, *> -> BsonDocument().apply {
|
||||
value.forEach { (k, v) -> append(k.toString(), serializeConfigValue(v!!)) }
|
||||
}
|
||||
|
||||
else -> throw IllegalArgumentException("Could not serialize config value type ${value::class.simpleName}")
|
||||
}
|
||||
}
|
||||
|
||||
override fun serialize(value: Config): BsonValue {
|
||||
return serializeConfigValue(value.toMap())
|
||||
}
|
||||
|
||||
override fun deserialize(value: BsonValue): Config {
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
return Configs.fromMap(deserializeConfigValue(value.asDocument()) as Map<String, Any>)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
override fun getSavedUUIDs(): Set<UUID> {
|
||||
return runBlocking {
|
||||
collection.find().toList().map {
|
||||
UUID.fromString(it.getString("uuid").value)
|
||||
}.toSet()
|
||||
}
|
||||
}
|
||||
|
||||
private abstract inner class MongoSerializer<T : Any> : DataTypeSerializer<T>() {
|
||||
override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
|
||||
return runBlocking {
|
||||
val filter = Filters.eq("uuid", uuid.toString())
|
||||
|
||||
val profile = collection.find(filter)
|
||||
.firstOrNull() ?: return@runBlocking null
|
||||
|
||||
val value = profile[key.key.toString()] ?: return@runBlocking null
|
||||
|
||||
deserialize(value)
|
||||
}
|
||||
}
|
||||
|
||||
override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
|
||||
runBlocking {
|
||||
val filter = Filters.eq("uuid", uuid.toString())
|
||||
|
||||
val profile = collection.find(filter).firstOrNull()
|
||||
?: BsonDocument()
|
||||
.append("_id", BsonObjectId())
|
||||
.append("uuid", BsonString(uuid.toString()))
|
||||
|
||||
profile.append(key.key.toString(), serialize(value))
|
||||
|
||||
collection.replaceOne(
|
||||
filter,
|
||||
profile,
|
||||
ReplaceOptions().upsert(true)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract fun serialize(value: T): BsonValue
|
||||
protected abstract fun deserialize(value: BsonValue): T
|
||||
}
|
||||
}
|
||||
@@ -1,267 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.handlers.impl
|
||||
|
||||
import com.willfp.eco.core.config.ConfigType
|
||||
import com.willfp.eco.core.config.Configs
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.config.readConfig
|
||||
import com.willfp.eco.core.data.handlers.DataTypeSerializer
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.zaxxer.hikari.HikariConfig
|
||||
import com.zaxxer.hikari.HikariDataSource
|
||||
import kotlinx.coroutines.delay
|
||||
import kotlinx.coroutines.runBlocking
|
||||
import org.jetbrains.exposed.sql.Column
|
||||
import org.jetbrains.exposed.sql.Database
|
||||
import org.jetbrains.exposed.sql.SchemaUtils
|
||||
import org.jetbrains.exposed.sql.SqlExpressionBuilder.eq
|
||||
import org.jetbrains.exposed.sql.SqlExpressionBuilder.greaterEq
|
||||
import org.jetbrains.exposed.sql.Table
|
||||
import org.jetbrains.exposed.sql.and
|
||||
import org.jetbrains.exposed.sql.deleteWhere
|
||||
import org.jetbrains.exposed.sql.replace
|
||||
import org.jetbrains.exposed.sql.selectAll
|
||||
import org.jetbrains.exposed.sql.transactions.transaction
|
||||
import org.jetbrains.exposed.sql.upsert
|
||||
import java.math.BigDecimal
|
||||
import java.util.UUID
|
||||
import kotlin.math.pow
|
||||
|
||||
private const val VALUE_COLUMN_NAME = "dataValue"
|
||||
private const val UUID_COLUMN_NAME = "profileUUID"
|
||||
private const val KEY_COLUMN_NAME = "dataKey"
|
||||
private const val INDEX_COLUMN_NAME = "listIndex"
|
||||
|
||||
class MySQLPersistentDataHandler(
|
||||
config: Config
|
||||
) : PersistentDataHandler("mysql") {
|
||||
private val dataSource = HikariDataSource(HikariConfig().apply {
|
||||
driverClassName = "com.mysql.cj.jdbc.Driver"
|
||||
username = config.getString("user")
|
||||
password = config.getString("password")
|
||||
jdbcUrl = "jdbc:mysql://" +
|
||||
"${config.getString("host")}:" +
|
||||
"${config.getString("port")}/" +
|
||||
config.getString("database")
|
||||
maximumPoolSize = config.getInt("connections")
|
||||
})
|
||||
|
||||
private val prefix = config.getString("prefix")
|
||||
|
||||
private val database = Database.connect(dataSource)
|
||||
|
||||
init {
|
||||
PersistentDataKeyType.STRING.registerSerializer(this, object : DirectStoreSerializer<String>() {
|
||||
override val table = object : KeyTable<String>("string") {
|
||||
override val value = varchar(VALUE_COLUMN_NAME, 256)
|
||||
}
|
||||
}.createTable())
|
||||
|
||||
PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : DirectStoreSerializer<Boolean>() {
|
||||
override val table = object : KeyTable<Boolean>("boolean") {
|
||||
override val value = bool(VALUE_COLUMN_NAME)
|
||||
}
|
||||
}.createTable())
|
||||
|
||||
PersistentDataKeyType.INT.registerSerializer(this, object : DirectStoreSerializer<Int>() {
|
||||
override val table = object : KeyTable<Int>("int") {
|
||||
override val value = integer(VALUE_COLUMN_NAME)
|
||||
}
|
||||
}.createTable())
|
||||
|
||||
PersistentDataKeyType.DOUBLE.registerSerializer(this, object : DirectStoreSerializer<Double>() {
|
||||
override val table = object : KeyTable<Double>("double") {
|
||||
override val value = double(VALUE_COLUMN_NAME)
|
||||
}
|
||||
}.createTable())
|
||||
|
||||
PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : DirectStoreSerializer<BigDecimal>() {
|
||||
override val table = object : KeyTable<BigDecimal>("big_decimal") {
|
||||
// 34 digits of precision, 4 digits of scale
|
||||
override val value = decimal(VALUE_COLUMN_NAME, 34, 4)
|
||||
}
|
||||
}.createTable())
|
||||
|
||||
PersistentDataKeyType.CONFIG.registerSerializer(this, object : SingleValueSerializer<Config, String>() {
|
||||
override val table = object : KeyTable<String>("config") {
|
||||
override val value = text(VALUE_COLUMN_NAME)
|
||||
}
|
||||
|
||||
override fun convertFromStored(value: String): Config {
|
||||
return readConfig(value, ConfigType.JSON)
|
||||
}
|
||||
|
||||
override fun convertToStored(value: Config): String {
|
||||
// Store config as JSON
|
||||
return if (value.type == ConfigType.JSON) {
|
||||
value.toPlaintext()
|
||||
} else {
|
||||
Configs.fromMap(value.toMap(), ConfigType.JSON).toPlaintext()
|
||||
}
|
||||
}
|
||||
}.createTable())
|
||||
|
||||
PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : MultiValueSerializer<String>() {
|
||||
override val table = object : ListKeyTable<String>("string_list") {
|
||||
override val value = varchar(VALUE_COLUMN_NAME, 256)
|
||||
}
|
||||
}.createTable())
|
||||
}
|
||||
|
||||
override fun getSavedUUIDs(): Set<UUID> {
|
||||
val savedUUIDs = mutableSetOf<UUID>()
|
||||
|
||||
for (keyType in PersistentDataKeyType.values()) {
|
||||
val serializer = keyType.getSerializer(this) as MySQLSerializer<*>
|
||||
savedUUIDs.addAll(serializer.getSavedUUIDs())
|
||||
}
|
||||
|
||||
return savedUUIDs
|
||||
}
|
||||
|
||||
private abstract inner class MySQLSerializer<T : Any> : DataTypeSerializer<T>() {
|
||||
protected abstract val table: ProfileTable
|
||||
|
||||
fun getSavedUUIDs(): Set<UUID> {
|
||||
return transaction(database) {
|
||||
table.selectAll().map { it[table.uuid] }.toSet()
|
||||
}
|
||||
}
|
||||
|
||||
fun createTable(): MySQLSerializer<T> {
|
||||
transaction(database) {
|
||||
SchemaUtils.create(table)
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
}
|
||||
|
||||
// T is the key type
|
||||
// S is the stored value type
|
||||
private abstract inner class SingleValueSerializer<T : Any, S : Any> : MySQLSerializer<T>() {
|
||||
abstract override val table: KeyTable<S>
|
||||
|
||||
abstract fun convertToStored(value: T): S
|
||||
abstract fun convertFromStored(value: S): T
|
||||
|
||||
override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
|
||||
val stored = transaction(database) {
|
||||
table.selectAll()
|
||||
.where { (table.uuid eq uuid) and (table.key eq key.key.toString()) }
|
||||
.limit(1)
|
||||
.singleOrNull()
|
||||
?.get(table.value)
|
||||
}
|
||||
|
||||
return stored?.let { convertFromStored(it) }
|
||||
}
|
||||
|
||||
override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
|
||||
withRetries {
|
||||
transaction(database) {
|
||||
table.upsert {
|
||||
it[table.uuid] = uuid
|
||||
it[table.key] = key.key.toString()
|
||||
it[table.value] = convertToStored(value)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private abstract inner class DirectStoreSerializer<T : Any> : SingleValueSerializer<T, T>() {
|
||||
override fun convertToStored(value: T): T {
|
||||
return value
|
||||
}
|
||||
|
||||
override fun convertFromStored(value: T): T {
|
||||
return value
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Serializer for list-valued keys.
 *
 * Each element is stored as its own row; an index column preserves list
 * order. Reading a key with no rows yields an empty list.
 */
private abstract inner class MultiValueSerializer<T : Any> : MySQLSerializer<List<T>>() {
    abstract override val table: ListKeyTable<T>

    override fun readAsync(uuid: UUID, key: PersistentDataKey<List<T>>): List<T>? {
        // Re-assemble the list from its rows, ordered by the index column.
        return transaction(database) {
            table.selectAll()
                .where { (table.uuid eq uuid) and (table.key eq key.key.toString()) }
                .orderBy(table.index)
                .map { it[table.value] }
        }
    }

    override fun writeAsync(uuid: UUID, key: PersistentDataKey<List<T>>, value: List<T>) {
        withRetries {
            transaction(database) {
                // Drop rows whose index falls outside the new list.
                table.deleteWhere {
                    (table.uuid eq uuid) and
                            (table.key eq key.key.toString()) and
                            (table.index greaterEq value.size)
                }

                // Write every in-bounds element, replacing any existing row.
                value.forEachIndexed { position, element ->
                    table.replace {
                        it[table.uuid] = uuid
                        it[table.key] = key.key.toString()
                        it[table.index] = position
                        it[table.value] = element
                    }
                }
            }
        }
    }
}
|
||||
|
||||
/**
 * Base class for all profile data tables: applies the configured table
 * prefix and declares the owning profile's UUID column.
 */
private abstract inner class ProfileTable(name: String) : Table(prefix + name) {
    // The UUID of the profile that owns each row.
    val uuid = uuid(UUID_COLUMN_NAME)
}
|
||||
|
||||
/**
 * Table holding one row per (profile, data key) pair for single-valued
 * keys. Subclasses declare the concrete value column type.
 */
private abstract inner class KeyTable<T>(name: String) : ProfileTable(name) {
    val key = varchar(KEY_COLUMN_NAME, 128)
    abstract val value: Column<T>

    override val primaryKey = PrimaryKey(uuid, key)

    init {
        // NOTE(review): this duplicates the primary key's uniqueness
        // guarantee — presumably kept for upsert conflict detection;
        // confirm before removing.
        uniqueIndex(uuid, key)
    }
}
|
||||
|
||||
/**
 * Table holding one row per list element for list-valued keys; the index
 * column records the element's position within the list.
 */
private abstract inner class ListKeyTable<T>(name: String) : ProfileTable(name) {
    val key = varchar(KEY_COLUMN_NAME, 128)
    val index = integer(INDEX_COLUMN_NAME)
    abstract val value: Column<T>

    override val primaryKey = PrimaryKey(uuid, key, index)

    init {
        // NOTE(review): duplicates the primary key's uniqueness guarantee —
        // presumably kept for replace/upsert conflict detection; confirm.
        uniqueIndex(uuid, key, index)
    }
}
|
||||
|
||||
/**
 * Runs [action], retrying with exponential backoff on any exception.
 *
 * Makes up to [MAX] attempts in total, backing off 2^(attempt+1)
 * milliseconds between failures (4 ms, 8 ms, ... 64 ms — the same schedule
 * as the previous counter-based implementation).
 *
 * @return the result of the first successful attempt, or null if every
 * attempt threw. Exceptions are intentionally swallowed: persistence here
 * is best-effort and a failed write must not crash the caller.
 */
private inline fun <T> withRetries(action: () -> T): T? {
    // Total attempts, including the first (non-retry) one.
    val maxAttempts = 6

    for (attempt in 1..maxAttempts) {
        try {
            return action()
        } catch (ignored: Exception) {
            if (attempt == maxAttempts) {
                // Out of attempts; give up quietly.
                return null
            }

            // Exponential backoff before the next attempt.
            runBlocking {
                delay(2.0.pow(attempt + 1).toLong())
            }
        }
    }

    // Unreachable: the loop always returns from within.
    return null
}
|
||||
}
|
||||
@@ -1,72 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.handlers.impl
|
||||
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.data.handlers.DataTypeSerializer
|
||||
import com.willfp.eco.core.data.handlers.PersistentDataHandler
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import java.math.BigDecimal
|
||||
import java.util.UUID
|
||||
|
||||
/**
 * Persistent data handler backed by the plugin's data.yml file.
 *
 * Values live under the path `player.<uuid>.<key>`; writes mutate the
 * in-memory config and are flushed to disk by [doSave].
 */
class YamlPersistentDataHandler(
    plugin: EcoSpigotPlugin
) : PersistentDataHandler("yaml") {
    private val dataYml = plugin.dataYml

    init {
        // Each key type simply delegates to the matching typed config getter.
        PersistentDataKeyType.STRING.registerSerializer(this, object : YamlSerializer<String>() {
            override fun read(config: Config, key: String) = config.getStringOrNull(key)
        })

        PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : YamlSerializer<Boolean>() {
            override fun read(config: Config, key: String) = config.getBoolOrNull(key)
        })

        PersistentDataKeyType.INT.registerSerializer(this, object : YamlSerializer<Int>() {
            override fun read(config: Config, key: String) = config.getIntOrNull(key)
        })

        PersistentDataKeyType.DOUBLE.registerSerializer(this, object : YamlSerializer<Double>() {
            override fun read(config: Config, key: String) = config.getDoubleOrNull(key)
        })

        PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : YamlSerializer<List<String>>() {
            override fun read(config: Config, key: String) = config.getStringsOrNull(key)
        })

        PersistentDataKeyType.CONFIG.registerSerializer(this, object : YamlSerializer<Config>() {
            override fun read(config: Config, key: String) = config.getSubsectionOrNull(key)
        })

        PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : YamlSerializer<BigDecimal>() {
            override fun read(config: Config, key: String) = config.getBigDecimalOrNull(key)
        })
    }

    override fun getSavedUUIDs(): Set<UUID> {
        // Every direct child of the `player` section is a profile UUID.
        return dataYml.getSubsection("player")
            .getKeys(false)
            .map { UUID.fromString(it) }
            .toSet()
    }

    // data.yml always participates in periodic autosaving.
    override fun shouldAutosave(): Boolean = true

    override fun doSave() {
        dataYml.save()
    }

    /**
     * Reads delegate to a typed config getter; writes set the raw value at
     * the profile's path.
     */
    private abstract inner class YamlSerializer<T : Any> : DataTypeSerializer<T>() {
        protected abstract fun read(config: Config, key: String): T?

        final override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? =
            read(dataYml, "player.$uuid.${key.key}")

        final override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            dataYml.set("player.$uuid.${key.key}", value)
        }
    }
}
|
||||
@@ -1,141 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.profiles
|
||||
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.ServerLocking
|
||||
import com.willfp.eco.internal.spigot.data.KeyRegistry
|
||||
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
|
||||
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlers
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.LegacyMongoDBPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.LegacyMySQLPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.MongoDBPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.MySQLPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.handlers.impl.YamlPersistentDataHandler
|
||||
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoPlayerProfile
|
||||
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoProfile
|
||||
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoServerProfile
|
||||
import com.willfp.eco.internal.spigot.data.profiles.impl.serverProfileUUID
|
||||
import java.util.UUID
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
|
||||
const val LEGACY_MIGRATED_KEY = "legacy-data-migrated"
|
||||
|
||||
/**
 * Owns profile instances and the data handlers they read from and write to.
 *
 * Also detects and schedules data migrations when the configured data
 * handler changes (or when a legacy MySQL/MongoDB database is found).
 */
class ProfileHandler(
    private val plugin: EcoSpigotPlugin
) {
    private val handlerId = plugin.configYml.getString("data-handler")

    // Local (data.yml) storage for keys flagged as locally-saved.
    val localHandler = YamlPersistentDataHandler(plugin)

    // The configured primary storage backend.
    val defaultHandler = PersistentDataHandlers[handlerId]?.create(plugin)
        ?: throw IllegalArgumentException("Invalid data handler ($handlerId)")

    val profileWriter = ProfileWriter(plugin, this)

    private val loaded = ConcurrentHashMap<UUID, EcoProfile>()

    /** Gets (or lazily creates) the profile for a player UUID. */
    fun getPlayerProfile(uuid: UUID): EcoPlayerProfile {
        return loaded.computeIfAbsent(uuid) {
            EcoPlayerProfile(it, this)
        } as EcoPlayerProfile
    }

    /** Gets (or lazily creates) the server's own profile. */
    fun getServerProfile(): EcoServerProfile {
        return loaded.computeIfAbsent(serverProfileUUID) {
            EcoServerProfile(this)
        } as EcoServerProfile
    }

    fun unloadProfile(uuid: UUID) {
        loaded.remove(uuid)
    }

    /** Shuts down both handlers, flushing any outstanding writes. */
    fun save() {
        localHandler.shutdown()
        defaultHandler.shutdown()
    }

    /**
     * Schedules a data migration if one is needed.
     *
     * @return true if a migration was scheduled (the server is locked until
     * it completes and the server restarts).
     */
    fun migrateIfNecessary(): Boolean {
        if (!plugin.configYml.getBool("perform-data-migration")) {
            return false
        }

        // First install: record the current handler, nothing to migrate.
        if (!plugin.dataYml.has("previous-handler")) {
            plugin.dataYml.set("previous-handler", defaultHandler.id)
            plugin.dataYml.set(LEGACY_MIGRATED_KEY, true)
            plugin.dataYml.save()
            return false
        }

        // Handler changed since last boot: migrate from the old one.
        val previousHandlerId = plugin.dataYml.getString("previous-handler").lowercase()
        if (previousHandlerId != defaultHandler.id) {
            val fromFactory = PersistentDataHandlers[previousHandlerId] ?: return false
            scheduleMigration(fromFactory)

            return true
        }

        if (defaultHandler is MySQLPersistentDataHandler && !plugin.dataYml.getBool(LEGACY_MIGRATED_KEY)) {
            plugin.logger.info("eco has detected a legacy MySQL database. Migrating to new MySQL database...")
            scheduleMigration(LegacyMySQLPersistentDataHandler.Factory)

            return true
        }

        if (defaultHandler is MongoDBPersistentDataHandler && !plugin.dataYml.getBool(LEGACY_MIGRATED_KEY)) {
            plugin.logger.info("eco has detected a legacy MongoDB database. Migrating to new MongoDB database...")
            scheduleMigration(LegacyMongoDBPersistentDataHandler.Factory)

            return true
        }

        return false
    }

    private fun scheduleMigration(fromFactory: PersistentDataHandlerFactory) {
        ServerLocking.lock("Migrating player data! Check console for more information.")

        // Run after 5 ticks to allow plugins to load their data keys.
        plugin.scheduler.runLater(5) {
            doMigrate(fromFactory)

            plugin.dataYml.set(LEGACY_MIGRATED_KEY, true)
            plugin.dataYml.save()
        }
    }

    // Copies every registered key for every saved profile from the old
    // handler into the current one, then restarts the server.
    private fun doMigrate(fromFactory: PersistentDataHandlerFactory) {
        plugin.logger.info("eco has detected a change in data handler")
        plugin.logger.info("${fromFactory.id} --> ${defaultHandler.id}")
        plugin.logger.info("This will take a while! Players will not be able to join during this time.")

        val fromHandler = fromFactory.create(plugin)
        val toHandler = defaultHandler

        val keys = KeyRegistry.getRegisteredKeys()

        // Fixed: the old interpolation leaked a stray '}' into the log line.
        plugin.logger.info("Keys to migrate: ${keys.joinToString(", ") { it.key.toString() }}")

        plugin.logger.info("Loading profile UUIDs from ${fromFactory.id}...")
        plugin.logger.info("This step may take a while depending on the size of your database.")

        val uuids = fromHandler.getSavedUUIDs()

        plugin.logger.info("Found ${uuids.size} profiles to migrate")

        for ((index, uuid) in uuids.withIndex()) {
            plugin.logger.info("(${index + 1}/${uuids.size}) Migrating $uuid")
            val profile = fromHandler.serializeProfile(uuid, keys)
            toHandler.loadSerializedProfile(profile)
        }

        plugin.logger.info("Profile writes submitted! Waiting for completion...")
        toHandler.shutdown()

        plugin.logger.info("Updating previous handler...")
        plugin.dataYml.set("previous-handler", handlerId)
        plugin.dataYml.save()
        plugin.logger.info("The server will now automatically be restarted...")

        plugin.server.shutdown()
    }
}
|
||||
@@ -1,59 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.profiles
|
||||
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import java.util.UUID
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
|
||||
/*
|
||||
The profile writer exists as an optimization to batch writes to the database.
|
||||
|
||||
This is necessary because values frequently change multiple times per tick,
|
||||
and we don't want to write to the database every time a value changes.
|
||||
|
||||
Instead, we only commit the last value that was set every interval (default 1 tick).
|
||||
*/
|
||||
|
||||
|
||||
/**
 * Batches profile writes so rapidly-changing values only reach the
 * backing store once per save interval: only the most recent value per
 * (uuid, key) pair is retained between flushes.
 */
class ProfileWriter(
    private val plugin: EcoPlugin,
    private val handler: ProfileHandler
) {
    private val saveInterval = plugin.configYml.getInt("save-interval").toLong()
    private val autosaveInterval = plugin.configYml.getInt("autosave-interval").toLong()
    private val valuesToWrite = ConcurrentHashMap<WriteRequest<*>, Any>()

    /** Queues a value to be written on the next flush. */
    fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        valuesToWrite[WriteRequest(uuid, key)] = value
    }

    /** Starts the repeating task that flushes pending writes. */
    fun startTickingSaves() {
        plugin.scheduler.runTimer(20, saveInterval) {
            val pending = valuesToWrite.entries.iterator()

            while (pending.hasNext()) {
                val entry = pending.next()
                val request = entry.key
                val value = entry.value
                pending.remove()

                // Locally-flagged keys go to data.yml; everything else goes
                // to the configured default handler.
                val dataHandler = if (request.key.isSavedLocally) handler.localHandler else handler.defaultHandler

                @Suppress("UNCHECKED_CAST")
                dataHandler.write(request.uuid, request.key as PersistentDataKey<Any>, value)
            }
        }
    }

    /** Starts the repeating task that autosaves local storage. */
    fun startTickingAutosave() {
        plugin.scheduler.runTimer(autosaveInterval, autosaveInterval) {
            if (handler.localHandler.shouldAutosave()) {
                handler.localHandler.save()
            }
        }
    }

    private data class WriteRequest<T>(val uuid: UUID, val key: PersistentDataKey<T>)
}
|
||||
|
||||
/**
 * Whether this key should be stored in local (data.yml) storage rather
 * than the configured default handler: either the key itself is marked
 * local, or its owning plugin opted into local storage.
 */
val PersistentDataKey<*>.isSavedLocally: Boolean
    get() = this.isLocal || EcoPlugin.getPlugin(this.key.namespace)?.isUsingLocalStorage == true
|
||||
@@ -1,14 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.profiles.impl
|
||||
|
||||
import com.willfp.eco.core.data.PlayerProfile
|
||||
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
|
||||
import java.util.UUID
|
||||
|
||||
/**
 * A player's persistent data profile, keyed by the player's UUID.
 */
class EcoPlayerProfile(
    uuid: UUID,
    handler: ProfileHandler
) : EcoProfile(uuid, handler), PlayerProfile {
    override fun toString(): String {
        return "EcoPlayerProfile{uuid=$uuid}"
    }
}
|
||||
@@ -1,48 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.profiles.impl
|
||||
|
||||
import com.willfp.eco.core.data.Profile
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
|
||||
import com.willfp.eco.internal.spigot.data.profiles.isSavedLocally
|
||||
import java.util.UUID
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
|
||||
/**
 * Base profile implementation: an in-memory cache in front of the data
 * handlers.
 *
 * Reads populate the cache from the appropriate handler (local or
 * default), falling back to the key's default value. Writes update the
 * cache immediately and are batched through the profile writer.
 */
abstract class EcoProfile(
    val uuid: UUID,
    private val handler: ProfileHandler
) : Profile {
    private val data = ConcurrentHashMap<PersistentDataKey<*>, Any>()

    override fun <T : Any> write(key: PersistentDataKey<T>, value: T) {
        this.data[key] = value

        handler.profileWriter.write(uuid, key, value)
    }

    override fun <T : Any> read(key: PersistentDataKey<T>): T {
        @Suppress("UNCHECKED_CAST")
        if (this.data.containsKey(key)) {
            return this.data[key] as T
        }

        // Cache miss: pull from storage, defaulting when nothing is stored.
        val fromStorage = if (key.isSavedLocally) {
            handler.localHandler.read(uuid, key)
        } else {
            handler.defaultHandler.read(uuid, key)
        }

        this.data[key] = fromStorage ?: key.defaultValue

        // Re-read so the value is served from the now-populated cache.
        return read(key)
    }

    override fun equals(other: Any?): Boolean =
        other is EcoProfile && this.uuid == other.uuid

    override fun hashCode(): Int = this.uuid.hashCode()
}
|
||||
@@ -1,47 +0,0 @@
|
||||
package com.willfp.eco.internal.spigot.data.profiles.impl
|
||||
|
||||
import com.willfp.eco.core.data.ServerProfile
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
|
||||
import com.willfp.eco.util.namespacedKeyOf
|
||||
import java.util.UUID
|
||||
|
||||
// Server-wide ID, stored via the default data handler.
val serverIDKey = PersistentDataKey(
    namespacedKeyOf("eco", "server_id"),
    PersistentDataKeyType.STRING,
    ""
)

// Per-node server ID (the trailing `true` flag — presumably marks the key
// as local-only storage; confirm against PersistentDataKey's constructor).
val localServerIDKey = PersistentDataKey(
    namespacedKeyOf("eco", "local_server_id"),
    PersistentDataKeyType.STRING,
    "",
    true
)

// The server's own profile is stored under the all-zero UUID.
val serverProfileUUID = UUID(0, 0)
|
||||
|
||||
/**
 * The server's own profile, stored under the all-zero UUID.
 *
 * Both server IDs are generated lazily the first time they are requested.
 */
class EcoServerProfile(
    handler: ProfileHandler
) : EcoProfile(serverProfileUUID, handler), ServerProfile {
    override fun getServerID(): String = readOrGenerateID(serverIDKey)

    override fun getLocalServerID(): String = readOrGenerateID(localServerIDKey)

    // Reads the stored ID, first generating and writing a random one if the
    // stored value is blank.
    private fun readOrGenerateID(key: PersistentDataKey<String>): String {
        if (this.read(key).isBlank()) {
            this.write(key, UUID.randomUUID().toString())
        }

        return this.read(key)
    }

    override fun toString(): String {
        return "EcoServerProfile"
    }
}
|
||||
@@ -0,0 +1,37 @@
|
||||
package com.willfp.eco.internal.spigot.data.storage
|
||||
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import java.util.UUID
|
||||
|
||||
/**
 * A storage backend for persistent data keys.
 */
abstract class DataHandler(
    val type: HandlerType
) {
    /**
     * Read value from a key.
     */
    abstract fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T?

    /**
     * Write value to a key.
     */
    abstract fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T)

    /**
     * Save a set of keys for a given UUID.
     */
    abstract fun saveKeysFor(uuid: UUID, keys: Map<PersistentDataKey<*>, Any>)

    // Everything below this are methods that are only needed for certain implementations.

    /** Synchronous save; no-op by default. */
    open fun save() {

    }

    /** Asynchronous save; no-op by default. */
    open fun saveAsync() {

    }

    /** Post-construction initialization hook; no-op by default. */
    open fun initialize() {

    }
}
|
||||
@@ -0,0 +1,7 @@
|
||||
package com.willfp.eco.internal.spigot.data.storage
|
||||
|
||||
/**
 * The available persistent data storage backends.
 */
enum class HandlerType {
    YAML,
    MYSQL,
    MONGO
}
|
||||
@@ -0,0 +1,134 @@
|
||||
package com.willfp.eco.internal.spigot.data.storage
|
||||
|
||||
import com.mongodb.client.model.Filters
|
||||
import com.mongodb.client.model.ReplaceOptions
|
||||
import com.mongodb.client.model.UpdateOptions
|
||||
import com.mongodb.client.model.Updates
|
||||
import com.mongodb.kotlin.client.coroutine.MongoClient
|
||||
import com.mongodb.kotlin.client.coroutine.MongoCollection
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.data.ProfileHandler
|
||||
import kotlinx.coroutines.CoroutineScope
|
||||
import kotlinx.coroutines.Dispatchers
|
||||
import kotlinx.coroutines.launch
|
||||
import kotlinx.coroutines.runBlocking
|
||||
import org.bson.codecs.pojo.annotations.BsonId
|
||||
import java.util.UUID
|
||||
import kotlinx.coroutines.flow.firstOrNull
|
||||
import kotlinx.serialization.Contextual
|
||||
import kotlinx.serialization.SerialName
|
||||
import kotlinx.serialization.Serializable
|
||||
import org.bukkit.Bukkit
|
||||
|
||||
/**
 * Data handler backed by MongoDB via the coroutine driver.
 *
 * One document per profile; all of a profile's keys live in a single
 * string-keyed map on the document.
 */
@Suppress("UNCHECKED_CAST")
class MongoDataHandler(
    plugin: EcoSpigotPlugin,
    private val handler: ProfileHandler
) : DataHandler(HandlerType.MONGO) {
    private val client: MongoClient
    private val collection: MongoCollection<UUIDProfile>

    // Writes are dispatched fire-and-forget on IO threads; reads block.
    private val scope = CoroutineScope(Dispatchers.IO)

    init {
        // Use the jackson mapping service for compatibility with
        // KMongo-era serialized data.
        System.setProperty(
            "org.litote.mongo.mapping.service",
            "org.litote.kmongo.jackson.JacksonClassMappingTypeService"
        )

        val url = plugin.configYml.getString("mongodb.url")

        client = MongoClient.create(url)
        collection = client.getDatabase(plugin.configYml.getString("mongodb.database"))
            .getCollection<UUIDProfile>("uuidprofile") // Compat with jackson mapping
    }

    override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
        // Blocks the calling thread until the query completes.
        return runBlocking {
            doRead(uuid, key)
        }
    }

    override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        // Fire-and-forget: completion is not awaited, failures unreported.
        scope.launch {
            doWrite(uuid, key, value)
        }
    }

    override fun saveKeysFor(uuid: UUID, keys: Map<PersistentDataKey<*>, Any>) {
        scope.launch {
            // NOTE(review): each key triggers a full fetch-and-update round
            // trip; batching into one update may be worthwhile.
            for ((key, value) in keys) {
                saveKey(uuid, key, value)
            }
        }
    }

    private suspend fun <T : Any> saveKey(uuid: UUID, key: PersistentDataKey<T>, value: Any) {
        val data = value as T
        doWrite(uuid, key, data)
    }

    private suspend fun <T> doWrite(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        val profile = getOrCreateDocument(uuid)

        profile.data.run {
            if (value == null) {
                // A null value clears the key entirely.
                this.remove(key.key.toString())
            } else {
                this[key.key.toString()] = value
            }
        }

        // Rewrites the whole data map, not just the single changed key.
        collection.updateOne(
            Filters.eq(UUIDProfile::uuid.name, uuid.toString()),
            Updates.set(UUIDProfile::data.name, profile.data)
        )
    }

    private suspend fun <T> doRead(uuid: UUID, key: PersistentDataKey<T>): T? {
        // NOTE(review): returns the key's default (not null) when no
        // document exists — confirm this asymmetry with other handlers
        // is intended.
        val profile = collection.find<UUIDProfile>(Filters.eq(UUIDProfile::uuid.name, uuid.toString()))
            .firstOrNull() ?: return key.defaultValue
        return profile.data[key.key.toString()] as? T?
    }

    private suspend fun getOrCreateDocument(uuid: UUID): UUIDProfile {
        val profile = collection.find<UUIDProfile>(Filters.eq(UUIDProfile::uuid.name, uuid.toString()))
            .firstOrNull()
        return if (profile == null) {
            val toInsert = UUIDProfile(
                uuid.toString(),
                mutableMapOf()
            )

            // Upsert so a concurrent creation cannot produce duplicates.
            collection.replaceOne(
                Filters.eq(UUIDProfile::uuid.name, uuid.toString()),
                toInsert,
                ReplaceOptions().upsert(true)
            )
            toInsert
        } else {
            profile
        }
    }

    // All instances of this handler type are considered equal.
    override fun equals(other: Any?): Boolean {
        if (this === other) {
            return true
        }

        return other is MongoDataHandler
    }

    override fun hashCode(): Int {
        return type.hashCode()
    }
}
|
||||
|
||||
/**
 * The MongoDB document shape: one document per profile, with all key
 * values stored in a single map.
 */
@Serializable
internal data class UUIDProfile(
    // Storing UUID as strings for serialization
    @SerialName("_id") val uuid: String,
    // Storing NamespacedKeys as strings for serialization
    val data: MutableMap<String, @Contextual Any>
)
|
||||
@@ -0,0 +1,169 @@
|
||||
package com.willfp.eco.internal.spigot.data.storage
|
||||
|
||||
import com.github.benmanes.caffeine.cache.Caffeine
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder
|
||||
import com.willfp.eco.core.config.ConfigType
|
||||
import com.willfp.eco.core.config.interfaces.Config
|
||||
import com.willfp.eco.core.config.readConfig
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.data.ProfileHandler
|
||||
import com.zaxxer.hikari.HikariConfig
|
||||
import com.zaxxer.hikari.HikariDataSource
|
||||
import org.jetbrains.exposed.dao.id.UUIDTable
|
||||
import org.jetbrains.exposed.sql.Column
|
||||
import org.jetbrains.exposed.sql.Database
|
||||
import org.jetbrains.exposed.sql.ResultRow
|
||||
import org.jetbrains.exposed.sql.SchemaUtils
|
||||
import org.jetbrains.exposed.sql.TextColumnType
|
||||
import org.jetbrains.exposed.sql.insert
|
||||
import org.jetbrains.exposed.sql.select
|
||||
import org.jetbrains.exposed.sql.transactions.transaction
|
||||
import org.jetbrains.exposed.sql.update
|
||||
import java.util.UUID
|
||||
import java.util.concurrent.Executors
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
/*
|
||||
Better than old MySQL data handler, but that's only because it's literally just dumping all the
|
||||
data into a single text column, containing the contents of the players profile as a Config.
|
||||
|
||||
Whatever. At least it works.
|
||||
*/
|
||||
|
||||
/**
 * Data handler backed by MySQL via Exposed + HikariCP.
 *
 * One row per profile; the whole profile is stored as a JSON blob in a
 * single text column (`json_data`).
 */
@Suppress("UNCHECKED_CAST")
class MySQLDataHandler(
    plugin: EcoSpigotPlugin,
    private val handler: ProfileHandler
) : DataHandler(HandlerType.MYSQL) {
    private val database: Database
    private val table = UUIDTable("eco_data")

    // Short-lived row cache to coalesce bursts of reads.
    // NOTE(review): reads may see up to ~3 seconds of stale data — confirm
    // this is acceptable for profile values.
    private val rows = Caffeine.newBuilder()
        .expireAfterWrite(3, TimeUnit.SECONDS)
        .build<UUID, ResultRow>()

    // Dedicated pool so statement execution doesn't block callers.
    private val threadFactory = ThreadFactoryBuilder().setNameFormat("eco-mysql-thread-%d").build()
    private val executor = Executors.newFixedThreadPool(plugin.configYml.getInt("mysql.threads"), threadFactory)

    // The JSON blob column; registered dynamically in init, hence the
    // lookup by name rather than a static column property.
    private val dataColumn: Column<String>
        get() = table.columns.first { it.name == "json_data" } as Column<String>

    init {
        val config = HikariConfig()
        config.driverClassName = "com.mysql.cj.jdbc.Driver"
        config.username = plugin.configYml.getString("mysql.user")
        config.password = plugin.configYml.getString("mysql.password")
        config.jdbcUrl = "jdbc:mysql://" +
                "${plugin.configYml.getString("mysql.host")}:" +
                "${plugin.configYml.getString("mysql.port")}/" +
                plugin.configYml.getString("mysql.database")
        config.maximumPoolSize = plugin.configYml.getInt("mysql.connections")

        database = Database.connect(HikariDataSource(config))

        transaction(database) {
            SchemaUtils.create(table)

            table.apply {
                registerColumn<String>("json_data", TextColumnType())
            }

            SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
        }
    }

    override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
        val data = getData(uuid)

        // Dispatch on the key type; unknown types read as null.
        val value: Any? = when (key.type) {
            PersistentDataKeyType.INT -> data.getIntOrNull(key.key.toString())
            PersistentDataKeyType.DOUBLE -> data.getDoubleOrNull(key.key.toString())
            PersistentDataKeyType.STRING -> data.getStringOrNull(key.key.toString())
            PersistentDataKeyType.BOOLEAN -> data.getBoolOrNull(key.key.toString())
            PersistentDataKeyType.STRING_LIST -> data.getStringsOrNull(key.key.toString())
            PersistentDataKeyType.CONFIG -> data.getSubsectionOrNull(key.key.toString())
            PersistentDataKeyType.BIG_DECIMAL -> data.getBigDecimalOrNull(key.key.toString())

            else -> null
        }

        return value as? T?
    }

    override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        // Read-modify-write of the whole profile blob.
        val data = getData(uuid)
        data.set(key.key.toString(), value)

        setData(uuid, data)
    }

    override fun saveKeysFor(uuid: UUID, keys: Map<PersistentDataKey<*>, Any>) {
        executor.submit {
            val data = getData(uuid)

            for ((key, value) in keys) {
                data.set(key.key.toString(), value)
            }

            doSetData(uuid, data)
        }
    }

    // Fetches (and caches) the profile row, creating it with an empty JSON
    // object on first access.
    private fun getData(uuid: UUID): Config {
        val plaintext = transaction(database) {
            val row = rows.get(uuid) {
                val row = table.select { table.id eq uuid }.limit(1).singleOrNull()

                if (row != null) {
                    row
                } else {
                    transaction(database) {
                        table.insert {
                            it[id] = uuid
                            it[dataColumn] = "{}"
                        }
                    }
                    table.select { table.id eq uuid }.limit(1).singleOrNull()
                }
            }

            row.getOrNull(dataColumn) ?: "{}"
        }

        return readConfig(plaintext, ConfigType.JSON)
    }

    private fun setData(uuid: UUID, config: Config) {
        executor.submit {
            doSetData(uuid, config)
        }
    }

    private fun doSetData(uuid: UUID, config: Config) {
        transaction(database) {
            table.update({ table.id eq uuid }) {
                it[dataColumn] = config.toPlaintext()
            }
        }
    }

    override fun initialize() {
        // NOTE(review): re-syncs the schema — presumably invoked after
        // startup once all columns are known; confirm against the caller.
        transaction(database) {
            SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
        }
    }

    // All instances of this handler type are considered equal.
    override fun equals(other: Any?): Boolean {
        if (this === other) {
            return true
        }

        return other is MySQLDataHandler
    }

    override fun hashCode(): Int {
        return type.hashCode()
    }
}
|
||||
@@ -0,0 +1,27 @@
|
||||
package com.willfp.eco.internal.spigot.data.storage
|
||||
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.internal.spigot.data.EcoProfile
|
||||
import com.willfp.eco.internal.spigot.data.ProfileHandler
|
||||
|
||||
/**
 * Periodically flushes changed profiles to the data handler.
 */
class ProfileSaver(
    private val plugin: EcoPlugin,
    private val handler: ProfileHandler
) {
    /** Starts the repeating save task. */
    fun startTicking() {
        val interval = plugin.configYml.getInt("save-interval").toLong()

        plugin.scheduler.runTimer(20, interval) {
            val changed = EcoProfile.CHANGE_MAP.iterator()

            while (changed.hasNext()) {
                val uuid = changed.next()
                changed.remove()

                // Skip profiles that were unloaded after being marked dirty.
                val profile = handler.accessLoadedProfile(uuid) ?: continue

                handler.saveKeysFor(uuid, profile.data.keys)
            }
        }
    }
}
|
||||
@@ -0,0 +1,67 @@
|
||||
package com.willfp.eco.internal.spigot.data.storage
|
||||
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKey
|
||||
import com.willfp.eco.core.data.keys.PersistentDataKeyType
|
||||
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
|
||||
import com.willfp.eco.internal.spigot.data.ProfileHandler
|
||||
import org.bukkit.NamespacedKey
|
||||
import java.util.UUID
|
||||
|
||||
/**
 * Data handler backed by the plugin's data.yml file.
 *
 * Values live under `player.<uuid>.<key>`.
 */
@Suppress("UNCHECKED_CAST")
class YamlDataHandler(
    plugin: EcoSpigotPlugin,
    private val handler: ProfileHandler
) : DataHandler(HandlerType.YAML) {
    private val dataYml = plugin.dataYml

    override fun save() {
        dataYml.save()
    }

    override fun saveAsync() {
        dataYml.saveAsync()
    }

    override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
        val path = "player.$uuid.${key.key}"

        // Separate `as T?` on each branch to prevent compiler warnings;
        // unknown key types read as null.
        return when (key.type) {
            PersistentDataKeyType.INT -> dataYml.getIntOrNull(path) as T?
            PersistentDataKeyType.DOUBLE -> dataYml.getDoubleOrNull(path) as T?
            PersistentDataKeyType.STRING -> dataYml.getStringOrNull(path) as T?
            PersistentDataKeyType.BOOLEAN -> dataYml.getBoolOrNull(path) as T?
            PersistentDataKeyType.STRING_LIST -> dataYml.getStringsOrNull(path) as T?
            PersistentDataKeyType.CONFIG -> dataYml.getSubsectionOrNull(path) as T?
            PersistentDataKeyType.BIG_DECIMAL -> dataYml.getBigDecimalOrNull(path) as T?

            else -> null
        }
    }

    override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        doWrite(uuid, key.key, value)
    }

    override fun saveKeysFor(uuid: UUID, keys: Map<PersistentDataKey<*>, Any>) {
        for ((key, value) in keys) {
            doWrite(uuid, key.key, value)
        }
    }

    private fun doWrite(uuid: UUID, key: NamespacedKey, value: Any) {
        dataYml.set("player.$uuid.$key", value)
    }

    // All instances of this handler type are considered equal.
    override fun equals(other: Any?): Boolean =
        this === other || other is YamlDataHandler

    override fun hashCode(): Int = type.hashCode()
}
|
||||
@@ -0,0 +1,54 @@
|
||||
package com.willfp.eco.internal.spigot.integrations.customblocks
|
||||
|
||||
import com.willfp.eco.core.EcoPlugin
|
||||
import com.willfp.eco.core.blocks.Blocks
|
||||
import com.willfp.eco.core.blocks.CustomBlock
|
||||
import com.willfp.eco.core.blocks.TestableBlock
|
||||
import com.willfp.eco.core.blocks.provider.BlockProvider
|
||||
import com.willfp.eco.core.integrations.customblocks.CustomBlocksIntegration
|
||||
import com.willfp.eco.util.namespacedKeyOf
|
||||
import io.th0rgal.oraxen.api.OraxenBlocks
|
||||
import io.th0rgal.oraxen.api.events.OraxenItemsLoadedEvent
|
||||
import org.bukkit.event.EventHandler
|
||||
import org.bukkit.event.Listener
|
||||
|
||||
/**
 * Custom blocks integration for Oraxen.
 *
 * The provider is registered only once Oraxen has loaded its items.
 */
class CustomBlocksOraxen(
    private val plugin: EcoPlugin
) : CustomBlocksIntegration, Listener {
    override fun registerProvider() {
        plugin.eventManager.registerListener(this)
    }

    override fun getPluginName(): String {
        return "Oraxen"
    }

    @EventHandler
    @Suppress("UNUSED_PARAMETER")
    fun onItemRegister(event: OraxenItemsLoadedEvent) {
        // Oraxen's block registry is only usable after this event fires.
        Blocks.registerBlockProvider(OraxenProvider())
    }

    /**
     * Resolves block keys in the `oraxen:` namespace to testable blocks.
     */
    private class OraxenProvider : BlockProvider("oraxen") {
        override fun provideForKey(key: String): TestableBlock? {
            // Only keys Oraxen recognises can be provided.
            if (!OraxenBlocks.isOraxenBlock(key)) {
                return null
            }

            val namespacedKey = namespacedKeyOf("oraxen", key)

            return CustomBlock(
                namespacedKey,
                { block ->
                    // NOTE(review): the match predicate is unimplemented
                    // and will throw NotImplementedError if ever invoked.
                    // TODO: Implement this
                    TODO("Not yet implemented")
                },
                { location ->
                    // Place the Oraxen block, then return the placed block.
                    OraxenBlocks.place(key, location)
                    location.block
                }
            )
        }
    }
}
|
||||
@@ -6,8 +6,8 @@
|
||||
|
||||
# How player/server data is saved:
|
||||
# yaml - Stored in data.yml: Good option for single-node servers (i.e. no BungeeCord/Velocity)
|
||||
# mysql - Standard database, great option for multi-node servers (i.e. BungeeCord/Velocity)
|
||||
# mongodb - Alternative database, great option for multi-node servers (i.e. BungeeCord/Velocity)
|
||||
# mongo - If you're running on a network (Bungee/Velocity), you should use MongoDB if you can.
|
||||
# mysql - The alternative to MongoDB. Because of how eco data works, MongoDB is the best option; but use this if you can't.
|
||||
data-handler: yaml
|
||||
|
||||
# If data should be migrated automatically when changing data handler.
|
||||
@@ -16,26 +16,25 @@ perform-data-migration: true
|
||||
mongodb:
|
||||
# The full MongoDB connection URL.
|
||||
url: ""
|
||||
|
||||
# The name of the database to use.
|
||||
database: eco
|
||||
|
||||
# The collection to use for player data.
|
||||
collection: profiles
|
||||
database: "eco"
|
||||
|
||||
mysql:
|
||||
# The table prefix to use for all tables.
|
||||
prefix: "eco_"
|
||||
|
||||
# How many threads to execute statements on. Higher numbers can be faster however
|
||||
# very high numbers can cause issues with OS configuration. If writes are taking
|
||||
# too long, increase this value.
|
||||
threads: 2
|
||||
# The maximum number of MySQL connections.
|
||||
connections: 10
|
||||
|
||||
# Connection details for MySQL.
|
||||
host: localhost
|
||||
port: 3306
|
||||
database: database
|
||||
user: username
|
||||
password: p4ssw0rd
|
||||
password: passy
|
||||
|
||||
yaml:
|
||||
autosave: true # If data should be saved automatically
|
||||
autosave-interval: 1800 # How often data should be saved (in seconds)
|
||||
|
||||
# How many ticks to wait between committing data to a database. This doesn't
|
||||
# affect yaml storage, only MySQL and MongoDB. By default, data is committed
|
||||
@@ -43,9 +42,6 @@ mysql:
|
||||
# would be committing once a second.
|
||||
save-interval: 1
|
||||
|
||||
# How many ticks to wait between autosaves for data.yml.
|
||||
autosave-interval: 36000 # 30 minutes
|
||||
|
||||
# Options to manage the conflict finder
|
||||
conflicts:
|
||||
whitelist: # Plugins that should never be marked as conflicts
|
||||
@@ -105,7 +101,7 @@ math-cache-ttl: 200
|
||||
# The time (in minutes) for literal patterns to be cached for. Higher values will lead to
|
||||
# faster evaluation times (less CPU usage) at the expense of slightly more memory usage and
|
||||
# less reactive values. (Do not change unless you are told to).
|
||||
literal-cache-ttl: 10
|
||||
literal-cache-ttl: 1
|
||||
|
||||
# If anonymous usage statistics should be tracked. This is very valuable information as it
|
||||
# helps understand how eco and other plugins are being used by logging player and server
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
version = 6.74.2
|
||||
version = 6.73.2
|
||||
kotlin.incremental.useClasspathSnapshot=false
|
||||
Reference in New Issue
Block a user