Added new data handlers
DataTypeSerializer.java (new file):

@@ -0,0 +1,43 @@
package com.willfp.eco.core.data.handlers;

import com.willfp.eco.core.data.keys.PersistentDataKey;
import com.willfp.eco.core.data.keys.PersistentDataKeyType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.UUID;

/**
 * Handles data read/write for a {@link com.willfp.eco.core.data.keys.PersistentDataKeyType} for a specific
 * data handler.
 */
public abstract class DataTypeSerializer<T> {
    /**
     * Create a new data type serializer.
     */
    protected DataTypeSerializer() {

    }

    /**
     * Read a value.
     *
     * @param uuid The uuid.
     * @param key The key.
     * @return The value.
     */
    @Nullable
    public abstract T readAsync(@NotNull final UUID uuid,
                                @NotNull final PersistentDataKey<T> key);

    /**
     * Write a value.
     *
     * @param uuid The uuid.
     * @param key The key.
     * @param value The value.
     */
    public abstract void writeAsync(@NotNull final UUID uuid,
                                    @NotNull final PersistentDataKey<T> key,
                                    @NotNull final T value);
}
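A minimal sketch (not part of this commit) of what implementing the DataTypeSerializer contract looks like: an in-memory serializer keyed by (uuid, key). The handler calls readAsync/writeAsync from its own executor, so an implementation only has to do the blocking I/O; the class name InMemorySerializer is hypothetical.

import com.willfp.eco.core.data.handlers.DataTypeSerializer
import com.willfp.eco.core.data.keys.PersistentDataKey
import java.util.UUID
import java.util.concurrent.ConcurrentHashMap

class InMemorySerializer<T : Any> : DataTypeSerializer<T>() {
    // Values stored per (uuid, key string); a stand-in for real storage.
    private val values = ConcurrentHashMap<Pair<UUID, String>, T>()

    override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? =
        values[uuid to key.key.toString()]

    override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        values[uuid to key.key.toString()] = value
    }
}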
PersistentDataHandler.java:

@@ -5,8 +5,16 @@ import com.willfp.eco.core.registry.Registrable;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public abstract class PersistentDataHandler implements Registrable {
    /**
@@ -14,6 +22,11 @@ public abstract class PersistentDataHandler implements Registrable {
     */
    private final String id;

    /**
     * The executor.
     */
    private final ExecutorService executor = Executors.newCachedThreadPool();

    /**
     * Create a new persistent data handler.
     *
@@ -23,9 +36,38 @@ public abstract class PersistentDataHandler implements Registrable {
        this.id = id;
    }

    @Override
    public @NotNull String getID() {
        return id;
    /**
     * Get all UUIDs with saved data.
     *
     * @return All saved UUIDs.
     */
    protected abstract Set<UUID> getSavedUUIDs();

    /**
     * Save to disk.
     * <p>
     * If write commits to disk, this method does not need to be overridden.
     * <p>
     * This method is called asynchronously.
     */
    protected void doSave() {
        // Save to disk
    }

    /**
     * If the handler should autosave.
     *
     * @return If the handler should autosave.
     */
    public boolean shouldAutosave() {
        return true;
    }

    /**
     * Save the data.
     */
    public final void save() {
        executor.submit(this::doSave);
    }

    /**
@@ -37,7 +79,18 @@ public abstract class PersistentDataHandler implements Registrable {
     * @return The value, or null if not found.
     */
    @Nullable
    public abstract <T> T read(@NotNull UUID uuid, @NotNull PersistentDataKey<T> key);
    public final <T> T read(@NotNull final UUID uuid,
                            @NotNull final PersistentDataKey<T> key) {
        DataTypeSerializer<T> serializer = key.getType().getSerializer(this);
        Future<T> future = executor.submit(() -> serializer.readAsync(uuid, key));

        try {
            return future.get();
        } catch (InterruptedException | ExecutionException e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Write a key to persistent data.
@@ -47,7 +100,12 @@ public abstract class PersistentDataHandler implements Registrable {
     * @param value The value.
     * @param <T> The type of the key.
     */
    public abstract <T> void write(@NotNull UUID uuid, @NotNull PersistentDataKey<T> key, @NotNull T value);
    public final <T> void write(@NotNull final UUID uuid,
                                @NotNull final PersistentDataKey<T> key,
                                @NotNull final T value) {
        DataTypeSerializer<T> serializer = key.getType().getSerializer(this);
        executor.submit(() -> serializer.writeAsync(uuid, key, value));
    }

    /**
     * Serialize data.
@@ -56,12 +114,74 @@ public abstract class PersistentDataHandler implements Registrable {
     * @return The serialized data.
     */
    @NotNull
    public abstract Set<SerializedProfile> serializeData(@NotNull final Set<PersistentDataKey<?>> keys);
    public final Set<SerializedProfile> serializeData(@NotNull final Set<PersistentDataKey<?>> keys) {
        Set<SerializedProfile> profiles = new HashSet<>();

        for (UUID uuid : getSavedUUIDs()) {
            Map<PersistentDataKey<?>, Object> data = new HashMap<>();

            for (PersistentDataKey<?> key : keys) {
                Object value = read(uuid, key);
                data.put(key, value);
            }

            profiles.add(new SerializedProfile(uuid, data));
        }

        return profiles;
    }

    /**
     * Load profile data.
     *
     * @param data The data.
     */
    public abstract void loadProfileData(@NotNull Set<SerializedProfile> data);
    @SuppressWarnings("unchecked")
    public final void loadProfileData(@NotNull Set<SerializedProfile> data) {
        for (SerializedProfile profile : data) {
            for (Map.Entry<PersistentDataKey<?>, Object> entry : profile.data().entrySet()) {
                PersistentDataKey<?> key = entry.getKey();
                Object value = entry.getValue();

                // This cast is safe because the data is serialized
                write(profile.uuid(), (PersistentDataKey<? super Object>) key, value);
            }
        }
    }

    /**
     * Await outstanding writes.
     */
    public final void awaitOutstandingWrites() throws InterruptedException {
        boolean success = executor.awaitTermination(15, TimeUnit.SECONDS);

        if (!success) {
            throw new InterruptedException("Failed to await outstanding writes");
        }
    }

    @Override
    public final @NotNull String getID() {
        return id;
    }

    @Override
    public final boolean equals(@Nullable final Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        PersistentDataHandler that = (PersistentDataHandler) obj;

        return id.equals(that.id);
    }

    @Override
    public final int hashCode() {
        return id.hashCode();
    }
}
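A hedged usage sketch (not from the commit) of the new final read/write methods: write() queues the serializer call on the handler's executor, while read() blocks on the resulting Future and returns null on failure. The names handler, playerId, and speedKey are hypothetical.

import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.keys.PersistentDataKey
import java.util.UUID

fun example(handler: PersistentDataHandler, playerId: UUID, speedKey: PersistentDataKey<Double>) {
    // Queued asynchronously on the handler's cached thread pool.
    handler.write(playerId, speedKey, 0.2)

    // Submitted to the same executor, then awaited.
    val current: Double? = handler.read(playerId, speedKey)
    println("Current speed: $current")
}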
PersistentDataKeyType.java:

@@ -1,12 +1,16 @@
package com.willfp.eco.core.data.keys;

import com.willfp.eco.core.config.interfaces.Config;
import com.willfp.eco.core.data.handlers.DataTypeSerializer;
import com.willfp.eco.core.data.handlers.PersistentDataHandler;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
@@ -61,18 +65,14 @@ public final class PersistentDataKeyType<T> {
    private final String name;

    /**
     * Get the name of the key type.
     *
     * @return The name.
     * The serializers for this key type.
     */
    public String name() {
        return name;
    }
    private final Map<PersistentDataHandler, DataTypeSerializer<T>> serializers = new HashMap<>();

    /**
     * Create new PersistentDataKeyType.
     *
     * @param name The name.
     * @param name The name.
     */
    private PersistentDataKeyType(@NotNull final String name) {
        VALUES.add(this);
@@ -80,6 +80,44 @@ public final class PersistentDataKeyType<T> {
        this.name = name;
    }

    /**
     * Get the name of the key type.
     *
     * @return The name.
     */
    @NotNull
    public String name() {
        return name;
    }

    /**
     * Register a serializer for this key type.
     *
     * @param handler The handler.
     * @param serializer The serializer.
     */
    public void registerSerializer(@NotNull final PersistentDataHandler handler,
                                   @NotNull final DataTypeSerializer<T> serializer) {
        this.serializers.put(handler, serializer);
    }

    /**
     * Get the serializer for a handler.
     *
     * @param handler The handler.
     * @return The serializer.
     */
    @NotNull
    public DataTypeSerializer<T> getSerializer(@NotNull final PersistentDataHandler handler) {
        DataTypeSerializer<T> serializer = this.serializers.get(handler);

        if (serializer == null) {
            throw new IllegalArgumentException("No serializer for handler: " + handler);
        }

        return serializer;
    }

    @Override
    public boolean equals(@Nullable final Object that) {
        if (this == that) {
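A minimal sketch (not part of the commit) of the registration flow these changes introduce: a handler registers one serializer per key type it supports, and the key type looks that serializer back up when the handler later reads or writes. The handler argument and the do-nothing serializer below are hypothetical.

import com.willfp.eco.core.data.handlers.DataTypeSerializer
import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import java.util.UUID

fun registerStringSupport(handler: PersistentDataHandler) {
    // Register a stub serializer for STRING keys against this handler.
    PersistentDataKeyType.STRING.registerSerializer(handler, object : DataTypeSerializer<String>() {
        override fun readAsync(uuid: UUID, key: PersistentDataKey<String>): String? = null
        override fun writeAsync(uuid: UUID, key: PersistentDataKey<String>, value: String) { /* no-op */ }
    })

    // Later lookups resolve the same instance; an unregistered pair would throw
    // IllegalArgumentException("No serializer for handler: ...").
    val serializer = PersistentDataKeyType.STRING.getSerializer(handler)
    println(serializer)
}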
LegacyMySQLPersistentDataHandler.kt (new file):

@@ -0,0 +1,113 @@
package com.willfp.eco.internal.spigot.data.handlers

import com.willfp.eco.core.config.ConfigType
import com.willfp.eco.core.config.Configs
import com.willfp.eco.core.config.interfaces.Config
import com.willfp.eco.core.config.readConfig
import com.willfp.eco.core.data.handlers.DataTypeSerializer
import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import eu.decentsoftware.holograms.api.utils.scheduler.S
import kotlinx.serialization.Contextual
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import org.jetbrains.exposed.dao.id.UUIDTable
import org.jetbrains.exposed.sql.Column
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.SchemaUtils
import org.jetbrains.exposed.sql.Table
import org.jetbrains.exposed.sql.Table.Dual.decimal
import org.jetbrains.exposed.sql.Table.Dual.double
import org.jetbrains.exposed.sql.Table.Dual.varchar
import org.jetbrains.exposed.sql.and
import org.jetbrains.exposed.sql.deleteWhere
import org.jetbrains.exposed.sql.insert
import org.jetbrains.exposed.sql.select
import org.jetbrains.exposed.sql.selectAll
import org.jetbrains.exposed.sql.transactions.transaction
import java.math.BigDecimal
import java.util.UUID

class LegacyMySQLPersistentDataHandler(
    plugin: EcoSpigotPlugin,
    config: Config
) : PersistentDataHandler("mysql_legacy") {
    private val dataSource = HikariDataSource(HikariConfig().apply {
        driverClassName = "com.mysql.cj.jdbc.Driver"
        username = config.getString("user")
        password = config.getString("password")
        jdbcUrl = "jdbc:mysql://" +
                "${config.getString("host")}:" +
                "${config.getString("port")}/" +
                config.getString("database")
        maximumPoolSize = config.getInt("connections")
    })

    private val database = Database.connect(dataSource)

    private val table = object : UUIDTable("eco_data") {
        val data = text("json_data")
    }

    init {
        transaction(database) {
            SchemaUtils.create(table)
        }

        PersistentDataKeyType.STRING.registerSerializer(this, LegacySerializer<String>())
        PersistentDataKeyType.BOOLEAN.registerSerializer(this, LegacySerializer<Boolean>())
        PersistentDataKeyType.INT.registerSerializer(this, LegacySerializer<Int>())
        PersistentDataKeyType.DOUBLE.registerSerializer(this, LegacySerializer<Double>())
        PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, LegacySerializer<BigDecimal>())
        PersistentDataKeyType.CONFIG.registerSerializer(this, LegacySerializer<Config>())
        PersistentDataKeyType.STRING_LIST.registerSerializer(this, LegacySerializer<List<String>>())
    }

    override fun getSavedUUIDs(): Set<UUID> {
        return transaction(database) {
            table.selectAll()
                .map { it[table.id] }
                .toSet()
        }.map { it.value }.toSet()
    }

    private inner class LegacySerializer<T : Any> : DataTypeSerializer<T>() {
        override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
            val json = transaction(database) {
                table.select { table.id eq uuid }
                    .limit(1)
                    .singleOrNull()
                    ?.get(table.data)
            }

            if (json == null) {
                return null
            }

            val data = readConfig(json, ConfigType.JSON)

            val value: Any? = when (key.type) {
                PersistentDataKeyType.INT -> data.getIntOrNull(key.key.toString())
                PersistentDataKeyType.DOUBLE -> data.getDoubleOrNull(key.key.toString())
                PersistentDataKeyType.STRING -> data.getStringOrNull(key.key.toString())
                PersistentDataKeyType.BOOLEAN -> data.getBoolOrNull(key.key.toString())
                PersistentDataKeyType.STRING_LIST -> data.getStringsOrNull(key.key.toString())
                PersistentDataKeyType.CONFIG -> data.getSubsectionOrNull(key.key.toString())
                PersistentDataKeyType.BIG_DECIMAL -> data.getBigDecimalOrNull(key.key.toString())

                else -> null
            }

            @Suppress("UNCHECKED_CAST")
            return value as? T?
        }

        override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            throw UnsupportedOperationException("Legacy MySQL does not support writing")
        }
    }
}
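The legacy handler above is read-only (writeAsync throws), which suggests it exists so old data can be migrated into one of the new handlers. A hedged sketch of that path using the serializeData/loadProfileData pair from PersistentDataHandler; legacy, target, and keys are hypothetical values supplied by the caller.

import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.keys.PersistentDataKey

fun migrate(legacy: PersistentDataHandler, target: PersistentDataHandler, keys: Set<PersistentDataKey<*>>) {
    // Read every saved profile out of the legacy storage...
    val profiles = legacy.serializeData(keys)

    // ...and write it back through the new handler's serializers.
    target.loadProfileData(profiles)
}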
MongoPersistentDataHandler.kt:

@@ -1,107 +1,112 @@
package com.willfp.eco.internal.spigot.data.handlers

import com.mongodb.client.model.Filters
import com.mongodb.client.model.ReplaceOptions
import com.mongodb.kotlin.client.coroutine.MongoClient
import com.willfp.eco.core.config.Configs
import com.willfp.eco.core.config.interfaces.Config
import com.willfp.eco.core.data.handlers.DataTypeSerializer
import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.handlers.SerializedProfile
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import java.util.UUID
import java.util.concurrent.Executors
import com.mongodb.kotlin.client.coroutine.MongoClient
import com.willfp.eco.core.config.Configs
import com.willfp.eco.internal.spigot.data.storage.UUIDProfile
import kotlinx.coroutines.flow.firstOrNull
import kotlinx.coroutines.flow.forEach
import kotlinx.coroutines.flow.toList
import kotlinx.coroutines.runBlocking
import kotlinx.serialization.Contextual
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import org.bson.Document
import java.math.BigDecimal
import java.util.UUID

class MongoPersistentDataHandler(
    config: Config,
    plugin: EcoSpigotPlugin
) : PersistentDataHandler("yaml") {
    plugin: EcoSpigotPlugin,
    config: Config
) : PersistentDataHandler("mongo") {
    private val client = MongoClient.create(config.getString("url"))
    private val database = client.getDatabase(config.getString("database"))

    private val url: String = config.getString("url") ?: error("MongoDB URL not found in config")
    private val databaseName: String = config.getString("database") ?: error("Database name not found in config")
    private val client = MongoClient.create(url)
    private val database = client.getDatabase(databaseName)
    // Collection name is set for backwards compatibility
    private val collection = database.getCollection<UUIDProfile>("uuidprofile")
    private val executor = Executors.newCachedThreadPool()

    override fun <T: Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
    init {
        PersistentDataKeyType.STRING.registerSerializer(this, MongoSerializer<String>())
        PersistentDataKeyType.BOOLEAN.registerSerializer(this, MongoSerializer<Boolean>())
        PersistentDataKeyType.INT.registerSerializer(this, MongoSerializer<Int>())
        PersistentDataKeyType.DOUBLE.registerSerializer(this, MongoSerializer<Double>())
        PersistentDataKeyType.STRING_LIST.registerSerializer(this, MongoSerializer<List<String>>())

        PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : MongoSerializer<BigDecimal>() {
            override fun convertToMongo(value: BigDecimal): Any {
                return value.toString()
            }

            override fun convertFromMongo(value: Any): BigDecimal {
                return BigDecimal(value.toString())
            }
        })

        PersistentDataKeyType.CONFIG.registerSerializer(this, object : MongoSerializer<Config>() {
            override fun convertToMongo(value: Config): Any {
                return value.toMap()
            }

            @Suppress("UNCHECKED_CAST")
            override fun convertFromMongo(value: Any): Config {
                return Configs.fromMap(value as Map<String, Any>)
            }
        })
    }

    override fun getSavedUUIDs(): Set<UUID> {
        return runBlocking {
            doRead(uuid, key)
            collection.find().toList().map { UUID.fromString(it.uuid) }.toSet()
        }
    }

    private suspend fun <T: Any> doRead(uuid: UUID, key: PersistentDataKey<T>): T? {
        val document = collection.find(Document("uuid", uuid.toString())).firstOrNull() ?: return null
        val data = document.data[key.key.toString()] as? T
    private open inner class MongoSerializer<T : Any> : DataTypeSerializer<T>() {
        override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
            return runBlocking {
                val profile = collection.find(Filters.eq("uuid", uuid.toString())).firstOrNull()
                    ?: return@runBlocking null

        return data
    }
                val value = profile.data[key.key.toString()]
                    ?: return@runBlocking null

    override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        executor.submit {
                convertFromMongo(value)
            }
        }

        protected open fun convertToMongo(value: T): Any {
            return value
        }

        override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            runBlocking {
                doWrite(uuid, key, value)
                val profile = collection.find(Filters.eq("uuid", uuid.toString())).firstOrNull()
                    ?: UUIDProfile(uuid.toString(), mutableMapOf())

                profile.data[key.key.toString()] = convertToMongo(value)

                collection.replaceOne(
                    Filters.eq("uuid", uuid.toString()),
                    profile,
                    ReplaceOptions().upsert(true)
                )
            }
        }
    }

    private suspend fun <T : Any> doWrite(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        val document = collection.find(Document("uuid", uuid.toString())).firstOrNull() ?: return null
        document.data[key.key.toString()] = value

        collection.replaceOne(Document("uuid", uuid.toString()), document)
    }

    override fun serializeData(keys: Set<PersistentDataKey<*>>): Set<SerializedProfile> {
        val profiles = mutableSetOf<SerializedProfile>()

        collection.find().forEach { document ->
            val uuid = UUID.fromString(document.getString("uuid"))
            val data = document.get("data") as Document
            val profileData = keys.associateWith { key ->
                when (key.type) {
                    PersistentDataKeyType.STRING -> data.getString(key.key.key)
                    PersistentDataKeyType.BOOLEAN -> data.getBoolean(key.key.key)
                    PersistentDataKeyType.INT -> data.getInteger(key.key.key)
                    PersistentDataKeyType.DOUBLE -> data.getDouble(key.key.key)
                    PersistentDataKeyType.STRING_LIST -> data.getList(key.key.key, String::class.java)
                    PersistentDataKeyType.BIG_DECIMAL -> data.getDecimal128(key.key.key)?.bigDecimalValue()
                    PersistentDataKeyType.CONFIG -> data.get(key.key.key)
                    else -> null
                } ?: key.defaultValue
            }

            profiles.add(SerializedProfile(uuid, profileData as Map<PersistentDataKey<*>, Any>))
        }

        return profiles
    }

    override fun loadProfileData(data: Set<SerializedProfile>) {
        data.forEach { profile ->
            val document = Document("uuid", profile.uuid.toString())
            val profileData = Document()

            profile.data.forEach { (key, value) ->
                profileData.put(key.key.key, value)
            }

            document.put("data", profileData)
            collection.replaceOne(Document("uuid", profile.uuid.toString()), document, com.mongodb.client.model.ReplaceOptions().upsert(true))
        protected open fun convertFromMongo(value: Any): T {
            @Suppress("UNCHECKED_CAST")
            return value as T
        }
    }

    @Serializable
    internal data class UUIDProfile(
    private data class UUIDProfile(
        // Storing UUID as strings for serialization
        @SerialName("_id") val uuid: String,

        // Storing NamespacedKeys as strings for serialization
        val data: MutableMap<String, @Contextual Any>
    )
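Illustrative only (not from the commit): roughly what a stored profile looks like under the UUIDProfile shape above, where _id is the player UUID as a string and data maps key strings to already-converted values (for example, a BigDecimal stored as a string by convertToMongo). The key names below are hypothetical.

val exampleDocument = mapOf(
    "_id" to "00000000-0000-0000-0000-000000000001",
    "data" to mapOf(
        "some_plugin:points" to 12,          // stored directly
        "some_plugin:balance" to "1234.56"   // BigDecimal converted to a string
    )
)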
MySQLPersistentDataHandler.kt (new file):

@@ -0,0 +1,235 @@
package com.willfp.eco.internal.spigot.data.handlers

import com.willfp.eco.core.config.ConfigType
import com.willfp.eco.core.config.Configs
import com.willfp.eco.core.config.interfaces.Config
import com.willfp.eco.core.config.readConfig
import com.willfp.eco.core.data.handlers.DataTypeSerializer
import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import eu.decentsoftware.holograms.api.utils.scheduler.S
import kotlinx.coroutines.flow.toList
import kotlinx.coroutines.runBlocking
import kotlinx.serialization.Contextual
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import org.jetbrains.exposed.dao.id.UUIDTable
import org.jetbrains.exposed.sql.Column
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.SchemaUtils
import org.jetbrains.exposed.sql.SqlExpressionBuilder.eq
import org.jetbrains.exposed.sql.Table
import org.jetbrains.exposed.sql.TextColumnType
import org.jetbrains.exposed.sql.and
import org.jetbrains.exposed.sql.deleteWhere
import org.jetbrains.exposed.sql.insert
import org.jetbrains.exposed.sql.select
import org.jetbrains.exposed.sql.selectAll
import org.jetbrains.exposed.sql.transactions.transaction
import java.math.BigDecimal
import java.util.UUID

class MySQLPersistentDataHandler(
    plugin: EcoSpigotPlugin,
    config: Config
) : PersistentDataHandler("mysql") {
    private val dataSource = HikariDataSource(HikariConfig().apply {
        driverClassName = "com.mysql.cj.jdbc.Driver"
        username = config.getString("user")
        password = config.getString("password")
        jdbcUrl = "jdbc:mysql://" +
                "${config.getString("host")}:" +
                "${config.getString("port")}/" +
                config.getString("database")
        maximumPoolSize = config.getInt("connections")
    })

    private val prefix = config.getString("prefix")

    private val database = Database.connect(dataSource)

    init {
        PersistentDataKeyType.STRING.registerSerializer(this, object : DirectStoreSerializer<String>() {
            override val table = object : KeyTable<String>("string") {
                override val value = varchar("value", 128)
            }
        })

        PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : DirectStoreSerializer<Boolean>() {
            override val table = object : KeyTable<Boolean>("boolean") {
                override val value = bool("value")
            }
        })

        PersistentDataKeyType.INT.registerSerializer(this, object : DirectStoreSerializer<Int>() {
            override val table = object : KeyTable<Int>("int") {
                override val value = integer("value")
            }
        })

        PersistentDataKeyType.DOUBLE.registerSerializer(this, object : DirectStoreSerializer<Double>() {
            override val table = object : KeyTable<Double>("double") {
                override val value = double("value")
            }
        })

        PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : DirectStoreSerializer<BigDecimal>() {
            override val table = object : KeyTable<BigDecimal>("big_decimal") {
                // 34 digits of precision, 4 digits of scale
                override val value = decimal("value", 34, 4)
            }
        })

        PersistentDataKeyType.CONFIG.registerSerializer(this, object : SingleValueSerializer<Config, String>() {
            override val table = object : KeyTable<String>("config") {
                override val value = text("value")
            }

            override fun convertFromStored(value: String): Config {
                return readConfig(value, ConfigType.JSON)
            }

            override fun convertToStored(value: Config): String {
                // Store config as JSON
                return if (value.type == ConfigType.JSON) {
                    value.toPlaintext()
                } else {
                    Configs.fromMap(value.toMap(), ConfigType.JSON).toPlaintext()
                }
            }
        })

        PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : MultiValueSerializer<String>() {
            override val table = object : ListKeyTable<String>("string_list") {
                override val value = varchar("value", 128)
            }
        })
    }

    override fun getSavedUUIDs(): Set<UUID> {
        val savedUUIDs = mutableSetOf<UUID>()

        for (keyType in PersistentDataKeyType.values()) {
            val serializer = keyType.getSerializer(this) as MySQLSerializer<*>
            savedUUIDs.addAll(serializer.getSavedUUIDs())
        }

        return savedUUIDs
    }

    private abstract inner class MySQLSerializer<T : Any> : DataTypeSerializer<T>() {
        protected abstract val table: ProfileTable

        init {
            transaction(database) {
                SchemaUtils.create(table)
            }
        }

        fun getSavedUUIDs(): Set<UUID> {
            return transaction(database) {
                table.selectAll().map { it[table.uuid] }.toSet()
            }
        }
    }

    // T is the key type
    // S is the stored value type
    private abstract inner class SingleValueSerializer<T : Any, S: Any> : MySQLSerializer<T>() {
        abstract override val table: KeyTable<S>

        abstract fun convertToStored(value: T): S
        abstract fun convertFromStored(value: S): T

        override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
            val stored = transaction(database) {
                table.select { (table.uuid eq uuid) and (table.key eq key.key.toString()) }
                    .limit(1)
                    .singleOrNull()
                    ?.get(table.value)
            }

            return stored?.let { convertFromStored(it) }
        }

        override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            transaction(database) {
                table.insert {
                    it[table.uuid] = uuid
                    it[table.key] = key.key.toString()
                    it[table.value] = convertToStored(value)
                }
            }
        }
    }

    private abstract inner class DirectStoreSerializer<T: Any> : SingleValueSerializer<T, T>() {
        override fun convertToStored(value: T): T {
            return value
        }

        override fun convertFromStored(value: T): T {
            return value
        }
    }

    private abstract inner class MultiValueSerializer<T: Any> : MySQLSerializer<List<T>>() {
        abstract override val table: ListKeyTable<T>

        override fun readAsync(uuid: UUID, key: PersistentDataKey<List<T>>): List<T>? {
            val stored = transaction(database) {
                table.select { (table.uuid eq uuid) and (table.key eq key.key.toString()) }
                    .orderBy(table.index)
                    .map { it[table.value] }
            }

            return stored
        }

        override fun writeAsync(uuid: UUID, key: PersistentDataKey<List<T>>, value: List<T>) {
            transaction(database) {
                table.deleteWhere { (table.uuid eq uuid) and (table.key eq key.key.toString()) }

                value.forEachIndexed { index, t ->
                    table.insert {
                        it[table.uuid] = uuid
                        it[table.key] = key.key.toString()
                        it[table.index] = index
                        it[table.value] = t
                    }
                }
            }
        }
    }

    private abstract inner class ProfileTable(name: String) : Table(prefix + name) {
        val uuid = uuid("uuid")
    }

    private abstract inner class KeyTable<T>(name: String) : ProfileTable(name) {
        val key = varchar("key", 128)
        abstract val value: Column<T>

        override val primaryKey = PrimaryKey(uuid, key)

        init {
            uniqueIndex()
        }
    }

    private abstract inner class ListKeyTable<T>(name: String) : ProfileTable(name) {
        val key = varchar("key", 128)
        val index = integer("index")
        abstract val value: Column<T>

        override val primaryKey = PrimaryKey(uuid, key, index)

        init {
            uniqueIndex()
        }
    }
}
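A hedged sketch (not from the commit) of what writing a string-list key through this handler does with the ListKeyTable above: MultiValueSerializer.writeAsync deletes any existing rows for the (uuid, key) pair, then inserts one row per element with its index. handler and tagsKey are hypothetical.

import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.keys.PersistentDataKey
import java.util.UUID

fun writeTags(handler: PersistentDataHandler, uuid: UUID, tagsKey: PersistentDataKey<List<String>>) {
    handler.write(uuid, tagsKey, listOf("a", "b", "c"))
    // Resulting rows in the string_list table (name prefixed with the configured "prefix"):
    //   uuid   | key           | index | value
    //   <uuid> | <tagsKey.key> | 0     | a
    //   <uuid> | <tagsKey.key> | 1     | b
    //   <uuid> | <tagsKey.key> | 2     | c
}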
YamlPersistentDataHandler.kt:

@@ -1,10 +1,13 @@
package com.willfp.eco.internal.spigot.data.handlers

import com.willfp.eco.core.config.interfaces.Config
import com.willfp.eco.core.data.handlers.DataTypeSerializer
import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.handlers.SerializedProfile
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import java.math.BigDecimal
import java.util.UUID

class YamlPersistentDataHandler(
@@ -12,52 +15,59 @@ class YamlPersistentDataHandler(
) : PersistentDataHandler("yaml") {
    private val dataYml = plugin.dataYml

    @Suppress("UNCHECKED_CAST")
    override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
        // Separate `as T?` for each branch to prevent compiler warnings.
        val value = when (key.type) {
            PersistentDataKeyType.INT -> dataYml.getIntOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.DOUBLE -> dataYml.getDoubleOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.STRING -> dataYml.getStringOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.BOOLEAN -> dataYml.getBoolOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.STRING_LIST -> dataYml.getStringsOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.CONFIG -> dataYml.getSubsectionOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.BIG_DECIMAL -> dataYml.getBigDecimalOrNull("player.$uuid.${key.key}") as T?
    init {
        PersistentDataKeyType.STRING.registerSerializer(this, object : YamlSerializer<String>() {
            override fun read(config: Config, key: String) = config.getStringOrNull(key)
        })

            else -> null
        PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : YamlSerializer<Boolean>() {
            override fun read(config: Config, key: String) = config.getBoolOrNull(key)
        })

        PersistentDataKeyType.INT.registerSerializer(this, object : YamlSerializer<Int>() {
            override fun read(config: Config, key: String) = config.getIntOrNull(key)
        })

        PersistentDataKeyType.DOUBLE.registerSerializer(this, object : YamlSerializer<Double>() {
            override fun read(config: Config, key: String) = config.getDoubleOrNull(key)
        })

        PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : YamlSerializer<List<String>>() {
            override fun read(config: Config, key: String) = config.getStringsOrNull(key)
        })

        PersistentDataKeyType.CONFIG.registerSerializer(this, object : YamlSerializer<Config>() {
            override fun read(config: Config, key: String) = config.getSubsectionOrNull(key)
        })

        PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : YamlSerializer<BigDecimal>() {
            override fun read(config: Config, key: String) = config.getBigDecimalOrNull(key)
        })
    }

    override fun getSavedUUIDs(): Set<UUID> {
        return dataYml.getSubsection("player").getKeys(false)
            .map { UUID.fromString(it) }
            .toSet()
    }

    override fun shouldAutosave(): Boolean {
        return true
    }

    override fun doSave() {
        dataYml.save()
    }

    private abstract inner class YamlSerializer<T: Any>: DataTypeSerializer<T>() {
        protected abstract fun read(config: Config, key: String): T?

        final override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
            return read(dataYml, "player.$uuid.${key.key}")
        }

        return value
    }

    override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        dataYml.set("player.$uuid.$key", value)
    }

    override fun serializeData(keys: Set<PersistentDataKey<*>>): Set<SerializedProfile> {
        val profiles = mutableSetOf<SerializedProfile>()
        val uuids = dataYml.getSubsection("player").getKeys(false).map { UUID.fromString(it) }

        for (uuid in uuids) {
            val data = mutableMapOf<PersistentDataKey<*>, Any>()

            for (key in keys) {
                data[key] = read(uuid, key) ?: continue
            }

            profiles.add(SerializedProfile(uuid, data))
        }

        return profiles
    }

    override fun loadProfileData(data: Set<SerializedProfile>) {
        for (profile in data) {
            for ((key, value) in profile.data) {
                // Dirty cast, but it's fine because we know it's the same type
                @Suppress("UNCHECKED_CAST")
                write(profile.uuid, key as PersistentDataKey<Any>, value as Any)
            }
        final override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            dataYml.set("player.$uuid.${key.key}", value)
        }
    }
}
Plugin configuration (mongodb/mysql section):

@@ -20,12 +20,13 @@ mongodb:
  database: "eco"

mysql:
  # How many threads to execute statements on. Higher numbers can be faster however
  # very high numbers can cause issues with OS configuration. If writes are taking
  # too long, increase this value.
  threads: 2
  # The table prefix to use for all tables.
  prefix: "eco_"

  # The maximum number of MySQL connections.
  connections: 10

  # Connection details for MySQL.
  host: localhost
  port: 3306
  database: database