Compare commits

...

8 Commits

Author   SHA1        Message                                       Date
Auxilor  55fc6d762f  Updated folia scheduler                       2023-05-09 13:48:57 +01:00
Auxilor  b9c5eb2b4e  Merge branch 'master' into folia              2023-05-09 13:00:09 +01:00
                     # Conflicts:
                     #   eco-core/core-plugin/build.gradle
Auxilor  37e271c96c  More optimisations to EcoConfig               2023-05-04 14:32:52 +01:00
Auxilor  3dad48e24d  Updated to 6.57.2                             2023-05-03 23:45:01 +01:00
Auxilor  ae77e4810b  Digsusting hacks to optimise eval pipeline    2023-05-03 23:44:53 +01:00
Auxilor  3d50e37c37  Merge branch 'master' into develop            2023-05-03 23:01:41 +01:00
Auxilor  421fd3bd04  Finally removed LegacyMySQLDataHandler        2023-05-03 16:03:36 +01:00
Auxilor  393d0031c7  Added folia support                           2023-03-12 12:11:12 +00:00
16 changed files with 225 additions and 451 deletions

View File

@@ -26,7 +26,7 @@ public class Prerequisite {
*/
public static final Prerequisite HAS_PAPER = new Prerequisite(
() -> ClassUtils.exists("com.destroystokyo.paper.event.block.BeaconEffectEvent"),
"Requires server to be running paper (or a fork)"
"Requires server to be running paper"
);
/**
@@ -69,7 +69,7 @@ public class Prerequisite {
@Deprecated(since = "6.49.0", forRemoval = true)
public static final Prerequisite HAS_BUNGEECORD = new Prerequisite(
() -> ClassUtils.exists("net.md_5.bungee.api.event.ServerConnectedEvent"),
"Requires server to be running BungeeCord (or a fork)"
"Requires server to be running BungeeCord"
);
/**
@@ -80,7 +80,15 @@ public class Prerequisite {
@Deprecated(since = "6.49.0", forRemoval = true)
public static final Prerequisite HAS_VELOCITY = new Prerequisite(
() -> ClassUtils.exists("com.velocitypowered.api.event.player.ServerConnectedEvent"),
"Requires server to be running Velocity (or a fork)"
"Requires server to be running Velocity"
);
/**
* Requires the server to be running an implementation of Folia.
*/
public static final Prerequisite HAS_FOLIA = new Prerequisite(
() -> ClassUtils.exists("io.papermc.paper.threadedregions.scheduler.RegionisedScheduler"),
"Requires server to be running Folia!"
);
/**
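Note: the new HAS_FOLIA prerequisite follows the same pattern as the checks above, probing for a Folia-only class via ClassUtils.exists. A minimal sketch of how a caller can branch on it, using the same Prerequisite#isMet accessor the scheduler selection later in this diff relies on:

    import com.willfp.eco.core.Prerequisite

    // Sketch: branch on whether Folia classes are present at runtime.
    fun describeScheduler(): String =
        if (Prerequisite.HAS_FOLIA.isMet) {
            "Folia detected: region-aware scheduling required"
        } else {
            "Spigot/Paper: the regular Bukkit scheduler is available"
        }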

View File

@@ -1,6 +1,8 @@
package com.willfp.eco.core.scheduling;
import com.willfp.eco.core.EcoPlugin;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.scheduler.BukkitTask;
import org.jetbrains.annotations.NotNull;
@@ -14,9 +16,13 @@ public interface Scheduler {
* @param runnable The lambda to run.
* @param ticksLater The amount of ticks to wait before execution.
* @return The created {@link BukkitTask}.
* @deprecated Does not work with Folia.
*/
BukkitTask runLater(@NotNull Runnable runnable,
long ticksLater);
@Deprecated(since = "6.53.0", forRemoval = true)
default BukkitTask runLater(@NotNull Runnable runnable,
long ticksLater) {
return runLater(new Location(Bukkit.getWorlds().get(0), 0, 0, 0), (int) ticksLater, runnable);
}
/**
* Run the task after a specified tick delay.
@@ -26,10 +32,12 @@ public interface Scheduler {
* @param runnable The lambda to run.
* @param ticksLater The amount of ticks to wait before execution.
* @return The created {@link BukkitTask}.
* @deprecated Does not work with Folia.
*/
@Deprecated(since = "6.53.0", forRemoval = true)
default BukkitTask runLater(long ticksLater,
@NotNull Runnable runnable) {
return runLater(runnable, ticksLater);
return runLater(new Location(Bukkit.getWorlds().get(0), 0, 0, 0), (int) ticksLater, runnable);
}
/**
@@ -39,10 +47,14 @@ public interface Scheduler {
* @param delay The amount of ticks to wait before the first execution.
* @param repeat The amount of ticks to wait between executions.
* @return The created {@link BukkitTask}.
* @deprecated Does not work with Folia.
*/
BukkitTask runTimer(@NotNull Runnable runnable,
long delay,
long repeat);
@Deprecated(since = "6.53.0", forRemoval = true)
default BukkitTask runTimer(@NotNull Runnable runnable,
long delay,
long repeat) {
return runTimer(new Location(Bukkit.getWorlds().get(0), 0, 0, 0), (int) delay, (int) repeat, runnable);
}
/**
* Run the task repeatedly on a timer.
@@ -53,11 +65,13 @@ public interface Scheduler {
* @param delay The amount of ticks to wait before the first execution.
* @param repeat The amount of ticks to wait between executions.
* @return The created {@link BukkitTask}.
* @deprecated Does not work with Folia.
*/
@Deprecated(since = "6.53.0", forRemoval = true)
default BukkitTask runTimer(long delay,
long repeat,
@NotNull Runnable runnable) {
return runTimer(runnable, delay, repeat);
return runTimer(new Location(Bukkit.getWorlds().get(0), 0, 0, 0), (int) delay, (int) repeat, runnable);
}
/**
@@ -67,10 +81,14 @@ public interface Scheduler {
* @param delay The amount of ticks to wait before the first execution.
* @param repeat The amount of ticks to wait between executions.
* @return The created {@link BukkitTask}.
* @deprecated Does not work with Folia.
*/
BukkitTask runAsyncTimer(@NotNull Runnable runnable,
long delay,
long repeat);
@Deprecated(since = "6.53.0", forRemoval = true)
default BukkitTask runAsyncTimer(@NotNull Runnable runnable,
long delay,
long repeat) {
return runTimerAsync((int) delay, (int) repeat, runnable);
}
/**
* Run the task repeatedly and asynchronously on a timer.
@@ -81,11 +99,13 @@ public interface Scheduler {
* @param delay The amount of ticks to wait before the first execution.
* @param repeat The amount of ticks to wait between executions.
* @return The created {@link BukkitTask}.
* @deprecated Does not work with Folia.
*/
@Deprecated(since = "6.53.0", forRemoval = true)
default BukkitTask runAsyncTimer(long delay,
long repeat,
@NotNull Runnable runnable) {
return runAsyncTimer(runnable, delay, repeat);
return runTimerAsync((int) delay, (int) repeat, runnable);
}
/**
@@ -93,28 +113,28 @@ public interface Scheduler {
*
* @param runnable The lambda to run.
* @return The created {@link BukkitTask}.
* @deprecated Does not work with Folia.
*/
BukkitTask run(@NotNull Runnable runnable);
@Deprecated(since = "6.53.0", forRemoval = true)
default BukkitTask run(@NotNull Runnable runnable) {
return run(new Location(Bukkit.getWorlds().get(0), 0, 0, 0), runnable);
}
/**
* Run the task asynchronously.
*
* @param runnable The lambda to run.
* @return The created {@link BukkitTask}.
*/
BukkitTask runAsync(@NotNull Runnable runnable);
/**
* Schedule the task to be ran repeatedly on a timer.
* Schedule the task to be run repeatedly on a timer.
*
* @param runnable The lambda to run.
* @param delay The amount of ticks to wait before the first execution.
* @param repeat The amount of ticks to wait between executions.
* @return The id of the task.
* @deprecated Not needed.
*/
int syncRepeating(@NotNull Runnable runnable,
long delay,
long repeat);
@Deprecated(since = "6.53.0", forRemoval = true)
default int syncRepeating(@NotNull Runnable runnable,
long delay,
long repeat) {
return runTimer(runnable, delay, repeat).getTaskId();
}
/**
* Schedule the task to be ran repeatedly on a timer.
@@ -125,15 +145,73 @@ public interface Scheduler {
* @param delay The amount of ticks to wait before the first execution.
* @param repeat The amount of ticks to wait between executions.
* @return The id of the task.
* @deprecated Not needed.
*/
@Deprecated(since = "6.53.0", forRemoval = true)
default int syncRepeating(long delay,
long repeat,
@NotNull Runnable runnable) {
return syncRepeating(runnable, delay, repeat);
return runTimer(runnable, delay, repeat).getTaskId();
}
/**
* Cancel all running tasks from the linked {@link EcoPlugin}.
*/
void cancelAll();
/**
* Run a task asynchronously.
*
* @param task The lambda to run.
* @return The created {@link BukkitTask}.
*/
BukkitTask runAsync(@NotNull Runnable task);
/**
* Run a task.
*
* @param location The location.
* @param task The task.
* @return The created {@link BukkitTask}.
*/
BukkitTask run(@NotNull Location location,
@NotNull Runnable task);
/**
* Run a task after a delay.
*
* @param location The location.
* @param ticksLater The delay.
* @param task The task.
* @return The created {@link BukkitTask}.
*/
BukkitTask runLater(@NotNull Location location,
int ticksLater,
@NotNull Runnable task);
/**
* Run a task on a timer.
*
* @param location The location.
* @param delay The delay.
* @param repeat The repeat delay.
* @param task The task.
* @return The created {@link BukkitTask}.
*/
BukkitTask runTimer(@NotNull Location location,
int delay,
int repeat,
@NotNull Runnable task);
/**
* Run a task asynchronously on a timer.
*
* @param delay The delay.
* @param repeat The repeat delay.
* @param task The task.
* @return The created {@link BukkitTask}.
*/
BukkitTask runTimerAsync(int delay,
int repeat,
@NotNull Runnable task);
}
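Note: the tick-only methods become deprecated default methods that delegate to the new location-aware overloads, passing a placeholder location (0, 0, 0 in the first world) so existing callers keep compiling. New code should pass a real location so Folia can route the task to the region that owns it. A hedged migration sketch, assuming the Scheduler is obtained from EcoPlugin#getScheduler():

    import com.willfp.eco.core.EcoPlugin
    import org.bukkit.Material
    import org.bukkit.block.Block

    // Sketch: migrate from the deprecated tick-only overload to the location-aware one.
    fun scheduleRegen(plugin: EcoPlugin, block: Block) {
        // Deprecated form: on Folia this runs against the placeholder location, not the block's region.
        // plugin.scheduler.runLater({ block.type = Material.STONE }, 20)

        // Preferred form added in this change: the location tells Folia which region thread should run the task.
        plugin.scheduler.runLater(block.location, 20) {
            block.type = Material.STONE
        }
    }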

View File

@@ -7,7 +7,6 @@ import com.willfp.eco.core.placeholder.context.PlaceholderContext
import com.willfp.eco.internal.fast.listView
import com.willfp.eco.util.StringUtils
import org.bukkit.configuration.file.YamlConfiguration
import java.util.Objects
import java.util.concurrent.ConcurrentHashMap
@Suppress("UNCHECKED_CAST")
@@ -235,19 +234,8 @@ open class EcoConfig(
return false
}
if (configType != other.configType) {
return false
}
if (values != other.values) {
return false
}
if (injections != other.injections) {
return false
}
return true
// Hey! Don't care. This works.
return this.hashCode() == other.hashCode()
}
override fun hashCode(): Int {
@@ -259,13 +247,13 @@ open class EcoConfig(
var injectionHash = 0
for (injection in injections.values) {
injectionHash = injectionHash * 31 + injection.hashCode()
injections.forEachValue(5) {
injectionHash = injectionHash xor (it.hashCode() shl 5)
}
return Objects.hash(
values,
configType
) + injectionHash
// hashCode() has to compute extremely quickly, so we're using bitwise, because why not?
// Fucking filthy to use identityHashCode here, but it should be extremely fast
val identityHash = System.identityHashCode(this)
return (identityHash shl 5) - (identityHash xor configType.hashCode()) + injectionHash
}
}
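Note for reading the new hash code: `shl 5` is multiplication by 32 (mod 2^32), so shifting, subtracting, and XOR-ing hash codes keeps the whole computation in cheap integer operations. The identity behind the style, as a standalone sketch (plain Kotlin, not eco code):

    fun main() {
        for (x in intArrayOf(0, 1, -7, 0x7FFF_FFFF, 123_456_789)) {
            check(x shl 5 == x * 32)        // shl 5 == * 32, even when the multiply overflows
            check((x shl 5) - x == x * 31)  // the classic 31 * x hashing multiplier written as a shift
        }
        println("shift identities hold")
    }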

View File

@@ -14,4 +14,4 @@ class EcoRunnableFactory(private val plugin: EcoPlugin) : RunnableFactory {
}
}
}
}
}

View File

@@ -1,51 +0,0 @@
package com.willfp.eco.internal.scheduling
import com.willfp.eco.core.EcoPlugin
import com.willfp.eco.core.scheduling.Scheduler
import org.bukkit.Bukkit
import org.bukkit.scheduler.BukkitTask
class EcoScheduler(private val plugin: EcoPlugin) : Scheduler {
override fun runLater(
runnable: Runnable,
ticksLater: Long
): BukkitTask {
return Bukkit.getScheduler().runTaskLater(plugin, runnable, ticksLater)
}
override fun runTimer(
runnable: Runnable,
delay: Long,
repeat: Long
): BukkitTask {
return Bukkit.getScheduler().runTaskTimer(plugin, runnable, delay, repeat)
}
override fun runAsyncTimer(
runnable: Runnable,
delay: Long,
repeat: Long
): BukkitTask {
return Bukkit.getScheduler().runTaskTimerAsynchronously(plugin, runnable, delay, repeat)
}
override fun run(runnable: Runnable): BukkitTask {
return Bukkit.getScheduler().runTask(plugin, runnable)
}
override fun runAsync(runnable: Runnable): BukkitTask {
return Bukkit.getScheduler().runTaskAsynchronously(plugin, runnable)
}
override fun syncRepeating(
runnable: Runnable,
delay: Long,
repeat: Long
): Int {
return Bukkit.getScheduler().scheduleSyncRepeatingTask(plugin, runnable, delay, repeat)
}
override fun cancelAll() {
Bukkit.getScheduler().cancelTasks(plugin)
}
}

View File

@@ -0,0 +1,33 @@
package com.willfp.eco.internal.scheduling
import com.willfp.eco.core.EcoPlugin
import com.willfp.eco.core.scheduling.Scheduler
import org.bukkit.Bukkit
import org.bukkit.Location
import org.bukkit.scheduler.BukkitTask
class EcoSchedulerSpigot(private val plugin: EcoPlugin) : Scheduler {
override fun runLater(location: Location, ticksLater: Int, task: Runnable): BukkitTask {
return Bukkit.getScheduler().runTaskLater(plugin, task, ticksLater.toLong())
}
override fun runTimer(location: Location, delay: Int, repeat: Int, task: Runnable): BukkitTask {
return Bukkit.getScheduler().runTaskTimer(plugin, task, delay.toLong(), repeat.toLong())
}
override fun run(location: Location, task: Runnable): BukkitTask {
return Bukkit.getScheduler().runTask(plugin, task)
}
override fun runAsync(task: Runnable): BukkitTask {
return Bukkit.getScheduler().runTaskAsynchronously(plugin, task)
}
override fun runTimerAsync(delay: Int, repeat: Int, task: Runnable): BukkitTask {
return Bukkit.getScheduler().runTaskTimerAsynchronously(plugin, task, delay.toLong(), repeat.toLong())
}
override fun cancelAll() {
Bukkit.getScheduler().cancelTasks(plugin)
}
}

View File

@@ -0,0 +1,6 @@
group = "com.willfp"
version = rootProject.version
dependencies {
compileOnly("dev.folia:folia-api:1.19.4-R0.1-SNAPSHOT")
}
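Note: the new core-folia module compiles against folia-api, which is published on PaperMC's public Maven repository, so that repository has to be reachable from the module's resolution path. A sketch of an equivalent Kotlin DSL build script; the repositories block is an assumption (eco may already declare it in a parent build file):

    // Hypothetical build.gradle.kts for eco-core/core-folia; the repository URL is
    // PaperMC's public Maven repo, which hosts the dev.folia:folia-api snapshots.
    group = "com.willfp"
    version = rootProject.version

    repositories {
        mavenCentral()
        maven("https://repo.papermc.io/repository/maven-public/")
    }

    dependencies {
        compileOnly("dev.folia:folia-api:1.19.4-R0.1-SNAPSHOT")
    }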

View File

@@ -0,0 +1,49 @@
package com.willfp.eco.internal.scheduling
import com.willfp.eco.core.EcoPlugin
import com.willfp.eco.core.scheduling.Scheduler
import org.bukkit.Bukkit
import org.bukkit.Location
import org.bukkit.scheduler.BukkitTask
import java.util.concurrent.TimeUnit
class EcoSchedulerFolia(private val plugin: EcoPlugin) : Scheduler {
override fun runLater(runnable: Runnable, ticksLater: Long): BukkitTask {
Bukkit.getGlobalRegionScheduler().runDelayed(plugin, { runnable.run() }, ticksLater)
}
override fun runLater(location: Location, ticksLater: Int, task: Runnable): BukkitTask {
Bukkit.getRegionScheduler().runDelayed(plugin, location, { task.run() }, ticksLater.toLong())
}
override fun runTimer(delay: Long, repeat: Long, runnable: Runnable): BukkitTask {
Bukkit.getGlobalRegionScheduler().runAtFixedRate(plugin, { runnable.run() }, delay, repeat)
}
override fun runTimer(location: Location, delay: Int, repeat: Int, task: Runnable): BukkitTask {
Bukkit.getRegionScheduler().runAtFixedRate(plugin, location, { task.run() }, delay.toLong(), repeat.toLong())
}
override fun run(runnable: Runnable): BukkitTask {
Bukkit.getGlobalRegionScheduler().run(plugin) { runnable.run() }
}
override fun run(location: Location, task: Runnable): BukkitTask {
Bukkit.getRegionScheduler().run(plugin, location) { task.run() }
}
override fun runAsync(task: Runnable): BukkitTask {
Bukkit.getAsyncScheduler().runNow(plugin) { task.run() }
}
override fun runTimerAsync(delay: Int, repeat: Int, task: Runnable): BukkitTask {
Bukkit.getAsyncScheduler()
.runAtFixedRate(plugin, { task.run() }, delay * 50L, repeat * 50L, TimeUnit.MILLISECONDS)
}
override fun cancelAll() {
Bukkit.getScheduler().cancelTasks(plugin)
Bukkit.getAsyncScheduler().cancelTasks(plugin)
Bukkit.getGlobalRegionScheduler().cancelTasks(plugin)
}
}
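Note: the Folia backend splits work across three schedulers: the global region scheduler for world-agnostic tasks, the per-region scheduler keyed by a Location, and the async scheduler. The async scheduler takes real time rather than ticks, which is why runTimerAsync multiplies the tick values by 50 and passes TimeUnit.MILLISECONDS (one tick is 50 ms). A small standalone sketch of that conversion:

    import java.util.concurrent.TimeUnit

    // One Minecraft tick is 50 ms, so tick delays are bridged to the async
    // scheduler by multiplying by 50 and passing TimeUnit.MILLISECONDS.
    fun ticksToMillis(ticks: Int): Long = ticks * 50L

    fun main() {
        check(ticksToMillis(20) == 1_000L)                                  // 20 ticks = 1 second
        check(TimeUnit.MILLISECONDS.toSeconds(ticksToMillis(100)) == 5L)    // 100 ticks = 5 seconds
        println("100 ticks = ${ticksToMillis(100)} ms")
    }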

View File

@@ -4,6 +4,7 @@ import com.willfp.eco.core.Eco
import com.willfp.eco.core.EcoPlugin
import com.willfp.eco.core.PluginLike
import com.willfp.eco.core.PluginProps
import com.willfp.eco.core.Prerequisite
import com.willfp.eco.core.command.CommandBase
import com.willfp.eco.core.command.PluginCommandBase
import com.willfp.eco.core.config.ConfigType
@@ -39,7 +40,8 @@ import com.willfp.eco.internal.integrations.PAPIExpansion
import com.willfp.eco.internal.logging.EcoLogger
import com.willfp.eco.internal.placeholder.PlaceholderParser
import com.willfp.eco.internal.proxy.EcoProxyFactory
import com.willfp.eco.internal.scheduling.EcoScheduler
import com.willfp.eco.internal.scheduling.EcoSchedulerFolia
import com.willfp.eco.internal.scheduling.EcoSchedulerSpigot
import com.willfp.eco.internal.spigot.data.DataYml
import com.willfp.eco.internal.spigot.data.KeyRegistry
import com.willfp.eco.internal.spigot.data.ProfileHandler
@@ -101,7 +103,7 @@ class EcoImpl : EcoSpigotPlugin(), Eco {
)
override fun createScheduler(plugin: EcoPlugin) =
EcoScheduler(plugin)
if (Prerequisite.HAS_FOLIA.isMet) EcoSchedulerFolia(plugin) else EcoSchedulerSpigot(plugin)
override fun createEventManager(plugin: EcoPlugin) =
EcoEventManager(plugin)
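Note: createScheduler now picks the backend once, at plugin construction time, based on Prerequisite.HAS_FOLIA. Downstream code keeps calling the Scheduler interface and never needs to know which implementation it received; a hypothetical consumer (again assuming EcoPlugin#getScheduler()):

    import com.willfp.eco.core.EcoPlugin
    import org.bukkit.Location

    // Hypothetical consumer: the same call works whether the backend is
    // EcoSchedulerFolia or EcoSchedulerSpigot, since selection happened in createScheduler.
    fun startHeartbeat(plugin: EcoPlugin, location: Location) {
        plugin.scheduler.runTimer(location, 20, 20) {
            plugin.logger.info("heartbeat at ${location.blockX}, ${location.blockZ}")
        }
    }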

View File

@@ -9,7 +9,6 @@ import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.ServerLocking
import com.willfp.eco.internal.spigot.data.storage.DataHandler
import com.willfp.eco.internal.spigot.data.storage.HandlerType
import com.willfp.eco.internal.spigot.data.storage.LegacyMySQLDataHandler
import com.willfp.eco.internal.spigot.data.storage.MongoDataHandler
import com.willfp.eco.internal.spigot.data.storage.MySQLDataHandler
import com.willfp.eco.internal.spigot.data.storage.YamlDataHandler
@@ -28,17 +27,6 @@ class ProfileHandler(
HandlerType.YAML -> YamlDataHandler(plugin, this)
HandlerType.MYSQL -> MySQLDataHandler(plugin, this)
HandlerType.MONGO -> MongoDataHandler(plugin, this)
HandlerType.LEGACY_MYSQL -> LegacyMySQLDataHandler(plugin, this)
}
init {
if (handler.type == HandlerType.LEGACY_MYSQL) {
plugin.logger.warning("You're using the legacy MySQL handler!")
plugin.logger.warning("Some features will not work and you may get unfixable errors.")
plugin.logger.warning("Support cannot be given to data issues related to legacy MySQL.")
plugin.logger.warning("Change your data handler to mysql, mongo, or yaml to fix this!")
plugin.logger.warning("This can be done in /plugins/eco/config.yml")
}
}
fun accessLoadedProfile(uuid: UUID): EcoProfile? =
@@ -90,11 +78,7 @@ class ProfileHandler(
}
var previousHandlerType = HandlerType.valueOf(plugin.dataYml.getString("previous-handler"))
if (previousHandlerType == HandlerType.MYSQL && !plugin.dataYml.has("new-mysql")) {
previousHandlerType = HandlerType.LEGACY_MYSQL
}
val previousHandlerType = HandlerType.valueOf(plugin.dataYml.getString("previous-handler"))
if (previousHandlerType == type) {
return
@@ -104,7 +88,6 @@ class ProfileHandler(
HandlerType.YAML -> YamlDataHandler(plugin, this)
HandlerType.MYSQL -> MySQLDataHandler(plugin, this)
HandlerType.MONGO -> MongoDataHandler(plugin, this)
HandlerType.LEGACY_MYSQL -> LegacyMySQLDataHandler(plugin, this)
}
ServerLocking.lock("Migrating player data! Check console for more information.")

View File

@@ -3,6 +3,5 @@ package com.willfp.eco.internal.spigot.data.storage
enum class HandlerType {
YAML,
MYSQL,
MONGO,
LEGACY_MYSQL
MONGO
}

View File

@@ -1,315 +0,0 @@
package com.willfp.eco.internal.spigot.data.storage
import com.github.benmanes.caffeine.cache.Caffeine
import com.google.common.util.concurrent.ThreadFactoryBuilder
import com.willfp.eco.core.Eco
import com.willfp.eco.core.EcoPlugin
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.data.ProfileHandler
import com.willfp.eco.internal.spigot.data.serverProfileUUID
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import org.jetbrains.exposed.dao.id.UUIDTable
import org.jetbrains.exposed.sql.BooleanColumnType
import org.jetbrains.exposed.sql.Column
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.DoubleColumnType
import org.jetbrains.exposed.sql.IntegerColumnType
import org.jetbrains.exposed.sql.ResultRow
import org.jetbrains.exposed.sql.SchemaUtils
import org.jetbrains.exposed.sql.VarCharColumnType
import org.jetbrains.exposed.sql.insert
import org.jetbrains.exposed.sql.select
import org.jetbrains.exposed.sql.transactions.transaction
import org.jetbrains.exposed.sql.update
import java.util.UUID
import java.util.concurrent.Callable
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
/*
The MySQL data handler is hot garbage for several reasons:
- Using MySQL on unstructured data: it's being horrifically misused, but that's just how it has to be.
- Can't remove un-needed keys, there's wasted space in the columns everywhere.
- No native support for the STRING_LIST type, instead it 'serializes' the lists with semicolons as separators.
- General lack of flexibility, it's too rigid.
That's why I added the MongoDB handler, it's far, far better suited for what eco does - use it over
MySQL if you can.
Oh, also - I don't really know how this class works. I've rewritten it and hacked it together several ways
in several sessions, and it's basically complete gibberish to me. Adding the STRING_LIST type is probably
the worst bodge I've shipped in production.
*/
@Suppress("UNCHECKED_CAST")
class LegacyMySQLDataHandler(
plugin: EcoSpigotPlugin,
handler: ProfileHandler
) : DataHandler(HandlerType.LEGACY_MYSQL) {
private val database: Database
private val playerHandler: ImplementedMySQLHandler
private val serverHandler: ImplementedMySQLHandler
init {
val config = HikariConfig()
config.driverClassName = "com.mysql.cj.jdbc.Driver"
config.username = plugin.configYml.getString("mysql.user")
config.password = plugin.configYml.getString("mysql.password")
config.jdbcUrl = "jdbc:mysql://" +
"${plugin.configYml.getString("mysql.host")}:" +
"${plugin.configYml.getString("mysql.port")}/" +
plugin.configYml.getString("mysql.database")
config.maximumPoolSize = plugin.configYml.getInt("mysql.connections")
database = Database.connect(HikariDataSource(config))
playerHandler = ImplementedMySQLHandler(
handler,
UUIDTable("eco_players"),
plugin
)
serverHandler = ImplementedMySQLHandler(
handler,
UUIDTable("eco_server"),
plugin
)
}
override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
return applyFor(uuid) {
it.read(uuid, key)
}
}
override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
applyFor(uuid) {
it.write(uuid, key, value)
}
}
override fun saveKeysFor(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
applyFor(uuid) {
it.saveKeysForRow(uuid, keys)
}
}
private inline fun <R> applyFor(uuid: UUID, function: (ImplementedMySQLHandler) -> R): R {
return if (uuid == serverProfileUUID) {
function(serverHandler)
} else {
function(playerHandler)
}
}
override fun initialize() {
playerHandler.initialize()
serverHandler.initialize()
}
@Suppress("UNCHECKED_CAST")
private inner class ImplementedMySQLHandler(
private val handler: ProfileHandler,
private val table: UUIDTable,
private val plugin: EcoPlugin
) {
private val rows = Caffeine.newBuilder()
.expireAfterWrite(3, TimeUnit.SECONDS)
.build<UUID, ResultRow>()
private val threadFactory = ThreadFactoryBuilder().setNameFormat("eco-legacy-mysql-thread-%d").build()
private val executor = Executors.newFixedThreadPool(plugin.configYml.getInt("mysql.threads"), threadFactory)
val registeredKeys = mutableSetOf<PersistentDataKey<*>>()
init {
transaction(database) {
SchemaUtils.create(table)
}
}
fun initialize() {
transaction(database) {
SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
}
}
fun ensureKeyRegistration(key: PersistentDataKey<*>) {
if (table.columns.any { it.name == key.key.toString() }) {
registeredKeys.add(key)
return
}
registerColumn(key)
registeredKeys.add(key)
}
fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: Any) {
getRow(uuid)
doWrite(uuid, key, key.type.constrainSQLTypes(value))
}
private fun doWrite(uuid: UUID, key: PersistentDataKey<*>, constrainedValue: Any) {
val column: Column<Any> = getColumn(key) as Column<Any>
executor.submit {
transaction(database) {
table.update({ table.id eq uuid }) {
it[column] = constrainedValue
}
}
}
}
fun saveKeysForRow(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
saveRow(uuid, keys)
}
private fun saveRow(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
val profile = handler.loadGenericProfile(uuid)
executor.submit {
transaction(database) {
getRow(uuid)
for (key in keys) {
doWrite(uuid, key, key.type.constrainSQLTypes(profile.read(key)))
}
}
}
}
fun <T> read(uuid: UUID, key: PersistentDataKey<T>): T? {
val doRead = Callable<T?> {
transaction(database) {
val row = getRow(uuid)
val column = getColumn(key)
val raw = row[column]
key.type.fromConstrained(raw)
}
}
ensureKeyRegistration(key) // DON'T DELETE THIS LINE! I know it's covered in getColumn, but I need to do it here as well.
doRead.call()
return if (Eco.get().ecoPlugin.configYml.getBool("mysql.async-reads")) {
executor.submit(doRead).get()
} else {
doRead.call()
}
}
private fun <T> registerColumn(key: PersistentDataKey<T>) {
try {
transaction(database) {
try {
table.apply {
if (table.columns.any { it.name == key.key.toString() }) {
return@apply
}
when (key.type) {
PersistentDataKeyType.INT -> registerColumn<Int>(key.key.toString(), IntegerColumnType())
.default(key.defaultValue as Int)
PersistentDataKeyType.DOUBLE -> registerColumn<Double>(
key.key.toString(),
DoubleColumnType()
).default(key.defaultValue as Double)
PersistentDataKeyType.BOOLEAN -> registerColumn<Boolean>(
key.key.toString(),
BooleanColumnType()
).default(key.defaultValue as Boolean)
PersistentDataKeyType.STRING -> registerColumn<String>(
key.key.toString(),
VarCharColumnType(512)
).default(key.defaultValue as String)
PersistentDataKeyType.STRING_LIST -> registerColumn<String>(
key.key.toString(),
VarCharColumnType(8192)
).default(PersistentDataKeyType.STRING_LIST.constrainSQLTypes(key.defaultValue as List<String>) as String)
PersistentDataKeyType.CONFIG -> throw IllegalArgumentException(
"Config Persistent Data Keys are not supported by the legacy MySQL handler!"
)
else -> throw NullPointerException("Null value found!")
}
}
SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
} catch (e: Exception) {
plugin.logger.info("MySQL Error 1!")
e.printStackTrace()
// What's that? Two enormous exception catches? That's right! This code sucks.
}
}
} catch (e: Exception) {
plugin.logger.info("MySQL Error 2!")
e.printStackTrace()
// It might fail. Who cares? This is legacy.
}
}
private fun getColumn(key: PersistentDataKey<*>): Column<*> {
ensureKeyRegistration(key)
val name = key.key.toString()
return table.columns.first { it.name == name }
}
private fun getRow(uuid: UUID): ResultRow {
fun select(uuid: UUID): ResultRow? {
return transaction(database) {
table.select { table.id eq uuid }.limit(1).singleOrNull()
}
}
return rows.get(uuid) {
val row = select(uuid)
return@get if (row != null) {
row
} else {
transaction(database) {
table.insert { it[id] = uuid }
}
select(uuid)
}
}
}
}
}
private fun <T> PersistentDataKeyType<T>.constrainSQLTypes(value: Any): Any {
return if (this == PersistentDataKeyType.STRING_LIST) {
@Suppress("UNCHECKED_CAST")
value as List<String>
value.joinToString(separator = ";")
} else {
value
}
}
private fun <T> PersistentDataKeyType<T>.fromConstrained(constrained: Any?): T? {
if (constrained == null) {
return null
}
@Suppress("UNCHECKED_CAST")
return if (this == PersistentDataKeyType.STRING_LIST) {
constrained as String
constrained.split(";").toList()
} else {
constrained
} as T
}

View File

@@ -34,7 +34,7 @@ Whatever. At least it works.
@Suppress("UNCHECKED_CAST")
class MySQLDataHandler(
private val plugin: EcoSpigotPlugin,
plugin: EcoSpigotPlugin,
private val handler: ProfileHandler
) : DataHandler(HandlerType.MYSQL) {
private val database: Database
@@ -149,9 +149,4 @@ class MySQLDataHandler(
SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
}
}
override fun save() {
plugin.dataYml.set("new-mysql", true)
plugin.dataYml.save()
}
}

View File

@@ -4,7 +4,6 @@ import com.github.benmanes.caffeine.cache.Cache
import com.github.benmanes.caffeine.cache.Caffeine
import com.willfp.eco.core.EcoPlugin
import com.willfp.eco.core.placeholder.context.PlaceholderContext
import java.util.Objects
import java.util.concurrent.TimeUnit
class DelegatedExpressionHandler(
@@ -16,11 +15,10 @@ class DelegatedExpressionHandler(
.build()
override fun evaluate(expression: String, context: PlaceholderContext): Double {
val hash = Objects.hash(
expression,
context.player?.uniqueId,
context.injectableContext
)
// Peak performance (totally not having fun with bitwise operators)
val hash = (((expression.hashCode() shl 5) - expression.hashCode()) xor
(context.player?.uniqueId?.hashCode() ?: 0)
) xor context.injectableContext.hashCode()
return evaluationCache.get(hash) {
handler.evaluate(expression, context)

View File

@@ -1,3 +1,3 @@
version = 6.57.1
version = 6.57.2
plugin-name = eco
kotlin.code.style = official

View File

@@ -20,4 +20,5 @@ include(":eco-core:core-nms:v1_19_R2")
include(":eco-core:core-nms:v1_19_R3")
include(":eco-core:core-proxy")
include(":eco-core:core-plugin")
include(":eco-core:core-backend")
include(":eco-core:core-backend")
include(":eco-core:core-folia")