mirror of https://github.com/VolmitSoftware/Iris.git synced 2025-12-30 12:29:20 +00:00

new structure

RePixelatedMC committed 2023-12-17 17:42:28 +01:00
parent c0fdf23133
commit 5507c35547
5 changed files with 86 additions and 201 deletions

View File

@@ -142,8 +142,6 @@ public class IrisSettings {
public int objectLoaderCacheSize = 4_096;
public int scriptLoaderCacheSize = 512;
public int tectonicUnloadThreads = -1; // -1 = Disabled and instead use the dynamic method
public boolean AggressiveTectonicUnload = false;
public int AggressiveTectonicThreshold = -1; // -1 = Disabled and instead uses the tectonicLimit
}
@Data
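Both tectonicUnloadThreads and AggressiveTectonicThreshold use -1 as a "disabled" sentinel, per the inline comments above. A minimal sketch (not part of this commit) of how a caller might resolve those sentinels, assuming Lombok's @Data generates the usual getters and using a processor count purely as a stand-in for the "dynamic method" the comment mentions:

// Illustrative sketch only; the identifiers come from the hunks in this commit.
int limit = tectonicLimit.get();
int threshold = IrisSettings.get().getPerformance().getAggressiveTectonicThreshold();
// -1 = disabled: fall back to the shared tectonic limit, as Mantle's trim pass below does.
int effectiveThreshold = (threshold == -1) ? limit : threshold;
int threads = IrisSettings.get().getPerformance().getTectonicUnloadThreads(); // assumed @Data getter
// -1 = use the dynamic method; availableProcessors() is only an illustrative fallback here.
int effectiveThreads = (threads == -1) ? Runtime.getRuntime().availableProcessors() : threads;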

View File

@@ -63,7 +63,6 @@ public class CommandDeveloper implements DecreeExecutor {
Iris.info(C.DARK_PURPLE + "Tectonic Unload Duration: " + C.LIGHT_PURPLE + Form.duration((long) engine.getMantle().getTectonicDuration()));
Iris.info(C.DARK_PURPLE + "Cache Size: " + C.LIGHT_PURPLE + Form.f(IrisData.cacheSize()));
Iris.info(C.DARK_PURPLE + "LastUse Size: " + C.LIGHT_PURPLE + Form.mem(lastUseSize));
Iris.info(C.DARK_PURPLE + "Agressive Unload: " + C.LIGHT_PURPLE + IrisSettings.get().getPerformance().AggressiveTectonicUnload);
Iris.info("-------------------------");
} else {
Iris.info(C.RED + "Engine is null!");

View File

@@ -52,7 +52,7 @@ public class ModesSFG {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
// Handle interruption
// no
}
}
}
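Whichever of the two comments above survives this change, the catch block still swallows the interrupt. An idiomatic sketch, not part of this commit, restores the interrupt flag so the surrounding loop can observe it and wind down:

try {
    Thread.sleep(1000);
} catch (InterruptedException e) {
    // Re-set the interrupt status so the enclosing loop can see it and stop.
    Thread.currentThread().interrupt();
}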

View File

@@ -13,112 +13,51 @@ import org.bukkit.World;
import org.bukkit.plugin.java.JavaPlugin;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
import static com.volmit.iris.util.mantle.Mantle.tectonicLimit;
public class IrisEngineSVC implements IrisService {
private JavaPlugin plugin;
public Looper ticker1;
public Looper ticker2;
public Looper engineTicker;
public World selectedWorld;
public List<World> IrisWorlds = new ArrayList<>();
public Looper trimTicker;
public Looper unloadTicker;
public List<World> corruptedIrisWorlds = new ArrayList<>();
// todo make this work with multiple worlds
@Override
public void onEnable() {
this.plugin = Iris.instance;
tectonicLimit.set(2);
long t = getHardware.getProcessMemory();
for (; t > 250; ) {
while (t > 250) {
tectonicLimit.getAndAdd(1);
t = t - 250;
}
tectonicLimit.set(10); // DEBUG CODE
this.IrisEngine();
engineTicker.start();
ticker1.start();
ticker2.start();
this.setup();
trimTicker.start();
unloadTicker.start();
}
private final AtomicReference<World> selectedWorldRef = new AtomicReference<>();
private void setup() {
trimTicker = new Looper() {
private final Supplier<Engine> supplier = createSupplier();
private Engine engine = supplier.get();
public CompletableFuture<World> initializeAsync() {
return CompletableFuture.supplyAsync(() -> {
World selectedWorld = null;
while (selectedWorld == null) {
synchronized (this) {
IrisWorlds.clear();
for (World w : Bukkit.getServer().getWorlds()) {
if (IrisToolbelt.access(w) != null) {
IrisWorlds.add(w);
}
}
if (!IrisWorlds.isEmpty()) {
Random rand = new Random();
int randomIndex = rand.nextInt(IrisWorlds.size());
selectedWorld = IrisWorlds.get(randomIndex);
}
}
if (selectedWorld == null) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return null;
}
}
}
return selectedWorld;
});
}
public void IrisEngine() {
engineTicker = new Looper() {
@Override
protected long loop() {
try {
World world = selectedWorldRef.get();
PlatformChunkGenerator generator = IrisToolbelt.access(world);
if (generator == null) {
initializeAsync().thenAcceptAsync(foundWorld -> selectedWorldRef.set(foundWorld));
} else {
selectedWorld = world;
}
selectedWorld = Bukkit.getWorld("localmemtest"); // debug code
} catch (Throwable e) {
Iris.reportError(e);
e.printStackTrace();
return -1;
}
return 1000;
}
};
ticker1 = new Looper() {
@Override
protected long loop() {
try {
World world = selectedWorld;
PlatformChunkGenerator generator = IrisToolbelt.access(world);
if (generator != null) {
Engine engine = IrisToolbelt.access(world).getEngine();
if (generator != null && generator.getEngine() != null) {
engine.getMantle().trim();
} else {
Iris.info("something is null 1");
}
if (engine != null) {
engine.getMantle().trim();
}
engine = supplier.get();
} catch (Throwable e) {
Iris.reportError(e);
e.printStackTrace();
@@ -129,20 +68,17 @@ public class IrisEngineSVC implements IrisService {
}
};
ticker2 = new Looper() {
unloadTicker = new Looper() {
private final Supplier<Engine> supplier = createSupplier();
private Engine engine = supplier.get();
@Override
protected long loop() {
try {
World world = selectedWorld;
PlatformChunkGenerator generator = IrisToolbelt.access(world);
if (generator != null) {
Engine engine = IrisToolbelt.access(world).getEngine();
if (generator != null && generator.getEngine() != null) {
engine.getMantle().unloadTectonicPlate();
} else {
Iris.info("something is null 2");
}
if (engine != null) {
engine.getMantle().unloadTectonicPlate();
}
engine = supplier.get();
} catch (Throwable e) {
Iris.reportError(e);
e.printStackTrace();
@@ -153,11 +89,30 @@ public class IrisEngineSVC implements IrisService {
};
}
private Supplier<Engine> createSupplier() {
AtomicInteger i = new AtomicInteger();
return () -> {
List<World> worlds = Bukkit.getWorlds();
if (i.get() >= worlds.size()) {
i.set(0);
}
for (int j = 0; j < worlds.size(); j++) {
PlatformChunkGenerator generator = IrisToolbelt.access(worlds.get(i.getAndIncrement()));
if (i.get() >= worlds.size()) {
i.set(0);
}
if (generator != null && generator.getEngine() != null) {
return generator.getEngine();
}
}
return null;
};
}
@Override
public void onDisable() {
ticker1.interrupt();
ticker2.interrupt();
engineTicker.interrupt();
trimTicker.interrupt();
unloadTicker.interrupt();
}
}
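For reference, the onEnable() sizing above starts tectonicLimit at 2 and adds 1 for every 250 units reported by getHardware.getProcessMemory() (presumably megabytes), a value the tectonicLimit.set(10) debug line in the same hunk then overrides. A worked sketch of that arithmetic, with 2048 as an assumed input:

// Illustrative sketch of the sizing loop in onEnable(); 2048 is an assumed example value.
long memory = 2048;     // process memory, assumed to be in MB
int limit = 2;          // starting value, as in onEnable()
while (memory > 250) {  // one extra tectonic plate per 250 MB of headroom
    limit++;
    memory -= 250;
}
// Result: limit == 10 with 48 left over, so a ~2 GB process lands on a limit of 10.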

View File

@@ -60,17 +60,16 @@ import java.util.concurrent.atomic.AtomicLong;
public class Mantle {
private final File dataFolder;
@Getter
private final int worldHeight;
private final Map<Long, Long> lastUse;
@Getter
private final Map<Long, TectonicPlate> loadedRegions;
private final HyperLock hyperLock;
private final KSet<Long> unload;
private final AtomicBoolean closed;
private final MultiBurst ioBurst;
private final AtomicBoolean ioTrim;
private final AtomicBoolean ioTectonicUnload;
public Looper ticker;
/**
* Create a new mantle
@@ -87,7 +86,6 @@ public class Mantle {
this.ioTrim = new AtomicBoolean(false);
this.ioTectonicUnload = new AtomicBoolean(false);
dataFolder.mkdirs();
unload = new KSet<>();
loadedRegions = new KMap<>();
lastUse = new KMap<>();
ioBurst = MultiBurst.burst;
@@ -416,124 +414,63 @@ public class Mantle {
throw new RuntimeException("The Mantle is closed");
}
Iris.debug(C.BLUE + "TECTONIC TRIM HAS RUN");
if (IrisSettings.get().getPerformance().getAggressiveTectonicThreshold() == -1) {
forceAggressiveThreshold.set(tectonicLimit.get());
} else {
forceAggressiveThreshold.set(IrisSettings.get().getPerformance().getAggressiveTectonicThreshold());
}
adjustedIdleDuration.set(baseIdleDuration);
if (loadedRegions != null) {
if (loadedRegions.size() > tectonicLimit.get()) {
// todo update this correctly and maybe do something when its above a 100%
int tectonicLimitValue = tectonicLimit.get();
adjustedIdleDuration.set(Math.max(adjustedIdleDuration.get() - (1000 * (((loadedRegions.size() - tectonicLimitValue) / (double) tectonicLimitValue) * 100) * 0.4), 4000));
}
}
ioTrim.set(true);
try {
Iris.debug("Trimming Tectonic Plates older than " + Form.duration(adjustedIdleDuration.get(), 0));
if (lastUse != null) {
for (Long i : new ArrayList<>(lastUse.keySet())) {
double finalAdjustedIdleDuration = adjustedIdleDuration.get();
hyperLock.withLong(i, () -> {
Long lastUseTime = lastUse.get(i);
if (lastUseTime != null && M.ms() - lastUseTime >= finalAdjustedIdleDuration) {
toUnload.add(i);
Iris.debug("Tectonic Region added to unload");
}
});
}
if (IrisSettings.get().getPerformance().AggressiveTectonicUnload
&& loadedRegions.size() > forceAggressiveThreshold.get()) {
AtomicInteger regionCountToRemove = new AtomicInteger(0);
if (loadedRegions.size() > tectonicLimit.get()) {
regionCountToRemove.set(loadedRegions.size() - tectonicLimit.get());
}
while (regionCountToRemove.get() > 0) {
Long[] oldestKey = {null};
long[] oldestAge = {Long.MIN_VALUE};
for (Long key : lastUse.keySet()) {
hyperLock.withLong(key, () -> {
if (!toUnload.contains(key)) {
long age = M.ms() - lastUse.get(key);
if (age > oldestAge[0]) {
oldestAge[0] = age;
oldestKey[0] = key;
}
}
});
}
if (oldestKey[0] != null) {
Long finalOldestKey = oldestKey[0];
hyperLock.withLong(finalOldestKey, () -> {
toUnload.add(finalOldestKey);
Iris.debug("Oldest Tectonic Region " + finalOldestKey + " added to unload");
regionCountToRemove.getAndDecrement();
});
}
}
}
} finally {
ioTrim.set(false);
}
}
public synchronized void unloadTectonicPlate() {
long time = System.currentTimeMillis();
try {
Iris.debug(C.DARK_BLUE + "TECTONIC UNLOAD HAS RUN");
int threadCount = 1;
ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
List<Long> toUnloadList;
synchronized (toUnload) {
toUnloadList = new ArrayList<>(toUnload);
public void unloadTectonicPlate() {
try {
for (Long id : new ArrayList<>(toUnload)) {
hyperLock.withLong(id, () -> {
TectonicPlate m = loadedRegions.get(id);
if (m != null) {
try {
m.write(fileForRegion(dataFolder, id));
loadedRegions.remove(id);
lastUse.remove(id);
toUnload.remove(id);
Iris.info("Unloaded Tectonic Plate " + C.DARK_GREEN + Cache.keyX(id) + " " + Cache.keyZ(id));
} catch (IOException e) {
e.printStackTrace();
}
}
int chunkSize = (int) Math.ceil(toUnloadList.size() / (double) threadCount);
for (int i = 0; i < threadCount; i++) {
int start = i * chunkSize;
int end = Math.min(start + chunkSize, toUnloadList.size());
List<Long> sublist = toUnloadList.subList(start, end);
executorService.submit(() -> {
for (Long id : sublist) {
hyperLock.withLong(id, () -> {
TectonicPlate m = loadedRegions.get(id);
if (m != null) {
try {
m.write(fileForRegion(dataFolder, id));
loadedRegions.remove(id);
lastUse.remove(id);
toUnload.remove(id);
Iris.info("Unloaded Tectonic Plate " + C.DARK_GREEN + Cache.keyX(id) + " " + Cache.keyZ(id));
} catch (IOException e) {
e.printStackTrace();
}
}
});
}
});
}
executorService.shutdown();
executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
} catch (Exception e) {
e.printStackTrace();
}
});
}
} catch (Exception e) {
e.printStackTrace();
}
ioTectonicUnload.set(true);
}
/**
* This retrieves a future of the Tectonic Plate at the given coordinates.
* All methods accessing tectonic plates should go through this method
@@ -639,10 +576,6 @@ public class Mantle {
}
public int getWorldHeight() {
return worldHeight;
}
public MantleChunk getChunk(Chunk e) {
return getChunk(e.getX(), e.getZ());
}
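For reference, the trim pass in the hunk above shortens the idle cutoff in proportion to how far loadedRegions overshoots tectonicLimit, never letting it fall below 4000 ms; the aggressive branch in the same hunk additionally queues the oldest plates (by lastUse) until the count is back at the limit. A worked example of the adjustment formula, with 10000 ms assumed for the base duration since its real value sits outside this diff:

// Illustrative arithmetic only; mirrors the Math.max(...) expression in the trim hunk above.
double baseIdleDuration = 10_000; // assumed example; the real base value is not shown in this diff
int tectonicLimitValue = 30;      // example limit
int loadedRegionsSize = 45;       // 50% over the limit
double overshootPercent = ((loadedRegionsSize - tectonicLimitValue) / (double) tectonicLimitValue) * 100; // 50.0
double adjusted = Math.max(baseIdleDuration - (1000 * overshootPercent * 0.4), 4000); // 10000 - 20000, clamped to 4000
// Plates idle longer than ~4 seconds would then be queued for unload on the next pass.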