Mirror of https://github.com/VolmitSoftware/Iris.git (synced 2025-12-26 02:29:14 +00:00)

Even less waste work & prefetch caching

This commit is contained in:
cyberpwn
2022-09-10 02:03:59 -04:00
parent 494c38a153
commit e545269b93
8 changed files with 190 additions and 18 deletions

View File

@@ -39,6 +39,8 @@ import com.volmit.iris.util.collection.KMap;
import com.volmit.iris.util.context.IrisContext;
import com.volmit.iris.util.format.C;
import com.volmit.iris.util.math.RNG;
import com.volmit.iris.util.parallel.BurstExecutor;
import com.volmit.iris.util.parallel.MultiBurst;
import com.volmit.iris.util.scheduling.ChronoLatch;
import com.volmit.iris.util.scheduling.J;
import lombok.Data;
@@ -467,4 +469,38 @@ public class IrisData implements ExclusionStrategy, TypeAdapterFactory {
/**
 * Returns whether this data manager has been closed and should no longer serve loads.
 */
public boolean isClosed() {
    return closed;
}
/**
 * Persists each loader's first-access key set to the on-disk prefetch cache,
 * fanning the per-loader saves out across a burst executor and blocking until
 * every save has finished.
 *
 * @param engine the engine whose dimension/seed identifies the cache location
 */
public void savePrefetch(Engine engine) {
    BurstExecutor executor = MultiBurst.burst.burst(loaders.size());

    loaders.values().forEach(loader -> executor.queue(() -> {
        try {
            loader.saveFirstAccess(engine);
        } catch(IOException e) {
            // Surface I/O failures out of the worker as unchecked.
            throw new RuntimeException(e);
        }
    }));

    // Wait for all queued saves to complete before announcing success.
    executor.complete();
    Iris.info("Saved Prefetch Cache to speed up future world startups");
}
/**
 * Warms each loader's cache from the on-disk prefetch file (if present),
 * running the per-loader loads in parallel on a burst executor and blocking
 * until all of them have completed.
 *
 * @param engine the engine whose dimension/seed identifies the cache location
 */
public void loadPrefetch(Engine engine) {
    BurstExecutor executor = MultiBurst.burst.burst(loaders.size());

    loaders.values().forEach(loader -> executor.queue(() -> {
        try {
            loader.loadFirstAccess(engine);
        } catch(IOException e) {
            // Surface I/O failures out of the worker as unchecked.
            throw new RuntimeException(e);
        }
    }));

    // Wait for all queued loads to complete before announcing success.
    executor.complete();
    Iris.info("Loaded Prefetch Cache to reduce generation disk use.");
}
}

View File

@@ -23,32 +23,51 @@ import com.volmit.iris.Iris;
import com.volmit.iris.core.IrisSettings;
import com.volmit.iris.core.project.SchemaBuilder;
import com.volmit.iris.core.service.PreservationSVC;
import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.engine.framework.MeteredCache;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.collection.KSet;
import com.volmit.iris.util.data.KCache;
import com.volmit.iris.util.format.C;
import com.volmit.iris.util.format.Form;
import com.volmit.iris.util.io.CustomOutputStream;
import com.volmit.iris.util.io.IO;
import com.volmit.iris.util.json.JSONArray;
import com.volmit.iris.util.json.JSONObject;
import com.volmit.iris.util.parallel.BurstExecutor;
import com.volmit.iris.util.parallel.MultiBurst;
import com.volmit.iris.util.scheduling.ChronoLatch;
import com.volmit.iris.util.scheduling.J;
import com.volmit.iris.util.scheduling.PrecisionStopwatch;
import lombok.Data;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Stream;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
@Data
public class ResourceLoader<T extends IrisRegistrant> implements MeteredCache {
public static final AtomicDouble tlt = new AtomicDouble(0);
private static final int CACHE_SIZE = 100000;
protected KSet<String> firstAccess;
protected File root;
protected String folderName;
protected String resourceTypeName;
@@ -63,6 +82,7 @@ public class ResourceLoader<T extends IrisRegistrant> implements MeteredCache {
public ResourceLoader(File root, IrisData manager, String folderName, String resourceTypeName, Class<? extends T> objectClass) {
this.manager = manager;
firstAccess = new KSet<>();
folderCache = new AtomicReference<>();
sec = new ChronoLatch(5000);
loads = new AtomicInteger();
@@ -221,6 +241,24 @@ public class ResourceLoader<T extends IrisRegistrant> implements MeteredCache {
return m;
}
/**
 * Loads every named resource in parallel via the burst executor and returns
 * the successfully-loaded results (nulls are skipped).
 *
 * <p>Note: burst workers complete concurrently, so the result list is
 * synchronized during insertion; the returned list's order is unspecified.
 *
 * @param s the resource keys to load
 * @return the loaded resources, excluding any that failed to load (null)
 */
public KList<T> loadAllParallel(KList<String> s) {
    KList<T> m = new KList<>();
    BurstExecutor burst = MultiBurst.burst.burst(s.size());
    for(String i : s) {
        burst.queue(() -> {
            T t = load(i);
            if(t != null) {
                // KList is not thread-safe; guard concurrent adds from the
                // burst workers to avoid lost elements / index corruption.
                synchronized(m) {
                    m.add(t);
                }
            }
        });
    }
    burst.complete();
    return m;
}
public KList<T> loadAll(KList<String> s, Consumer<T> postLoad) {
KList<T> m = new KList<>();
@@ -282,12 +320,52 @@ public class ResourceLoader<T extends IrisRegistrant> implements MeteredCache {
return null;
}
firstAccess.add(name);
return loadCache.get(name);
}
/**
 * Reads this loader's prefetch file (keys touched on a previous startup) for
 * the given engine's dimension/seed and warms the cache by loading them all
 * in parallel. Does nothing when no prefetch file exists.
 *
 * @param engine the engine whose seed/dimension identify the prefetch file
 * @throws IOException if the prefetch file cannot be read
 */
public void loadFirstAccess(Engine engine) throws IOException {
    // The cache is keyed by seed + dimension version + load key so stale
    // caches from other worlds/pack versions are never reused.
    String id = "DIM" + Math.abs(engine.getSeedManager().getSeed() + engine.getDimension().getVersion() + engine.getDimension().getLoadKey().hashCode());
    File file = Iris.instance.getDataFile("prefetch/" + id + "/" + Math.abs(getFolderName().hashCode()) + ".ipfch");

    if(!file.exists()) {
        return;
    }

    KList<String> s = new KList<>();

    // try-with-resources: the original chain leaked the streams if readInt()
    // or readUTF() threw mid-read.
    try(DataInputStream din = new DataInputStream(new GZIPInputStream(new FileInputStream(file)))) {
        int m = din.readInt();

        for(int i = 0; i < m; i++) {
            s.add(din.readUTF());
        }
    }

    // Consumed caches are discarded on shutdown; a fresh one is written later.
    file.deleteOnExit();
    Iris.info("Loading " + s.size() + " prefetch " + getFolderName());
    loadAllParallel(s);
}
/**
 * Writes the set of resource keys accessed this session to the prefetch file
 * for the given engine's dimension/seed, so future startups can warm the
 * cache up front. Format: gzip-compressed [int count][UTF key]*.
 *
 * @param engine the engine whose seed/dimension identify the prefetch file
 * @throws IOException if the prefetch file cannot be written
 */
public void saveFirstAccess(Engine engine) throws IOException {
    // Must match the id scheme used by loadFirstAccess.
    String id = "DIM" + Math.abs(engine.getSeedManager().getSeed() + engine.getDimension().getVersion() + engine.getDimension().getLoadKey().hashCode());
    File file = Iris.instance.getDataFile("prefetch/" + id + "/" + Math.abs(getFolderName().hashCode()) + ".ipfch");
    file.getParentFile().mkdirs();

    // try-with-resources: the original leaked the stream chain if any write
    // threw; closing the outermost stream closes (and finishes) the gzip layer.
    try(DataOutputStream dos = new DataOutputStream(new CustomOutputStream(new FileOutputStream(file), 9))) {
        dos.writeInt(firstAccess.size());

        for(String i : firstAccess) {
            dos.writeUTF(i);
        }

        dos.flush();
    }
}
public KList<File> getFolders() {
synchronized(folderCache) {
if(folderCache.get() == null) {
KList<File> fc = new KList<>();

View File

@@ -118,9 +118,9 @@ public class IrisComplex implements DataProvider {
overlayStream = ProceduralStream.ofDouble((x, z) -> 0.0D).waste("Overlay Stream");
engine.getDimension().getOverlayNoise().forEach(i -> overlayStream = overlayStream.add((x, z) -> i.get(rng, getData(), x, z)));
rockStream = engine.getDimension().getRockPalette().getLayerGenerator(rng.nextParallelRNG(45), data).stream()
.select(engine.getDimension().getRockPalette().getBlockData(data)).waste("Rock Stream");
.select(engine.getDimension().getRockPalette().getBlockData(data)).waste("Rock Stream").contextInjecting((c,x,z)->c.getRock().get(x, z));
fluidStream = engine.getDimension().getFluidPalette().getLayerGenerator(rng.nextParallelRNG(78), data).stream()
.select(engine.getDimension().getFluidPalette().getBlockData(data)).waste("Fluid Stream");
.select(engine.getDimension().getFluidPalette().getBlockData(data)).waste("Fluid Stream").contextInjecting((c,x,z)->c.getFluid().get(x, z));
regionStyleStream = engine.getDimension().getRegionStyle().create(rng.nextParallelRNG(883), getData()).stream()
.zoom(engine.getDimension().getRegionZoom()).waste("Region Style");
regionIdentityStream = regionStyleStream.fit(Integer.MIN_VALUE, Integer.MAX_VALUE).waste("Region Identity Stream");
@@ -129,7 +129,8 @@ public class IrisComplex implements DataProvider {
Interpolated.of(a -> 0D, a -> focusRegion))
: regionStyleStream
.selectRarity(data.getRegionLoader().loadAll(engine.getDimension().getRegions()))
.cache2D("regionStream", engine, cacheSize).waste("Region Stream");
.cache2D("regionStream", engine, cacheSize).waste("Region Stream")
.contextInjecting((c,x,z)->c.getRegion().get(x, z));
regionIDStream = regionIdentityStream.convertCached((i) -> new UUID(Double.doubleToLongBits(i),
String.valueOf(i * 38445).hashCode() * 3245556666L)).waste("Region ID Stream");
caveBiomeStream = regionStream.convert((r)
@@ -137,7 +138,8 @@ public class IrisComplex implements DataProvider {
.zoom(r.getCaveBiomeZoom())
.selectRarity(data.getBiomeLoader().loadAll(r.getCaveBiomes()))
.onNull(emptyBiome)
).convertAware2D(ProceduralStream::get).cache2D("caveBiomeStream", engine, cacheSize).waste("Cave Biome Stream");
).convertAware2D(ProceduralStream::get).cache2D("caveBiomeStream", engine, cacheSize).waste("Cave Biome Stream")
.contextInjecting((c,x,z)->c.getCave().get(x, z));
inferredStreams.put(InferredType.CAVE, caveBiomeStream);
landBiomeStream = regionStream.convert((r)
-> engine.getDimension().getLandBiomeStyle().create(rng.nextParallelRNG(InferredType.LAND.ordinal()), getData()).stream()
@@ -173,8 +175,10 @@ public class IrisComplex implements DataProvider {
heightStream = ProceduralStream.of((x, z) -> {
IrisBiome b = focusBiome != null ? focusBiome : baseBiomeStream.get(x, z);
return getHeight(engine, b, x, z, engine.getSeedManager().getHeight());
}, Interpolated.DOUBLE).clamp(0, engine.getHeight()).cache2D("heightStream", engine, cacheSize).waste("Height Stream");
roundedHeighteightStream = heightStream.round().waste("Rounded Height Stream");
}, Interpolated.DOUBLE).clamp(0, engine.getHeight()).cache2D("heightStream", engine, cacheSize).waste("Height Stream")
.contextInjecting((c,x,z)->c.getHeight().get(x, z));
roundedHeighteightStream = heightStream.round().waste("Rounded Height Stream")
.contextInjecting((c,x,z)->(int)Math.round(c.getHeight().get(x, z)));
slopeStream = heightStream.slope(3).cache2D("slopeStream", engine, cacheSize).waste("Slope Stream");
trueBiomeStream = focusBiome != null ? ProceduralStream.of((x, y) -> focusBiome, Interpolated.of(a -> 0D,
b -> focusBiome))
@@ -182,7 +186,8 @@ public class IrisComplex implements DataProvider {
.convertAware2D((h, x, z) ->
fixBiomeType(h, baseBiomeStream.get(x, z),
regionStream.get(x, z), x, z, fluidHeight))
.cache2D("trueBiomeStream", engine, cacheSize).waste("True Biome Stream");
.cache2D("trueBiomeStream", engine, cacheSize).waste("True Biome Stream")
.contextInjecting((c,x,z)->c.getBiome().get(x, z));
trueBiomeDerivativeStream = trueBiomeStream.convert(IrisBiome::getDerivative).cache2D("trueBiomeDerivativeStream", engine, cacheSize).waste("True Biome Derivative Stream");
heightFluidStream = heightStream.max(fluidHeight).cache2D("heightFluidStream", engine, cacheSize).waste("Height Fluid Stream");
maxHeightStream = ProceduralStream.ofDouble((x, z) -> height).waste("Max Height Stream");

View File

@@ -129,8 +129,9 @@ public class IrisEngine implements Engine {
context = new IrisContext(this);
cleaning = new AtomicBoolean(false);
context.touch();
Iris.info("Initializing Engine: " + target.getWorld().name() + "/" + target.getDimension().getLoadKey() + " (" + target.getDimension().getDimensionHeight() + " height) Seed: " + getSeedManager().getSeed());
getData().setEngine(this);
getData().loadPrefetch(this);
Iris.info("Initializing Engine: " + target.getWorld().name() + "/" + target.getDimension().getLoadKey() + " (" + target.getDimension().getDimensionHeight() + " height) Seed: " + getSeedManager().getSeed());
minHeight = 0;
failing = false;
closed = false;
@@ -454,6 +455,11 @@ public class IrisEngine implements Engine {
getMantle().getMantle().flag(x >> 4, z >> 4, MantleFlag.REAL, true);
getMetrics().getTotal().put(p.getMilliseconds());
generated.incrementAndGet();
if(generated.get() == 661) {
J.a(() -> getData().savePrefetch(this));
}
recycle();
} catch(Throwable e) {
Iris.reportError(e);

View File

@@ -146,9 +146,6 @@ public abstract class EngineAssignedWorldManager extends EngineAssignedComponent
// Bukkit listener: forwards block-break events to this engine's handler, but
// only for breaks that happen in the world this component manages.
@EventHandler
public void on(BlockBreakEvent e) {
    if(e.getPlayer().getWorld().equals(getTarget().getWorld().realWorld())) {
        WasteDetector.printAll();
        onBlockBreak(e);
    }
}

View File

@@ -73,16 +73,11 @@ public interface EngineMode extends Staged {
@BlockCoordinates
default void generate(int x, int z, Hunk<BlockData> blocks, Hunk<Biome> biomes, boolean multicore) {
    // p times the context-preparation phase; p2 times the whole chunk
    // (prep + stage pipeline). Both feed rolling averages r / r2.
    PrecisionStopwatch p = PrecisionStopwatch.start();
    PrecisionStopwatch p2 = PrecisionStopwatch.start();
    // Build the per-chunk context and publish it on the engine's IrisContext
    // so context-injecting streams can serve cached values during generation.
    ChunkContext ctx = new ChunkContext(x, z, getComplex());
    IrisContext.getOr(getEngine()).setChunkContext(ctx);
    r.put(p.getMilliseconds());
    // Run every engine stage against this chunk with the shared context.
    for(EngineStage i : getStages()) {
        i.generate(x, z, blocks, biomes, multicore, ctx);
    }
    r2.put(p2.getMilliseconds());
    // Iris.warn(Form.duration(r.getAverage(), 2) + " Prep: TOTAL: " + C.RED + Form.duration(r2.getAverage(), 2));
}
}

View File

@@ -25,6 +25,7 @@ import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.engine.object.IRare;
import com.volmit.iris.engine.object.IrisStyledRange;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.function.Function2;
import com.volmit.iris.util.function.Function3;
import com.volmit.iris.util.function.Function4;
@@ -62,6 +63,7 @@ import com.volmit.iris.util.stream.interpolation.Interpolated;
import com.volmit.iris.util.stream.sources.FunctionStream;
import com.volmit.iris.util.stream.utility.CachedStream2D;
import com.volmit.iris.util.stream.utility.CachedStream3D;
import com.volmit.iris.util.stream.utility.ContextInjectingStream;
import com.volmit.iris.util.stream.utility.NullSafeStream;
import com.volmit.iris.util.stream.utility.ProfiledStream;
import com.volmit.iris.util.stream.utility.SemaphoreStream;
@@ -135,6 +137,10 @@ public interface ProceduralStream<T> extends ProceduralLayer, Interpolated<T> {
return new AddingStream<>(this, a);
}
/**
 * Wraps this stream so that lookups inside the chunk currently being generated
 * are answered from the published {@link ChunkContext} (via the given accessor)
 * instead of recomputing through this stream.
 *
 * @param contextAccessor extracts the cached value for local (0-15) chunk coords
 * @return the context-injecting decorator around this stream
 */
default ProceduralStream<T> contextInjecting(Function3<ChunkContext, Integer, Integer, T> contextAccessor) {
    return new ContextInjectingStream<>(this, contextAccessor);
}
default ProceduralStream<T> add(ProceduralStream<Double> a) {
return add2D((x, z) -> a.get(x, z));
}

View File

@@ -0,0 +1,49 @@
package com.volmit.iris.util.stream.utility;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.context.IrisContext;
import com.volmit.iris.util.function.Function3;
import com.volmit.iris.util.stream.BasicStream;
import com.volmit.iris.util.stream.ProceduralStream;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Stream decorator that short-circuits 2D lookups to the per-chunk context
 * cache when the requested coordinates fall inside the chunk currently being
 * generated, and otherwise delegates to the wrapped source stream.
 */
public class ContextInjectingStream<T> extends BasicStream<T> {
    // Extracts the precomputed value for local chunk coordinates from a ChunkContext.
    private final Function3<ChunkContext, Integer, Integer, T> contextAccessor;

    public ContextInjectingStream(ProceduralStream<T> stream, Function3<ChunkContext, Integer, Integer, T> contextAccessor) {
        super(stream);
        this.contextAccessor = contextAccessor;
    }

    @Override
    public T get(double x, double z) {
        IrisContext context = IrisContext.get();
        if(context != null) {
            ChunkContext chunkContext = context.getChunkContext();
            // NOTE(review): assumes chunkContext.getX()/getZ() are in the same
            // coordinate space as x/z so that >> 4 yields matching chunk indices
            // on both sides — confirm against ChunkContext's constructor.
            // NOTE(review): (int)x truncates toward zero, so negative fractional
            // coordinates could map to the wrong chunk/local cell — presumably
            // callers only pass integral block coordinates; verify.
            if(chunkContext != null && (int)x >> 4 == chunkContext.getX() >> 4 && (int)z >> 4 == chunkContext.getZ() >> 4) {
                // Inside the active chunk: serve the cached value at local coords (x&15, z&15).
                return contextAccessor.apply(chunkContext, (int)x&15, (int)z&15);
            }
        }
        // Outside the active chunk (or no context published): compute normally.
        return getTypedSource().get(x, z);
    }

    // 3D lookups bypass the chunk cache entirely and delegate to the source.
    @Override
    public T get(double x, double y, double z) {
        return getTypedSource().get(x, y, z);
    }

    @Override
    public double toDouble(T t) {
        return getTypedSource().toDouble(t);
    }

    @Override
    public T fromDouble(double d) {
        return getTypedSource().fromDouble(d);
    }
}