Mirror of https://github.com/VolmitSoftware/Iris.git

Compare commits


33 Commits

Author | SHA1 | Message | Date
Brian Fopiano | 563e0a7cd4 | V+ | 2022-09-12 16:00:14 -07:00
Brian Fopiano | 2f2f857e98 | Merge pull request #893 from VolmitSoftware/Development | 2022-09-12 15:59:20 -07:00
cyberpwn | 251c52a2ee | Fix cave mod engine npe | 2022-09-12 17:26:14 -04:00
cyberpwn | 7528bd343b | 3x | 2022-09-12 07:06:45 -04:00
cyberpwn | 06d9f279ac | f | 2022-09-11 23:00:15 -04:00
cyberpwn | 044403b829 | Fix incorrect object positions | 2022-09-11 21:31:21 -04:00
Brian Fopiano | 5a24dd3b49 | Revert "Revert "Eat me"" (this reverts commit 3189b3c7a5) | 2022-09-11 21:00:55 -04:00
Brian Fopiano | 3189b3c7a5 | Revert "Eat me" (this reverts commit da777da476) | 2022-09-11 21:00:38 -04:00
cyberpwn | 3bf1c6a282 | Merge remote-tracking branch 'origin/Development' into Development | 2022-09-11 20:59:04 -04:00
cyberpwn | 7c7309c3ab | Unload mantle when not generating | 2022-09-11 20:58:58 -04:00
Brian Fopiano | da777da476 | Eat me | 2022-09-11 17:57:08 -07:00
cyberpwn | 3d0c0a11ed | Merge remote-tracking branch 'origin/Development' into Development | 2022-09-10 02:04:58 -04:00
cyberpwn | e545269b93 | Even less waste work & prefetch caching | 2022-09-10 02:03:59 -04:00
Brian Fopiano | 141ca76647 | Merge pull request #890 from VolmitSoftware/Development | 2022-09-09 18:05:56 -07:00
Brian Fopiano | 94557830f5 | v+ | 2022-09-09 18:02:21 -07:00
Vatuu | 49acb7faba | Merge remote-tracking branch 'origin/Development' into Development | 2022-09-09 21:42:53 +02:00
Vatuu | 1904f67662 | Fixed Issue #864: Mantle not retaining object placements. | 2022-09-09 19:48:23 +02:00
Vatuu | 5b2bf38344 | Fixed Issue #884: Cache freaking out on global override. | 2022-09-09 18:10:52 +02:00
cyberpwn | 494c38a153 | Merge remote-tracking branch 'origin/Development' into Development | 2022-09-09 01:25:16 -04:00
cyberpwn | dd4b85cbfe | Fixes | 2022-09-09 01:24:20 -04:00
Brian Fopiano | 8f7b54a5a4 | Merge pull request #889 from VolmitSoftware/Development | 2022-09-08 17:26:41 -07:00
Vatuu | cec502340e | V+ | 2022-09-09 01:15:53 +02:00
Vatuu | 64e27c7fb0 | Disabled blockdrops as hotfix. | 2022-09-09 01:15:07 +02:00
cyberpwn | b2bbd31548 | Disable multicore chunks | 2022-09-07 17:07:34 -04:00
Brian Fopiano | 17df92a07c | Merge pull request #887 from VolmitSoftware/Development | 2022-09-06 20:55:18 -07:00
Brian Fopiano | 2b749b5ab7 | NMS update / Compile | 2022-09-06 20:51:59 -07:00
cyberpwn | 28e3402d88 | r | 2022-09-05 14:17:56 -04:00
cyberpwn | d96bb061e0 | Update repo | 2022-09-05 14:14:50 -04:00
Brian Fopiano | e6def804f8 | Merge pull request #883 from VolmitSoftware/Development | 2022-09-02 17:48:22 -07:00
Vatuu | a5be48c07c | V+ | 2022-09-02 23:35:38 +02:00
Vatuu | fe3909f594 | Fixed block drops not firing for biomes. | 2022-09-02 23:32:51 +02:00
Vatuu | f37d91a530 | Fixed loot table object override not working. | 2022-09-02 23:26:47 +02:00
Vatuu | 614ef78771 | Fixed reliance on outdated spigot api version. | 2022-09-02 20:32:17 +02:00
52 changed files with 10882 additions and 280 deletions

View File

@@ -24,10 +24,11 @@ plugins {
id "de.undercouch.download" version "5.0.1"
}
version '2.2.14-1.19.2' // Needs to be version specific
def nmsVersion = "1.19.2"
version '2.2.19-1.19.2' // Needs to be version specific
def nmsVersion = "1.19.2" //[NMS]
def apiVersion = '1.19'
def spigotJarVersion = '1.19.1-R0.1-SNAPSHOT'
def specialSourceVersion = '1.11.0' //[NMS]
def spigotJarVersion = '1.19.2-R0.1-SNAPSHOT' //[NMS]
def name = getRootProject().getName() // Defined in settings.gradle
def main = 'com.volmit.iris.Iris'
@@ -73,11 +74,8 @@ repositories {
includeGroup("org.spigotmc")
}
}
maven { url "https://dl.cloudsmith.io/public/arcane/archive/maven/" }
maven { url "https://maven.enginehub.org/repo/" }
mavenCentral()
mavenLocal()
maven { url "https://jitpack.io"}
maven { url "https://arcanearts.jfrog.io/artifactory/archives" }
}
/**
@@ -124,10 +122,10 @@ dependencies {
// Provided or Classpath
compileOnly 'org.projectlombok:lombok:1.18.24'
annotationProcessor 'org.projectlombok:lombok:1.18.24'
implementation 'org.spigotmc:spigot-api:1.19.1-R0.1-SNAPSHOT'
implementation 'org.spigotmc:spigot-api:1.19.2-R0.1-SNAPSHOT'
implementation 'me.clip:placeholderapi:2.11.1'
implementation 'io.th0rgal:oraxen:1.94.0'
implementation 'org.bukkit:craftbukkit:1.19.2-R0.1-SNAPSHOT:remapped-mojang'
implementation 'org.bukkit:craftbukkit:1.19.2-R0.1-SNAPSHOT:remapped-mojang' //[NMS]
implementation 'com.github.LoneDev6:api-itemsadder:3.1.0b'
// Shaded
@@ -194,7 +192,7 @@ task downloadBuildtools(type: Download) {
task downloadSpecialSource(type: Download) {
group "remapping"
src 'https://repo.maven.apache.org/maven2/net/md-5/SpecialSource/1.10.0/SpecialSource-1.10.0-shaded.jar'
src 'https://repo.maven.apache.org/maven2/net/md-5/SpecialSource/' + specialSourceVersion + '/SpecialSource-'+specialSourceVersion+'-shaded.jar'
dest specialSourceJar
onlyIf {
!specialSourceJar.exists()
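
With specialSourceVersion = '1.11.0' as declared above, the parameterized src line resolves to https://repo.maven.apache.org/maven2/net/md-5/SpecialSource/1.11.0/SpecialSource-1.11.0-shaded.jar, replacing the previously hard-coded 1.10.0 jar. The //[NMS] comments appear to mark every definition that has to move together when the NMS target changes (nmsVersion, specialSourceVersion, spigotJarVersion, and the remapped craftbukkit dependency); that reading is an inference from these hunks, not stated elsewhere in the compare.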

View File

@@ -38,6 +38,7 @@ import com.volmit.iris.util.parallel.MultiBurst;
import com.volmit.iris.util.plugin.VolmitSender;
import com.volmit.iris.util.scheduling.J;
import com.volmit.iris.util.scheduling.jobs.QueueJob;
import com.volmit.iris.util.stream.utility.ProfiledStream;
import org.bukkit.Bukkit;
import org.bukkit.Chunk;
import org.bukkit.World;

View File

@@ -39,6 +39,8 @@ import com.volmit.iris.util.collection.KMap;
import com.volmit.iris.util.context.IrisContext;
import com.volmit.iris.util.format.C;
import com.volmit.iris.util.math.RNG;
import com.volmit.iris.util.parallel.BurstExecutor;
import com.volmit.iris.util.parallel.MultiBurst;
import com.volmit.iris.util.scheduling.ChronoLatch;
import com.volmit.iris.util.scheduling.J;
import lombok.Data;
@@ -467,4 +469,38 @@ public class IrisData implements ExclusionStrategy, TypeAdapterFactory {
public boolean isClosed() {
return closed;
}
public void savePrefetch(Engine engine) {
BurstExecutor b = MultiBurst.burst.burst(loaders.size());
for(ResourceLoader<?> i : loaders.values()) {
b.queue(() -> {
try {
i.saveFirstAccess(engine);
} catch(IOException e) {
throw new RuntimeException(e);
}
});
}
b.complete();
Iris.info("Saved Prefetch Cache to speed up future world startups");
}
public void loadPrefetch(Engine engine) {
BurstExecutor b = MultiBurst.burst.burst(loaders.size());
for(ResourceLoader<?> i : loaders.values()) {
b.queue(() -> {
try {
i.loadFirstAccess(engine);
} catch(IOException e) {
throw new RuntimeException(e);
}
});
}
b.complete();
Iris.info("Loaded Prefetch Cache to reduce generation disk use.");
}
}

View File

@@ -23,32 +23,51 @@ import com.volmit.iris.Iris;
import com.volmit.iris.core.IrisSettings;
import com.volmit.iris.core.project.SchemaBuilder;
import com.volmit.iris.core.service.PreservationSVC;
import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.engine.framework.MeteredCache;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.collection.KSet;
import com.volmit.iris.util.data.KCache;
import com.volmit.iris.util.format.C;
import com.volmit.iris.util.format.Form;
import com.volmit.iris.util.io.CustomOutputStream;
import com.volmit.iris.util.io.IO;
import com.volmit.iris.util.json.JSONArray;
import com.volmit.iris.util.json.JSONObject;
import com.volmit.iris.util.parallel.BurstExecutor;
import com.volmit.iris.util.parallel.MultiBurst;
import com.volmit.iris.util.scheduling.ChronoLatch;
import com.volmit.iris.util.scheduling.J;
import com.volmit.iris.util.scheduling.PrecisionStopwatch;
import lombok.Data;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Stream;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
@Data
public class ResourceLoader<T extends IrisRegistrant> implements MeteredCache {
public static final AtomicDouble tlt = new AtomicDouble(0);
private static final int CACHE_SIZE = 100000;
protected KSet<String> firstAccess;
protected File root;
protected String folderName;
protected String resourceTypeName;
@@ -63,6 +82,7 @@ public class ResourceLoader<T extends IrisRegistrant> implements MeteredCache {
public ResourceLoader(File root, IrisData manager, String folderName, String resourceTypeName, Class<? extends T> objectClass) {
this.manager = manager;
firstAccess = new KSet<>();
folderCache = new AtomicReference<>();
sec = new ChronoLatch(5000);
loads = new AtomicInteger();
@@ -221,6 +241,24 @@ public class ResourceLoader<T extends IrisRegistrant> implements MeteredCache {
return m;
}
public KList<T> loadAllParallel(KList<String> s) {
KList<T> m = new KList<>();
BurstExecutor burst = MultiBurst.burst.burst(s.size());
for(String i : s) {
burst.queue(() -> {
T t = load(i);
if(t != null) {
m.add(t);
}
});
}
burst.complete();
return m;
}
public KList<T> loadAll(KList<String> s, Consumer<T> postLoad) {
KList<T> m = new KList<>();
@@ -282,12 +320,52 @@ public class ResourceLoader<T extends IrisRegistrant> implements MeteredCache {
return null;
}
firstAccess.add(name);
return loadCache.get(name);
}
public void loadFirstAccess(Engine engine) throws IOException
{
String id = "DIM" + Math.abs(engine.getSeedManager().getSeed() + engine.getDimension().getVersion() + engine.getDimension().getLoadKey().hashCode());
File file = Iris.instance.getDataFile("prefetch/" + id + "/" + Math.abs(getFolderName().hashCode()) + ".ipfch");
if(!file.exists()) {
return;
}
FileInputStream fin = new FileInputStream(file);
GZIPInputStream gzi = new GZIPInputStream(fin);
DataInputStream din = new DataInputStream(gzi);
int m = din.readInt();
KList<String> s = new KList<>();
for(int i = 0; i < m; i++) {
s.add(din.readUTF());
}
din.close();
file.deleteOnExit();
Iris.info("Loading " + s.size() + " prefetch " + getFolderName());
loadAllParallel(s);
}
public void saveFirstAccess(Engine engine) throws IOException {
String id = "DIM" + Math.abs(engine.getSeedManager().getSeed() + engine.getDimension().getVersion() + engine.getDimension().getLoadKey().hashCode());
File file = Iris.instance.getDataFile("prefetch/" + id + "/" + Math.abs(getFolderName().hashCode()) + ".ipfch");
file.getParentFile().mkdirs();
FileOutputStream fos = new FileOutputStream(file);
GZIPOutputStream gzo = new CustomOutputStream(fos, 9);
DataOutputStream dos = new DataOutputStream(gzo);
dos.writeInt(firstAccess.size());
for(String i : firstAccess) {
dos.writeUTF(i);
}
dos.flush();
dos.close();
}
public KList<File> getFolders() {
synchronized(folderCache) {
if(folderCache.get() == null) {
KList<File> fc = new KList<>();
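
The saveFirstAccess/loadFirstAccess pair above fixes the on-disk layout of an .ipfch prefetch file: a GZIP stream holding one int (the key count) followed by that many writeUTF entries, stored under the plugin data folder at prefetch/DIM<hash>/<folder-hash>.ipfch. A minimal standalone reader for inspecting such a file outside the plugin could look like the sketch below; IrisPrefetchDump is a hypothetical helper, not part of Iris.

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.GZIPInputStream;

// Hypothetical inspection helper, not part of Iris: dumps the keys stored in an .ipfch file.
// Layout, as written by saveFirstAccess above: GZIP( int count, then count x writeUTF(key) ).
public final class IrisPrefetchDump {
    public static List<String> read(String path) throws IOException {
        try(DataInputStream in = new DataInputStream(new GZIPInputStream(new FileInputStream(path)))) {
            int count = in.readInt();
            List<String> keys = new ArrayList<>(count);
            for(int i = 0; i < count; i++) {
                keys.add(in.readUTF());
            }
            return keys;
        }
    }

    public static void main(String[] args) throws IOException {
        // Argument is a path such as <plugin data folder>/prefetch/DIM.../1234.ipfch (path shape assumed).
        read(args[0]).forEach(System.out::println);
    }
}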

View File

@@ -33,17 +33,21 @@ import com.volmit.iris.engine.object.IrisRegion;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.collection.KMap;
import com.volmit.iris.util.collection.KSet;
import com.volmit.iris.util.context.IrisContext;
import com.volmit.iris.util.data.DataProvider;
import com.volmit.iris.util.math.M;
import com.volmit.iris.util.math.RNG;
import com.volmit.iris.util.noise.CNG;
import com.volmit.iris.util.stream.ProceduralStream;
import com.volmit.iris.util.stream.interpolation.Interpolated;
import com.volmit.iris.util.stream.utility.WasteDetector;
import lombok.Data;
import org.bukkit.Material;
import org.bukkit.block.Biome;
import org.bukkit.block.data.BlockData;
import java.lang.reflect.Field;
import java.util.List;
import java.util.UUID;
@Data
@@ -112,96 +116,106 @@ public class IrisComplex implements DataProvider {
.getAllBiomes(this).forEach((b) -> b
.getGenerators()
.forEach((c) -> registerGenerator(c.getCachedGenerator(this)))));
overlayStream = ProceduralStream.ofDouble((x, z) -> 0.0D);
overlayStream = ProceduralStream.ofDouble((x, z) -> 0.0D).waste("Overlay Stream");
engine.getDimension().getOverlayNoise().forEach(i -> overlayStream = overlayStream.add((x, z) -> i.get(rng, getData(), x, z)));
rockStream = engine.getDimension().getRockPalette().getLayerGenerator(rng.nextParallelRNG(45), data).stream()
.select(engine.getDimension().getRockPalette().getBlockData(data));
.select(engine.getDimension().getRockPalette().getBlockData(data)).waste("Rock Stream");
fluidStream = engine.getDimension().getFluidPalette().getLayerGenerator(rng.nextParallelRNG(78), data).stream()
.select(engine.getDimension().getFluidPalette().getBlockData(data));
.select(engine.getDimension().getFluidPalette().getBlockData(data)).waste("Fluid Stream");
regionStyleStream = engine.getDimension().getRegionStyle().create(rng.nextParallelRNG(883), getData()).stream()
.zoom(engine.getDimension().getRegionZoom());
regionIdentityStream = regionStyleStream.fit(Integer.MIN_VALUE, Integer.MAX_VALUE);
.zoom(engine.getDimension().getRegionZoom()).waste("Region Style");
regionIdentityStream = regionStyleStream.fit(Integer.MIN_VALUE, Integer.MAX_VALUE).waste("Region Identity Stream");
regionStream = focusRegion != null ?
ProceduralStream.of((x, z) -> focusRegion,
Interpolated.of(a -> 0D, a -> focusRegion))
: regionStyleStream
.selectRarity(data.getRegionLoader().loadAll(engine.getDimension().getRegions()))
.cache2D("regionStream", engine, cacheSize);
regionIDStream = regionIdentityStream.convertCached((i) -> new UUID(Double.doubleToLongBits(i), String.valueOf(i * 38445).hashCode() * 3245556666L));
caveBiomeStream = regionStream.convert((r)
.cache2D("regionStream", engine, cacheSize).waste("Region Stream");
regionIDStream = regionIdentityStream.convertCached((i) -> new UUID(Double.doubleToLongBits(i),
String.valueOf(i * 38445).hashCode() * 3245556666L)).waste("Region ID Stream");
caveBiomeStream = regionStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getRegion().get(x, z))
.convert((r)
-> engine.getDimension().getCaveBiomeStyle().create(rng.nextParallelRNG(InferredType.CAVE.ordinal()), getData()).stream()
.zoom(r.getCaveBiomeZoom())
.selectRarity(data.getBiomeLoader().loadAll(r.getCaveBiomes()))
.onNull(emptyBiome)
).convertAware2D(ProceduralStream::get).cache2D("caveBiomeStream", engine, cacheSize);
).convertAware2D(ProceduralStream::get).cache2D("caveBiomeStream", engine, cacheSize).waste("Cave Biome Stream");
inferredStreams.put(InferredType.CAVE, caveBiomeStream);
landBiomeStream = regionStream.convert((r)
landBiomeStream = regionStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getRegion().get(x, z))
.convert((r)
-> engine.getDimension().getLandBiomeStyle().create(rng.nextParallelRNG(InferredType.LAND.ordinal()), getData()).stream()
.zoom(r.getLandBiomeZoom())
.selectRarity(data.getBiomeLoader().loadAll(r.getLandBiomes(), (t) -> t.setInferredType(InferredType.LAND)))
).convertAware2D(ProceduralStream::get)
.cache2D("landBiomeStream", engine, cacheSize);
.cache2D("landBiomeStream", engine, cacheSize).waste("Land Biome Stream");
inferredStreams.put(InferredType.LAND, landBiomeStream);
seaBiomeStream = regionStream.convert((r)
seaBiomeStream = regionStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getRegion().get(x, z))
.convert((r)
-> engine.getDimension().getSeaBiomeStyle().create(rng.nextParallelRNG(InferredType.SEA.ordinal()), getData()).stream()
.zoom(r.getSeaBiomeZoom())
.selectRarity(data.getBiomeLoader().loadAll(r.getSeaBiomes(), (t) -> t.setInferredType(InferredType.SEA)))
).convertAware2D(ProceduralStream::get)
.cache2D("seaBiomeStream", engine, cacheSize);
.cache2D("seaBiomeStream", engine, cacheSize).waste("Sea Biome Stream");
inferredStreams.put(InferredType.SEA, seaBiomeStream);
shoreBiomeStream = regionStream.convert((r)
shoreBiomeStream = regionStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getRegion().get(x, z))
.convert((r)
-> engine.getDimension().getShoreBiomeStyle().create(rng.nextParallelRNG(InferredType.SHORE.ordinal()), getData()).stream()
.zoom(r.getShoreBiomeZoom())
.selectRarity(data.getBiomeLoader().loadAll(r.getShoreBiomes(), (t) -> t.setInferredType(InferredType.SHORE)))
).convertAware2D(ProceduralStream::get).cache2D("shoreBiomeStream", engine, cacheSize);
).convertAware2D(ProceduralStream::get).cache2D("shoreBiomeStream", engine, cacheSize).waste("Shore Biome Stream");
inferredStreams.put(InferredType.SHORE, shoreBiomeStream);
bridgeStream = focusBiome != null ? ProceduralStream.of((x, z) -> focusBiome.getInferredType(),
Interpolated.of(a -> 0D, a -> focusBiome.getInferredType())) :
engine.getDimension().getContinentalStyle().create(rng.nextParallelRNG(234234565), getData())
.bake().scale(1D / engine.getDimension().getContinentZoom()).bake().stream()
.convert((v) -> v >= engine.getDimension().getLandChance() ? InferredType.SEA : InferredType.LAND)
.cache2D("bridgeStream", engine, cacheSize);
.cache2D("bridgeStream", engine, cacheSize).waste("Bridge Stream");
baseBiomeStream = focusBiome != null ? ProceduralStream.of((x, z) -> focusBiome,
Interpolated.of(a -> 0D, a -> focusBiome)) :
bridgeStream.convertAware2D((t, x, z) -> inferredStreams.get(t).get(x, z))
.convertAware2D(this::implode)
.cache2D("baseBiomeStream", engine, cacheSize);
.cache2D("baseBiomeStream", engine, cacheSize).waste("Base Biome Stream");
heightStream = ProceduralStream.of((x, z) -> {
IrisBiome b = focusBiome != null ? focusBiome : baseBiomeStream.get(x, z);
return getHeight(engine, b, x, z, engine.getSeedManager().getHeight());
}, Interpolated.DOUBLE).clamp(0, engine.getHeight()).cache2D("heightStream", engine, cacheSize);
roundedHeighteightStream = heightStream.round();
slopeStream = heightStream.slope(3).cache2D("slopeStream", engine, cacheSize);
}, Interpolated.DOUBLE).cache2D("heightStream", engine, cacheSize).waste("Height Stream");
roundedHeighteightStream = heightStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getHeight().get(x, z))
.round().waste("Rounded Height Stream");
slopeStream = heightStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getHeight().get(x, z))
.slope(3).cache2D("slopeStream", engine, cacheSize).waste("Slope Stream");
trueBiomeStream = focusBiome != null ? ProceduralStream.of((x, y) -> focusBiome, Interpolated.of(a -> 0D,
b -> focusBiome))
.cache2D("trueBiomeStream-focus", engine, cacheSize) : heightStream
.convertAware2D((h, x, z) ->
fixBiomeType(h, baseBiomeStream.get(x, z),
regionStream.get(x, z), x, z, fluidHeight))
.cache2D("trueBiomeStream", engine, cacheSize);
trueBiomeDerivativeStream = trueBiomeStream.convert(IrisBiome::getDerivative).cache2D("trueBiomeDerivativeStream", engine, cacheSize);
heightFluidStream = heightStream.max(fluidHeight).cache2D("heightFluidStream", engine, cacheSize);
maxHeightStream = ProceduralStream.ofDouble((x, z) -> height);
terrainSurfaceDecoration = trueBiomeStream
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.NONE)).cache2D("terrainSurfaceDecoration", engine, cacheSize);
terrainCeilingDecoration = trueBiomeStream
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.CEILING)).cache2D("terrainCeilingDecoration", engine, cacheSize);
terrainCaveSurfaceDecoration = caveBiomeStream
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.NONE)).cache2D("terrainCaveSurfaceDecoration", engine, cacheSize);
terrainCaveCeilingDecoration = caveBiomeStream
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.CEILING)).cache2D("terrainCaveCeilingDecoration", engine, cacheSize);
shoreSurfaceDecoration = trueBiomeStream
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.SHORE_LINE)).cache2D("shoreSurfaceDecoration", engine, cacheSize);
seaSurfaceDecoration = trueBiomeStream
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.SEA_SURFACE)).cache2D("seaSurfaceDecoration", engine, cacheSize);
seaFloorDecoration = trueBiomeStream
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.SEA_FLOOR)).cache2D("seaFloorDecoration", engine, cacheSize);
baseBiomeIDStream = trueBiomeStream.convertAware2D((b, x, z) -> {
regionStream.contextInjecting((c,xx,zz)-> IrisContext.getOr(engine).getChunkContext().getRegion().get(xx, zz)).get(x, z), x, z, fluidHeight))
.cache2D("trueBiomeStream", engine, cacheSize).waste("True Biome Stream");
trueBiomeDerivativeStream = trueBiomeStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getBiome().get(x, z))
.convert(IrisBiome::getDerivative).cache2D("trueBiomeDerivativeStream", engine, cacheSize).waste("True Biome Derivative Stream");
heightFluidStream = heightStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getHeight().get(x, z))
.max(fluidHeight).cache2D("heightFluidStream", engine, cacheSize).waste("Height Fluid Stream");
maxHeightStream = ProceduralStream.ofDouble((x, z) -> height).waste("Max Height Stream");
terrainSurfaceDecoration = trueBiomeStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getBiome().get(x, z))
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.NONE)).cache2D("terrainSurfaceDecoration", engine, cacheSize).waste("Surface Decoration Stream");
terrainCeilingDecoration = trueBiomeStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getBiome().get(x, z))
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.CEILING)).cache2D("terrainCeilingDecoration", engine, cacheSize).waste("Ceiling Decoration Stream");
terrainCaveSurfaceDecoration = caveBiomeStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getCave().get(x, z))
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.NONE)).cache2D("terrainCaveSurfaceDecoration", engine, cacheSize).waste("Cave Surface Stream");
terrainCaveCeilingDecoration = caveBiomeStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getCave().get(x, z))
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.CEILING)).cache2D("terrainCaveCeilingDecoration", engine, cacheSize).waste("Cave Ceiling Stream");
shoreSurfaceDecoration = trueBiomeStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getBiome().get(x, z))
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.SHORE_LINE)).cache2D("shoreSurfaceDecoration", engine, cacheSize).waste("Shore Surface Stream");
seaSurfaceDecoration = trueBiomeStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getBiome().get(x, z))
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.SEA_SURFACE)).cache2D("seaSurfaceDecoration", engine, cacheSize).waste("Sea Surface Stream");
seaFloorDecoration = trueBiomeStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getBiome().get(x, z))
.convertAware2D((b, xx, zz) -> decorateFor(b, xx, zz, IrisDecorationPart.SEA_FLOOR)).cache2D("seaFloorDecoration", engine, cacheSize).waste("Sea Floor Stream");
baseBiomeIDStream = trueBiomeStream.contextInjecting((c,x,z)-> IrisContext.getOr(engine).getChunkContext().getBiome().get(x, z))
.convertAware2D((b, x, z) -> {
UUID d = regionIDStream.get(x, z);
return new UUID(b.getLoadKey().hashCode() * 818223L,
d.hashCode());
})
.cache2D("", engine, cacheSize);
.cache2D("", engine, cacheSize).waste("Biome ID Stream");
//@done
}
@@ -314,7 +328,7 @@ public class IrisComplex implements DataProvider {
}
return 0;
});
});;
double d = 0;
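
Every stream built in this constructor is now tagged with .waste("<name>"), and ProfiledStream/WasteDetector imports appear in the hunks above, which suggests a per-stream accounting layer used to find streams that are re-sampled more often than necessary. The real WasteDetector is not included in this compare; the sketch below only illustrates the general decorator shape such a profiler could take, with all names and behaviour assumed.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.LongAdder;
import java.util.function.ToDoubleBiFunction;

// Illustrative only: a per-stream sample counter in the spirit of a "waste detector".
// It does not mirror Iris' real WasteDetector API; every name and behaviour here is assumed.
final class StreamAccessProfiler {
    private static final ConcurrentHashMap<String, LongAdder> SAMPLES = new ConcurrentHashMap<>();

    // Wrap a 2D sampler so every lookup is attributed to a named stream.
    static ToDoubleBiFunction<Integer, Integer> waste(String name, ToDoubleBiFunction<Integer, Integer> source) {
        LongAdder counter = SAMPLES.computeIfAbsent(name, k -> new LongAdder());
        return (x, z) -> {
            counter.increment();
            return source.applyAsDouble(x, z);
        };
    }

    // Streams with disproportionate counts are candidates for cache2D() or ChunkContext reuse.
    static void report() {
        SAMPLES.forEach((name, count) -> System.out.println(name + ": " + count.sum() + " samples"));
    }
}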

View File

@@ -46,6 +46,7 @@ import com.volmit.iris.engine.object.IrisRegion;
import com.volmit.iris.engine.scripting.EngineExecutionEnvironment;
import com.volmit.iris.util.atomics.AtomicRollingSequence;
import com.volmit.iris.util.collection.KMap;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.context.IrisContext;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.format.C;
@@ -128,8 +129,9 @@ public class IrisEngine implements Engine {
context = new IrisContext(this);
cleaning = new AtomicBoolean(false);
context.touch();
Iris.info("Initializing Engine: " + target.getWorld().name() + "/" + target.getDimension().getLoadKey() + " (" + target.getDimension().getDimensionHeight() + " height) Seed: " + getSeedManager().getSeed());
getData().setEngine(this);
getData().loadPrefetch(this);
Iris.info("Initializing Engine: " + target.getWorld().name() + "/" + target.getDimension().getLoadKey() + " (" + target.getDimension().getDimensionHeight() + " height) Seed: " + getSeedManager().getSeed());
minHeight = 0;
failing = false;
closed = false;
@@ -145,6 +147,7 @@ public class IrisEngine implements Engine {
}
private void tickRandomPlayer() {
recycle();
if(perSecondBudLatch.flip()) {
buds.set(bud.get());
bud.set(0);
@@ -192,8 +195,8 @@ public class IrisEngine implements Engine {
}
@Override
public void generateMatter(int x, int z, boolean multicore) {
getMantle().generateMatter(x, z, multicore);
public void generateMatter(int x, int z, boolean multicore, ChunkContext context) {
getMantle().generateMatter(x, z, multicore, context);
}
@Override
@@ -453,7 +456,10 @@ public class IrisEngine implements Engine {
getMantle().getMantle().flag(x >> 4, z >> 4, MantleFlag.REAL, true);
getMetrics().getTotal().put(p.getMilliseconds());
generated.incrementAndGet();
recycle();
if(generated.get() == 661) {
J.a(() -> getData().savePrefetch(this));
}
} catch(Throwable e) {
Iris.reportError(e);
fail("Failed to generate " + x + ", " + z, e);

View File

@@ -623,8 +623,8 @@ public class IrisWorldManager extends EngineAssignedWorldManager {
}
});
KList<ItemStack> d = new KList<>();
IrisBiome b = getEngine().getBiome(e.getBlock().getLocation());
/*KList<ItemStack> d = new KList<>();
IrisBiome b = getEngine().getBiome(e.getBlock().getLocation().clone().subtract(0, getEngine().getWorld().minHeight(), 0));
List<IrisBlockDrops> dropProviders = filterDrops(b.getBlockDrops(), e, getData());
if(dropProviders.stream().noneMatch(IrisBlockDrops::isSkipParents)) {
@@ -642,7 +642,7 @@ public class IrisWorldManager extends EngineAssignedWorldManager {
if(d.isNotEmpty()) {
World w = e.getBlock().getWorld();
J.s(() -> d.forEach(item -> w.dropItemNaturally(e.getBlock().getLocation().clone().add(.5, .5, .5), item)));
}
}*/
}
}

View File

@@ -25,6 +25,7 @@ import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.engine.framework.EngineAssignedActuator;
import com.volmit.iris.engine.object.IrisBiome;
import com.volmit.iris.engine.object.IrisBiomeCustom;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.hunk.Hunk;
import com.volmit.iris.util.hunk.view.BiomeGridHunkHolder;
@@ -75,52 +76,47 @@ public class IrisBiomeActuator extends EngineAssignedActuator<Biome> {
@BlockCoordinates
@Override
public void onActuate(int x, int z, Hunk<Biome> h, boolean multicore) {
public void onActuate(int x, int z, Hunk<Biome> h, boolean multicore, ChunkContext context) {
PrecisionStopwatch p = PrecisionStopwatch.start();
BurstExecutor burst = burst().burst(PaperLib.isPaper() && multicore);
for(int xf = 0; xf < h.getWidth(); xf++) {
int finalXf = xf;
burst.queue(() -> {
IrisBiome ib;
for(int zf = 0; zf < h.getDepth(); zf++) {
ib = getComplex().getTrueBiomeStream().get(finalXf + x, zf + z);
int maxHeight = (int) (getComplex().getFluidHeight() + ib.getMaxWithObjectHeight(getData()));
if(ib.isCustom()) {
try {
IrisBiomeCustom custom = ib.getCustomBiome(rng, x, 0, z);
Object biomeBase = INMS.get().getCustomBiomeBaseHolderFor(getDimension().getLoadKey() + ":" + custom.getId());
IrisBiome ib;
for(int zf = 0; zf < h.getDepth(); zf++) {
ib = context.getBiome().get(xf, zf);
int maxHeight = (int) (getComplex().getFluidHeight() + ib.getMaxWithObjectHeight(getData()));
if(ib.isCustom()) {
try {
IrisBiomeCustom custom = ib.getCustomBiome(rng, x, 0, z);
Object biomeBase = INMS.get().getCustomBiomeBaseHolderFor(getDimension().getLoadKey() + ":" + custom.getId());
if(biomeBase == null || !injectBiome(h, x, 0, z, biomeBase)) {
throw new RuntimeException("Cant inject biome!");
}
for(int i = 0; i < maxHeight; i++) {
injectBiome(h, finalXf, i, zf, biomeBase);
}
} catch(Throwable e) {
Iris.reportError(e);
Biome v = ib.getSkyBiome(rng, x, 0, z);
for(int i = 0; i < maxHeight; i++) {
h.set(finalXf, i, zf, v);
}
if(biomeBase == null || !injectBiome(h, x, 0, z, biomeBase)) {
throw new RuntimeException("Cant inject biome!");
}
} else {
Biome v = ib.getSkyBiome(rng, x, 0, z);
if(v != null) {
for(int i = 0; i < maxHeight; i++) {
h.set(finalXf, i, zf, v);
}
} else if(cl.flip()) {
Iris.error("No biome provided for " + ib.getLoadKey());
for(int i = 0; i < maxHeight; i++) {
injectBiome(h, xf, i, zf, biomeBase);
}
} catch(Throwable e) {
Iris.reportError(e);
Biome v = ib.getSkyBiome(rng, x, 0, z);
for(int i = 0; i < maxHeight; i++) {
h.set(xf, i, zf, v);
}
}
} else {
Biome v = ib.getSkyBiome(rng, x, 0, z);
if(v != null) {
for(int i = 0; i < maxHeight; i++) {
h.set(xf, i, zf, v);
}
} else if(cl.flip()) {
Iris.error("No biome provided for " + ib.getLoadKey());
}
}
});
}
}
burst.complete();
getEngine().getMetrics().getBiome().put(p.getMilliseconds());
}
}

View File

@@ -27,6 +27,7 @@ import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.engine.framework.EngineAssignedActuator;
import com.volmit.iris.engine.framework.EngineDecorator;
import com.volmit.iris.engine.object.IrisBiome;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.hunk.Hunk;
import com.volmit.iris.util.math.RNG;
@@ -66,75 +67,70 @@ public class IrisDecorantActuator extends EngineAssignedActuator<BlockData> {
@BlockCoordinates
@Override
public void onActuate(int x, int z, Hunk<BlockData> output, boolean multicore) {
public void onActuate(int x, int z, Hunk<BlockData> output, boolean multicore, ChunkContext context) {
if(!getEngine().getDimension().isDecorate()) {
return;
}
PrecisionStopwatch p = PrecisionStopwatch.start();
BurstExecutor burst = burst().burst(multicore);
for(int i = 0; i < output.getWidth(); i++) {
int finalI = i;
burst.queue(() -> {
int height;
int realX = Math.round(x + finalI);
int realZ;
IrisBiome biome, cave;
for(int j = 0; j < output.getDepth(); j++) {
boolean solid;
int emptyFor = 0;
int lastSolid = 0;
realZ = Math.round(z + j);
height = (int) Math.round(getComplex().getHeightStream().get(realX, realZ));
biome = getComplex().getTrueBiomeStream().get(realX, realZ);
cave = shouldRay ? getComplex().getCaveBiomeStream().get(realX, realZ) : null;
int height;
int realX = Math.round(x + i);
int realZ;
IrisBiome biome, cave;
for(int j = 0; j < output.getDepth(); j++) {
boolean solid;
int emptyFor = 0;
int lastSolid = 0;
realZ = Math.round(z + j);
height = (int) Math.round(context.getHeight().get(i, j));
biome = context.getBiome().get(i, j);
cave = shouldRay ? context.getCave().get(i, j) : null;
if(biome.getDecorators().isEmpty() && (cave == null || cave.getDecorators().isEmpty())) {
continue;
}
if(biome.getDecorators().isEmpty() && (cave == null || cave.getDecorators().isEmpty())) {
continue;
}
if(height < getDimension().getFluidHeight()) {
getSeaSurfaceDecorator().decorate(finalI, j,
realX, Math.round(finalI + 1), Math.round(x + finalI - 1),
realZ, Math.round(z + j + 1), Math.round(z + j - 1),
output, biome, getDimension().getFluidHeight(), getEngine().getHeight());
getSeaFloorDecorator().decorate(finalI, j,
realX, realZ, output, biome, height + 1,
getDimension().getFluidHeight() + 1);
}
if(height < getDimension().getFluidHeight()) {
getSeaSurfaceDecorator().decorate(i, j,
realX, Math.round(i + 1), Math.round(x + i - 1),
realZ, Math.round(z + j + 1), Math.round(z + j - 1),
output, biome, getDimension().getFluidHeight(), getEngine().getHeight());
getSeaFloorDecorator().decorate(i, j,
realX, realZ, output, biome, height + 1,
getDimension().getFluidHeight() + 1);
}
if(height == getDimension().getFluidHeight()) {
getShoreLineDecorator().decorate(finalI, j,
realX, Math.round(x + finalI + 1), Math.round(x + finalI - 1),
realZ, Math.round(z + j + 1), Math.round(z + j - 1),
output, biome, height, getEngine().getHeight());
}
if(height == getDimension().getFluidHeight()) {
getShoreLineDecorator().decorate(i, j,
realX, Math.round(x + i + 1), Math.round(x + i - 1),
realZ, Math.round(z + j + 1), Math.round(z + j - 1),
output, biome, height, getEngine().getHeight());
}
getSurfaceDecorator().decorate(finalI, j, realX, realZ, output, biome, height, getEngine().getHeight() - height);
getSurfaceDecorator().decorate(i, j, realX, realZ, output, biome, height, getEngine().getHeight() - height);
if(cave != null && cave.getDecorators().isNotEmpty()) {
for(int k = height; k > 0; k--) {
solid = PREDICATE_SOLID.test(output.get(finalI, k, j));
if(cave != null && cave.getDecorators().isNotEmpty()) {
for(int k = height; k > 0; k--) {
solid = PREDICATE_SOLID.test(output.get(i, k, j));
if(solid) {
if(emptyFor > 0) {
getSurfaceDecorator().decorate(finalI, j, realX, realZ, output, cave, k, lastSolid);
getCeilingDecorator().decorate(finalI, j, realX, realZ, output, cave, lastSolid - 1, emptyFor);
emptyFor = 0;
}
lastSolid = k;
} else {
emptyFor++;
if(solid) {
if(emptyFor > 0) {
getSurfaceDecorator().decorate(i, j, realX, realZ, output, cave, k, lastSolid);
getCeilingDecorator().decorate(i, j, realX, realZ, output, cave, lastSolid - 1, emptyFor);
emptyFor = 0;
}
lastSolid = k;
} else {
emptyFor++;
}
}
}
});
}
}
burst.complete();
getEngine().getMetrics().getDecoration().put(p.getMilliseconds());
}

View File

@@ -23,10 +23,10 @@ import com.volmit.iris.engine.framework.EngineAssignedActuator;
import com.volmit.iris.engine.object.IrisBiome;
import com.volmit.iris.engine.object.IrisRegion;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.hunk.Hunk;
import com.volmit.iris.util.math.RNG;
import com.volmit.iris.util.parallel.BurstExecutor;
import com.volmit.iris.util.scheduling.PrecisionStopwatch;
import lombok.Getter;
import org.bukkit.Material;
@@ -50,17 +50,13 @@ public class IrisTerrainNormalActuator extends EngineAssignedActuator<BlockData>
@BlockCoordinates
@Override
public void onActuate(int x, int z, Hunk<BlockData> h, boolean multicore) {
public void onActuate(int x, int z, Hunk<BlockData> h, boolean multicore, ChunkContext context) {
PrecisionStopwatch p = PrecisionStopwatch.start();
BurstExecutor e = burst().burst(multicore);
for(int xf = 0; xf < h.getWidth(); xf++) {
int finalXf = xf;
e.queue(() -> terrainSliver(x, z, finalXf, h));
terrainSliver(x, z, xf, h, context);
}
e.complete();
getEngine().getMetrics().getTerrain().put(p.getMilliseconds());
}
@@ -81,7 +77,7 @@ public class IrisTerrainNormalActuator extends EngineAssignedActuator<BlockData>
* the blockdata
*/
@BlockCoordinates
public void terrainSliver(int x, int z, int xf, Hunk<BlockData> h) {
public void terrainSliver(int x, int z, int xf, Hunk<BlockData> h, ChunkContext context) {
int zf, realX, realZ, hf, he;
IrisBiome biome;
IrisRegion region;
@@ -89,9 +85,9 @@ public class IrisTerrainNormalActuator extends EngineAssignedActuator<BlockData>
for(zf = 0; zf < h.getDepth(); zf++) {
realX = xf + x;
realZ = zf + z;
biome = getComplex().getTrueBiomeStream().get(realX, realZ);
region = getComplex().getRegionStream().get(realX, realZ);
he = (int) Math.round(Math.min(h.getHeight(), getComplex().getHeightStream().get(realX, realZ)));
biome = context.getBiome().get(xf, zf);
region = context.getRegion().get(xf, zf);
he = (int) Math.round(Math.min(h.getHeight(), context.getHeight().get(xf, zf)));
hf = Math.round(Math.max(Math.min(h.getHeight(), getDimension().getFluidHeight()), he));
if(hf < 0) {
@@ -126,7 +122,7 @@ public class IrisTerrainNormalActuator extends EngineAssignedActuator<BlockData>
continue;
}
h.set(xf, i, zf, getComplex().getFluidStream().get(realX, +realZ));
h.set(xf, i, zf, context.getFluid().get(xf,zf));
continue;
}
@@ -153,7 +149,7 @@ public class IrisTerrainNormalActuator extends EngineAssignedActuator<BlockData>
if(ore != null) {
h.set(xf, i, zf, ore);
} else {
h.set(xf, i, zf, getComplex().getRockStream().get(realX, realZ));
h.set(xf, i, zf, context.getRock().get(xf, zf));
}
}
}

View File

@@ -24,7 +24,6 @@ import com.volmit.iris.core.gui.components.RenderType;
import com.volmit.iris.core.gui.components.Renderer;
import com.volmit.iris.core.loader.IrisData;
import com.volmit.iris.core.loader.IrisRegistrant;
import com.volmit.iris.core.nms.INMS;
import com.volmit.iris.engine.IrisComplex;
import com.volmit.iris.engine.data.cache.Cache;
import com.volmit.iris.engine.data.chunk.TerrainChunk;
@@ -33,6 +32,7 @@ import com.volmit.iris.engine.object.*;
import com.volmit.iris.engine.scripting.EngineExecutionEnvironment;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.collection.KMap;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.context.IrisContext;
import com.volmit.iris.util.data.B;
import com.volmit.iris.util.data.DataProvider;
@@ -195,7 +195,7 @@ public interface Engine extends DataProvider, Fallible, LootProvider, BlockUpdat
return getComplex().getRegionStream().get(x, z);
}
void generateMatter(int x, int z, boolean multicore);
void generateMatter(int x, int z, boolean multicore, ChunkContext context);
@BlockCoordinates
default IrisBiome getCaveOrMantleBiome(int x, int y, int z) {
@@ -422,21 +422,27 @@ public interface Engine extends DataProvider, Fallible, LootProvider, BlockUpdat
default KList<IrisLootTable> getLootTables(RNG rng, Block b) {
int rx = b.getX();
int rz = b.getZ();
int ry = b.getY() - getWorld().minHeight();
double he = getComplex().getHeightStream().get(rx, rz);
PlacedObject po = getObjectPlacement(rx, b.getY(), rz);
if(po != null && po.getPlacement() != null) {
KList<IrisLootTable> tables = new KList<>();
PlacedObject po = getObjectPlacement(rx, ry, rz);
if(po != null && po.getPlacement() != null) {
if(B.isStorageChest(b.getBlockData())) {
IrisLootTable table = po.getPlacement().getTable(b.getBlockData(), getData());
if(table != null) {
return new KList<>(table);
tables.add(table);
if(po.getPlacement().isOverrideGlobalLoot()) {
return new KList<>(table);
}
}
}
}
IrisRegion region = getComplex().getRegionStream().get(rx, rz);
IrisBiome biomeSurface = getComplex().getTrueBiomeStream().get(rx, rz);
IrisBiome biomeUnder = b.getY() < he ? getComplex().getCaveBiomeStream().get(rx, rz) : biomeSurface;
KList<IrisLootTable> tables = new KList<>();
IrisBiome biomeUnder = ry < he ? getComplex().getCaveBiomeStream().get(rx, rz) : biomeSurface;
double multiplier = 1D * getDimension().getLoot().getMultiplier() * region.getLoot().getMultiplier() * biomeSurface.getLoot().getMultiplier() * biomeUnder.getLoot().getMultiplier();
injectTables(tables, getDimension().getLoot());
injectTables(tables, region.getLoot());
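
Two behaviour changes are visible in this hunk. First, the lookup Y is shifted by the world's minimum height (ry = b.getY() - getWorld().minHeight()), so in a world with minHeight -64 a chest at block Y -40 is queried at ry = -40 - (-64) = 24, i.e. zero-based engine coordinates rather than world coordinates. Second, an object's loot table is now added to the accumulated table list instead of being returned immediately, with the early return kept only when isOverrideGlobalLoot() is set; this lines up with commit f37d91a530, "Fixed loot table object override not working.", in the list above.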

View File

@@ -18,10 +18,11 @@
package com.volmit.iris.engine.framework;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.hunk.Hunk;
public interface EngineActuator<O> extends EngineComponent {
@BlockCoordinates
void actuate(int x, int z, Hunk<O> output, boolean multicore);
void actuate(int x, int z, Hunk<O> output, boolean multicore, ChunkContext context);
}

View File

@@ -18,6 +18,7 @@
package com.volmit.iris.engine.framework;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.hunk.Hunk;
@@ -26,11 +27,11 @@ public abstract class EngineAssignedActuator<T> extends EngineAssignedComponent
super(engine, name);
}
public abstract void onActuate(int x, int z, Hunk<T> output, boolean multicore);
public abstract void onActuate(int x, int z, Hunk<T> output, boolean multicore, ChunkContext context);
@BlockCoordinates
@Override
public void actuate(int x, int z, Hunk<T> output, boolean multicore) {
onActuate(x, z, output, multicore);
public void actuate(int x, int z, Hunk<T> output, boolean multicore, ChunkContext context) {
onActuate(x, z, output, multicore, context);
}
}

View File

@@ -19,6 +19,7 @@
package com.volmit.iris.engine.framework;
import com.volmit.iris.Iris;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.hunk.Hunk;
@@ -28,13 +29,13 @@ public abstract class EngineAssignedModifier<T> extends EngineAssignedComponent
}
@BlockCoordinates
public abstract void onModify(int x, int z, Hunk<T> output, boolean multicore);
public abstract void onModify(int x, int z, Hunk<T> output, boolean multicore, ChunkContext context);
@BlockCoordinates
@Override
public void modify(int x, int z, Hunk<T> output, boolean multicore) {
public void modify(int x, int z, Hunk<T> output, boolean multicore, ChunkContext context) {
try {
onModify(x, z, output, multicore);
onModify(x, z, output, multicore, context);
} catch(Throwable e) {
Iris.error("Modifier Failure: " + getName());
e.printStackTrace();

View File

@@ -24,6 +24,7 @@ import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.format.C;
import com.volmit.iris.util.math.Position2;
import com.volmit.iris.util.plugin.VolmitSender;
import com.volmit.iris.util.stream.utility.WasteDetector;
import io.papermc.lib.PaperLib;
import org.bukkit.Bukkit;
import org.bukkit.Location;

View File

@@ -18,12 +18,19 @@
package com.volmit.iris.engine.framework;
import com.volmit.iris.Iris;
import com.volmit.iris.engine.IrisComplex;
import com.volmit.iris.engine.mantle.EngineMantle;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.context.IrisContext;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.format.C;
import com.volmit.iris.util.format.Form;
import com.volmit.iris.util.hunk.Hunk;
import com.volmit.iris.util.math.RollingSequence;
import com.volmit.iris.util.parallel.BurstExecutor;
import com.volmit.iris.util.parallel.MultiBurst;
import com.volmit.iris.util.scheduling.PrecisionStopwatch;
import org.bukkit.block.Biome;
import org.bukkit.block.data.BlockData;
@@ -37,12 +44,12 @@ public interface EngineMode extends Staged {
}
default EngineStage burst(EngineStage... stages) {
return (x, z, blocks, biomes, multicore) -> {
return (x, z, blocks, biomes, multicore, ctx) -> {
BurstExecutor e = burst().burst(stages.length);
e.setMulticore(multicore);
for(EngineStage i : stages) {
e.queue(() -> i.generate(x, z, blocks, biomes, multicore));
e.queue(() -> i.generate(x, z, blocks, biomes, multicore, ctx));
}
e.complete();
@@ -57,14 +64,20 @@ public interface EngineMode extends Staged {
return getEngine().getMantle();
}
default void generateMatter(int x, int z, boolean multicore) {
getMantle().generateMatter(x, z, multicore);
default void generateMatter(int x, int z, boolean multicore, ChunkContext context) {
getMantle().generateMatter(x, z, multicore, context);
}
public static final RollingSequence r = new RollingSequence(64);
public static final RollingSequence r2 = new RollingSequence(256);
@BlockCoordinates
default void generate(int x, int z, Hunk<BlockData> blocks, Hunk<Biome> biomes, boolean multicore) {
ChunkContext ctx = new ChunkContext(x, z, getComplex());
IrisContext.getOr(getEngine()).setChunkContext(ctx);
for(EngineStage i : getStages()) {
i.generate(x, z, blocks, biomes, multicore);
i.generate(x, z, blocks, biomes, multicore, ctx);
}
}
}
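
ChunkContext itself is not part of this compare, but its use throughout the diff (context.getBiome().get(xf, zf), context.getHeight().get(i, j), getRegion(), getCave(), getRock(), getFluid(), always with chunk-local coordinates) implies a per-chunk snapshot of the complex streams, sampled once when the context is built in generate() above so later stages stop re-querying the global streams per block column. A rough sketch of that pattern, with every type and method name assumed:

// Illustrative sketch of a per-chunk stream snapshot; not the real ChunkContext API.
final class ChunkWindow<T> {
    private final Object[] values = new Object[16 * 16];

    // Sample a 16x16 block window starting at world (minX, minZ) from any (x, z) -> T source.
    ChunkWindow(int minX, int minZ, java.util.function.BiFunction<Integer, Integer, T> source) {
        for(int x = 0; x < 16; x++) {
            for(int z = 0; z < 16; z++) {
                values[(x << 4) | z] = source.apply(minX + x, minZ + z);
            }
        }
    }

    // Chunk-local lookup, mirroring the context.getBiome().get(xf, zf) style of access seen above.
    @SuppressWarnings("unchecked")
    T get(int localX, int localZ) {
        return (T) values[(localX << 4) | localZ];
    }
}

// Usage sketch (types from the diff): one window per stream, built once per generated chunk, e.g.
// ChunkWindow<IrisBiome> biome = new ChunkWindow<>(x, z, (xx, zz) -> complex.getTrueBiomeStream().get(xx, zz));

Pre-sampling of this kind is consistent with the actuator hunks above, where per-column BurstExecutor queuing is removed and columns are iterated directly against the context.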

View File

@@ -18,10 +18,11 @@
package com.volmit.iris.engine.framework;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.hunk.Hunk;
public interface EngineModifier<T> extends EngineComponent {
@BlockCoordinates
void modify(int x, int z, Hunk<T> t, boolean multicore);
void modify(int x, int z, Hunk<T> t, boolean multicore, ChunkContext context);
}

View File

@@ -18,6 +18,7 @@
package com.volmit.iris.engine.framework;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.hunk.Hunk;
import org.bukkit.block.Biome;
@@ -25,7 +26,7 @@ import org.bukkit.block.data.BlockData;
public interface EngineStage {
@BlockCoordinates
void generate(int x, int z, Hunk<BlockData> blocks, Hunk<Biome> biomes, boolean multicore);
void generate(int x, int z, Hunk<BlockData> blocks, Hunk<Biome> biomes, boolean multicore, ChunkContext context);
default void close() {
if(this instanceof EngineComponent c) {

View File

@@ -24,6 +24,7 @@ import com.volmit.iris.engine.object.IrisBiome;
import com.volmit.iris.engine.object.IrisJigsawStructure;
import com.volmit.iris.engine.object.IrisObject;
import com.volmit.iris.engine.object.IrisRegion;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.format.Form;
import com.volmit.iris.util.math.M;
import com.volmit.iris.util.math.Position2;
@@ -175,7 +176,7 @@ public interface Locator<T> {
static Locator<IrisBiome> caveOrMantleBiome(String loadKey) {
return (e, c) -> {
AtomicBoolean found = new AtomicBoolean(false);
e.generateMatter(c.getX(), c.getZ(), true);
e.generateMatter(c.getX(), c.getZ(), true, new ChunkContext(c.getX() << 4, c.getZ() << 4, e.getComplex(), false));
e.getMantle().getMantle().iterateChunk(c.getX(), c.getZ(), MatterCavern.class, (x, y, z, t) -> {
if(found.get()) {
return;

View File

@@ -126,8 +126,7 @@ public class PlannedStructure {
}
int id = rng.i(0, Integer.MAX_VALUE);
vo.place(xx, height, zz, placer, options, rng, e.shouldReduce(eng) ? null : (b)
-> e.set(b.getX(), b.getY(), b.getZ(), v.getLoadKey() + "@" + id), null, getData());
vo.place(xx, height, zz, placer, options, rng, (b) -> e.set(b.getX(), b.getY(), b.getZ(), v.getLoadKey() + "@" + id), null, getData());
}
public void place(World world) {

View File

@@ -30,6 +30,7 @@ import com.volmit.iris.engine.object.IrisDimension;
import com.volmit.iris.engine.object.IrisPosition;
import com.volmit.iris.engine.object.TileData;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.context.IrisContext;
import com.volmit.iris.util.data.B;
import com.volmit.iris.util.documentation.BlockCoordinates;
@@ -188,7 +189,7 @@ public interface EngineMantle extends IObjectPlacer {
@ChunkCoordinates
default void generateMatter(int x, int z, boolean multicore) {
default void generateMatter(int x, int z, boolean multicore, ChunkContext context) {
if(!getEngine().getDimension().isUseMantle()) {
return;
}
@@ -206,7 +207,7 @@ public interface EngineMantle extends IObjectPlacer {
MantleChunk mc = getMantle().getChunk(xx, zz);
for(MantleComponent k : getComponents()) {
generateMantleComponent(writer, xx, zz, k, mc);
generateMantleComponent(writer, xx, zz, k, mc, context);
}
});
});
@@ -216,8 +217,8 @@ public interface EngineMantle extends IObjectPlacer {
burst.complete();
}
default void generateMantleComponent(MantleWriter writer, int x, int z, MantleComponent c, MantleChunk mc) {
mc.raiseFlag(c.getFlag(), () -> c.generateLayer(writer, x, z));
default void generateMantleComponent(MantleWriter writer, int x, int z, MantleComponent c, MantleChunk mc, ChunkContext context) {
mc.raiseFlag(c.getFlag(), () -> c.generateLayer(writer, x, z, context));
}
@ChunkCoordinates

View File

@@ -21,6 +21,7 @@ package com.volmit.iris.engine.mantle;
import com.volmit.iris.core.loader.IrisData;
import com.volmit.iris.engine.IrisComplex;
import com.volmit.iris.engine.object.IrisDimension;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.ChunkCoordinates;
import com.volmit.iris.util.mantle.Mantle;
import com.volmit.iris.util.mantle.MantleFlag;
@@ -60,5 +61,5 @@ public interface MantleComponent {
MantleFlag getFlag();
@ChunkCoordinates
void generateLayer(MantleWriter writer, int x, int z);
void generateLayer(MantleWriter writer, int x, int z, ChunkContext context);
}

View File

@@ -25,6 +25,7 @@ import com.volmit.iris.engine.mantle.MantleWriter;
import com.volmit.iris.engine.object.IrisBiome;
import com.volmit.iris.engine.object.IrisCarving;
import com.volmit.iris.engine.object.IrisRegion;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.ChunkCoordinates;
import com.volmit.iris.util.mantle.MantleFlag;
import com.volmit.iris.util.math.RNG;
@@ -35,11 +36,11 @@ public class MantleCarvingComponent extends IrisMantleComponent {
}
@Override
public void generateLayer(MantleWriter writer, int x, int z) {
public void generateLayer(MantleWriter writer, int x, int z, ChunkContext context) {
RNG rng = new RNG(Cache.key(x, z) + seed());
int xxx = 8 + (x << 4);
int zzz = 8 + (z << 4);
IrisRegion region = getComplex().getRegionStream().get(xxx, zzz);
IrisRegion region =getComplex().getRegionStream().get(xxx, zzz);
IrisBiome biome = getComplex().getTrueBiomeStream().get(xxx, zzz);
carve(writer, rng, x, z, region, biome);
}

View File

@@ -25,6 +25,7 @@ import com.volmit.iris.engine.mantle.MantleWriter;
import com.volmit.iris.engine.object.IrisBiome;
import com.volmit.iris.engine.object.IrisFluidBodies;
import com.volmit.iris.engine.object.IrisRegion;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.ChunkCoordinates;
import com.volmit.iris.util.mantle.MantleFlag;
import com.volmit.iris.util.math.RNG;
@@ -35,11 +36,11 @@ public class MantleFluidBodyComponent extends IrisMantleComponent {
}
@Override
public void generateLayer(MantleWriter writer, int x, int z) {
public void generateLayer(MantleWriter writer, int x, int z, ChunkContext context) {
RNG rng = new RNG(Cache.key(x, z) + seed() + 405666);
int xxx = 8 + (x << 4);
int zzz = 8 + (z << 4);
IrisRegion region = getComplex().getRegionStream().get(xxx, zzz);
IrisRegion region =getComplex().getRegionStream().get(xxx, zzz);
IrisBiome biome = getComplex().getTrueBiomeStream().get(xxx, zzz);
generate(writer, rng, x, z, region, biome);
}

View File

@@ -28,6 +28,7 @@ import com.volmit.iris.engine.object.IrisJigsawStructurePlacement;
import com.volmit.iris.engine.object.IrisPosition;
import com.volmit.iris.engine.object.IrisRegion;
import com.volmit.iris.engine.object.NoiseStyle;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.documentation.ChunkCoordinates;
import com.volmit.iris.util.mantle.MantleFlag;
@@ -46,11 +47,11 @@ public class MantleJigsawComponent extends IrisMantleComponent {
}
@Override
public void generateLayer(MantleWriter writer, int x, int z) {
public void generateLayer(MantleWriter writer, int x, int z, ChunkContext context) {
RNG rng = new RNG(cng.fit(-Integer.MAX_VALUE, Integer.MAX_VALUE, x, z));
int xxx = 8 + (x << 4);
int zzz = 8 + (z << 4);
IrisRegion region = getComplex().getRegionStream().get(xxx, zzz);
IrisRegion region =getComplex().getRegionStream().get(xxx, zzz);
IrisBiome biome = getComplex().getTrueBiomeStream().get(xxx, zzz);
generateJigsaw(writer, rng, x, z, biome, region);
}

View File

@@ -28,6 +28,7 @@ import com.volmit.iris.engine.object.IrisObject;
import com.volmit.iris.engine.object.IrisObjectPlacement;
import com.volmit.iris.engine.object.IrisRegion;
import com.volmit.iris.util.collection.KSet;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.documentation.ChunkCoordinates;
import com.volmit.iris.util.mantle.MantleFlag;
@@ -41,11 +42,11 @@ public class MantleObjectComponent extends IrisMantleComponent {
}
@Override
public void generateLayer(MantleWriter writer, int x, int z) {
public void generateLayer(MantleWriter writer, int x, int z, ChunkContext context) {
RNG rng = new RNG(Cache.key(x, z) + seed());
int xxx = 8 + (x << 4);
int zzz = 8 + (z << 4);
IrisRegion region = getComplex().getRegionStream().get(xxx, zzz);
IrisRegion region =getComplex().getRegionStream().get(xxx, zzz);
IrisBiome biome = getComplex().getTrueBiomeStream().get(xxx, zzz);
placeObjects(writer, rng, x, z, biome, region);
}
@@ -95,9 +96,7 @@ public class MantleObjectComponent extends IrisMantleComponent {
int xx = rng.i(x, x + 15);
int zz = rng.i(z, z + 15);
int id = rng.i(0, Integer.MAX_VALUE);
v.place(xx, -1, zz, writer, objectPlacement, rng,
getMantle().shouldReduce(getEngineMantle().getEngine()) ? null : (b) -> writer.setData(b.getX(), b.getY(), b.getZ(),
v.getLoadKey() + "@" + id), null, getData());
v.place(xx, -1, zz, writer, objectPlacement, rng, (b) -> writer.setData(b.getX(), b.getY(), b.getZ(), v.getLoadKey() + "@" + id), null, getData());
}
}

View File

@@ -31,8 +31,8 @@ public class ModeEnclosure extends IrisEngineMode implements EngineMode {
var biome = new IrisBiomeActuator(getEngine());
registerStage(burst(
(x, z, k, p, m) -> terrain.actuate(x, z, k, m),
(x, z, k, p, m) -> biome.actuate(x, z, p, m)
(x, z, k, p, m, c) -> terrain.actuate(x, z, k, m, c),
(x, z, k, p, m, c) -> biome.actuate(x, z, p, m, c)
));
}
}

View File

@@ -31,8 +31,8 @@ public class ModeIslands extends IrisEngineMode implements EngineMode {
var biome = new IrisBiomeActuator(getEngine());
registerStage(burst(
(x, z, k, p, m) -> terrain.actuate(x, z, k, m),
(x, z, k, p, m) -> biome.actuate(x, z, p, m)
(x, z, k, p, m, c) -> terrain.actuate(x, z, k, m, c),
(x, z, k, p, m, c) -> biome.actuate(x, z, p, m, c)
));
}
}

View File

@@ -23,6 +23,7 @@ import com.volmit.iris.engine.actuator.IrisDecorantActuator;
import com.volmit.iris.engine.actuator.IrisTerrainNormalActuator;
import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.engine.framework.EngineMode;
import com.volmit.iris.engine.framework.EngineStage;
import com.volmit.iris.engine.framework.IrisEngineMode;
import com.volmit.iris.engine.modifier.IrisCarveModifier;
import com.volmit.iris.engine.modifier.IrisDepositModifier;
@@ -30,7 +31,6 @@ import com.volmit.iris.engine.modifier.IrisPerfectionModifier;
import com.volmit.iris.engine.modifier.IrisPostModifier;
import org.bukkit.block.data.BlockData;
public class ModeOverworld extends IrisEngineMode implements EngineMode {
public ModeOverworld(Engine engine) {
super(engine);
@@ -41,17 +41,31 @@ public class ModeOverworld extends IrisEngineMode implements EngineMode {
var post = new IrisPostModifier(getEngine());
var deposit = new IrisDepositModifier(getEngine());
var perfection = new IrisPerfectionModifier(getEngine());
EngineStage sBiome = (x, z, k, p, m, c) -> biome.actuate(x, z, p, m, c);
EngineStage sGenMatter = (x, z, k, p, m, c) -> generateMatter(x >> 4, z >> 4, m, c);
EngineStage sTerrain = (x, z, k, p, m, c) -> terrain.actuate(x, z, k, m, c);
EngineStage sDecorant = (x, z, k, p, m, c) -> decorant.actuate(x, z, k, m, c);
EngineStage sCave = (x, z, k, p, m, c) -> cave.modify(x >> 4, z >> 4, k, m, c);
EngineStage sDeposit = (x, z, k, p, m, c) -> deposit.modify(x, z, k, m,c);
EngineStage sPost = (x, z, k, p, m, c) -> post.modify(x, z, k, m, c);
EngineStage sInsertMatter = (x, z, K, p, m, c) -> getMantle().insertMatter(x >> 4, z >> 4, BlockData.class, K, m);
EngineStage sPerfection = (x, z, k, p, m, c) -> perfection.modify(x, z, k, m, c);
registerStage((x, z, k, p, m) -> biome.actuate(x, z, p, m));
registerStage(burst(
(x, z, k, p, m) -> generateMatter(x >> 4, z >> 4, m),
(x, z, k, p, m) -> terrain.actuate(x, z, k, m)
sBiome,
sGenMatter,
sTerrain
));
registerStage((x, z, k, p, m) -> cave.modify(x >> 4, z >> 4, k, m));
registerStage((x, z, k, p, m) -> deposit.modify(x, z, k, m));
registerStage((x, z, k, p, m) -> decorant.actuate(x, z, k, m));
registerStage((x, z, k, p, m) -> post.modify(x, z, k, m));
registerStage((x, z, K, p, m) -> getMantle().insertMatter(x >> 4, z >> 4, BlockData.class, K, m));
registerStage((x, z, k, p, m) -> perfection.modify(x, z, k, m));
registerStage(burst(
sCave,
sPost
));
registerStage(burst(
sDeposit,
sInsertMatter,
sDecorant
));
registerStage(sPerfection);
}
}

View File

@@ -31,8 +31,8 @@ public class ModeSuperFlat extends IrisEngineMode implements EngineMode {
var biome = new IrisBiomeActuator(getEngine());
registerStage(burst(
(x, z, k, p, m) -> terrain.actuate(x, z, k, m),
(x, z, k, p, m) -> biome.actuate(x, z, p, m)
(x, z, k, p, m, c) -> terrain.actuate(x, z, k, m, c),
(x, z, k, p, m, c) -> biome.actuate(x, z, p, m, c)
));
}
}

View File

@@ -29,6 +29,7 @@ import com.volmit.iris.engine.object.IrisDecorator;
import com.volmit.iris.engine.object.IrisPosition;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.collection.KMap;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.context.IrisContext;
import com.volmit.iris.util.data.B;
import com.volmit.iris.util.function.Consumer4;
@@ -58,7 +59,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
}
@Override
public void onModify(int x, int z, Hunk<BlockData> output, boolean multicore) {
public void onModify(int x, int z, Hunk<BlockData> output, boolean multicore, ChunkContext context) {
PrecisionStopwatch p = PrecisionStopwatch.start();
Mantle mantle = getEngine().getMantle().getMantle();
MantleChunk mc = getEngine().getMantle().getMantle().getChunk(x, z);
@@ -130,7 +131,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
biome.setInferredType(InferredType.CAVE);
BlockData d = biome.getWall().get(rng, i.getX() + (x << 4), i.getY(), i.getZ() + (z << 4), getData());
if(d != null && B.isSolid(output.get(i.getX(), i.getY(), i.getZ())) && i.getY() <= getComplex().getHeightStream().get(i.getX() + (x << 4), i.getZ() + (z << 4))) {
if(d != null && B.isSolid(output.get(i.getX(), i.getY(), i.getZ())) && i.getY() <= context.getHeight().get(i.getX(), i.getZ())) {
output.set(i.getX(), i.getY(), i.getZ(), d);
}
}
@@ -156,7 +157,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
if(i == buf + 1) {
buf = i;
zone.ceiling = buf;
} else if(zone.isValid()) {
} else if(zone.isValid(getEngine())) {
processZone(output, mc, mantle, zone, rx, rz, rx + (x << 4), rz + (z << 4));
zone = new CaveZone();
zone.setFloor(i);
@@ -164,7 +165,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
}
}
if(zone.isValid()) {
if(zone.isValid(getEngine())) {
processZone(output, mc, mantle, zone, rx, rz, rx + (x << 4), rz + (z << 4));
}
});
@@ -275,8 +276,8 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
return (ceiling - floor) - 1;
}
public boolean isValid() {
return floor < ceiling && ceiling - floor >= 1 && floor >= 0 && ceiling <= IrisContext.get().getEngine().getHeight() && airThickness() > 0;
public boolean isValid(Engine engine) {
return floor < ceiling && ceiling - floor >= 1 && floor >= 0 && ceiling <= engine.getHeight() && airThickness() > 0;
}
public String toString() {


@@ -24,6 +24,7 @@ import com.volmit.iris.engine.object.IrisBiome;
import com.volmit.iris.engine.object.IrisDepositGenerator;
import com.volmit.iris.engine.object.IrisObject;
import com.volmit.iris.engine.object.IrisRegion;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.data.B;
import com.volmit.iris.util.data.HeightMap;
import com.volmit.iris.util.hunk.Hunk;
@@ -42,41 +43,41 @@ public class IrisDepositModifier extends EngineAssignedModifier<BlockData> {
}
@Override
public void onModify(int x, int z, Hunk<BlockData> output, boolean multicore) {
public void onModify(int x, int z, Hunk<BlockData> output, boolean multicore, ChunkContext context) {
PrecisionStopwatch p = PrecisionStopwatch.start();
generateDeposits(rng, output, Math.floorDiv(x, 16), Math.floorDiv(z, 16), multicore);
generateDeposits(rng, output, Math.floorDiv(x, 16), Math.floorDiv(z, 16), multicore, context);
getEngine().getMetrics().getDeposit().put(p.getMilliseconds());
}
public void generateDeposits(RNG rx, Hunk<BlockData> terrain, int x, int z, boolean multicore) {
public void generateDeposits(RNG rx, Hunk<BlockData> terrain, int x, int z, boolean multicore, ChunkContext context) {
RNG ro = rx.nextParallelRNG(x * x).nextParallelRNG(z * z);
IrisRegion region = getComplex().getRegionStream().get((x * 16) + 7, (z * 16) + 7);
IrisBiome biome = getComplex().getTrueBiomeStream().get((x * 16) + 7, (z * 16) + 7);
IrisRegion region = context.getRegion().get(7,7);
IrisBiome biome = context.getBiome().get(7,7);
BurstExecutor burst = burst().burst(multicore);
for(IrisDepositGenerator k : getDimension().getDeposits()) {
burst.queue(() -> generate(k, terrain, ro, x, z, false));
burst.queue(() -> generate(k, terrain, ro, x, z, false, context));
}
for(IrisDepositGenerator k : region.getDeposits()) {
for(int l = 0; l < ro.i(k.getMinPerChunk(), k.getMaxPerChunk()); l++) {
burst.queue(() -> generate(k, terrain, ro, x, z, false));
burst.queue(() -> generate(k, terrain, ro, x, z, false, context));
}
}
for(IrisDepositGenerator k : biome.getDeposits()) {
for(int l = 0; l < ro.i(k.getMinPerChunk(), k.getMaxPerChunk()); l++) {
burst.queue(() -> generate(k, terrain, ro, x, z, false));
burst.queue(() -> generate(k, terrain, ro, x, z, false, context));
}
}
burst.complete();
}
public void generate(IrisDepositGenerator k, Hunk<BlockData> data, RNG rng, int cx, int cz, boolean safe) {
generate(k, data, rng, cx, cz, safe, null);
public void generate(IrisDepositGenerator k, Hunk<BlockData> data, RNG rng, int cx, int cz, boolean safe, ChunkContext context) {
generate(k, data, rng, cx, cz, safe, null, context);
}
public void generate(IrisDepositGenerator k, Hunk<BlockData> data, RNG rng, int cx, int cz, boolean safe, HeightMap he) {
public void generate(IrisDepositGenerator k, Hunk<BlockData> data, RNG rng, int cx, int cz, boolean safe, HeightMap he, ChunkContext context) {
for(int l = 0; l < rng.i(k.getMinPerChunk(), k.getMaxPerChunk()); l++) {
IrisObject clump = k.getClump(rng, getData());
@@ -92,7 +93,7 @@ public class IrisDepositModifier extends EngineAssignedModifier<BlockData> {
int x = rng.i(af, bf);
int z = rng.i(af, bf);
int height = (he != null ? he.getHeight((cx << 4) + x, (cz << 4) + z) : (int) (Math.round(
getComplex().getHeightStream().get((cx << 4) + x, (cz << 4) + z)
context.getHeight().get( x, z)
))) - 7;
if(height <= 0) {


@@ -20,6 +20,7 @@ package com.volmit.iris.engine.modifier;
import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.engine.framework.EngineAssignedModifier;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.data.B;
import com.volmit.iris.util.hunk.Hunk;
import com.volmit.iris.util.parallel.BurstExecutor;
@@ -41,7 +42,7 @@ public class IrisPerfectionModifier extends EngineAssignedModifier<BlockData> {
}
@Override
public void onModify(int x, int z, Hunk<BlockData> output, boolean multicore) {
public void onModify(int x, int z, Hunk<BlockData> output, boolean multicore, ChunkContext context) {
PrecisionStopwatch p = PrecisionStopwatch.start();
AtomicBoolean changed = new AtomicBoolean(true);
int passes = 0;


@@ -21,6 +21,7 @@ package com.volmit.iris.engine.modifier;
import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.engine.framework.EngineAssignedModifier;
import com.volmit.iris.engine.object.IrisBiome;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.data.B;
import com.volmit.iris.util.hunk.Hunk;
import com.volmit.iris.util.math.RNG;
@@ -45,7 +46,7 @@ public class IrisPostModifier extends EngineAssignedModifier<BlockData> {
}
@Override
public void onModify(int x, int z, Hunk<BlockData> output, boolean multicore) {
public void onModify(int x, int z, Hunk<BlockData> output, boolean multicore, ChunkContext context) {
PrecisionStopwatch p = PrecisionStopwatch.start();
AtomicInteger i = new AtomicInteger();
AtomicInteger j = new AtomicInteger();
@@ -54,14 +55,14 @@ public class IrisPostModifier extends EngineAssignedModifier<BlockData> {
for(j.set(0); j.get() < output.getDepth(); j.getAndIncrement()) {
int ii = i.get();
int jj = j.get();
post(ii, jj, sync, ii + x, jj + z);
post(ii, jj, sync, ii + x, jj + z, context);
}
}
getEngine().getMetrics().getPost().put(p.getMilliseconds());
}
private void post(int currentPostX, int currentPostZ, Hunk<BlockData> currentData, int x, int z) {
private void post(int currentPostX, int currentPostZ, Hunk<BlockData> currentData, int x, int z, ChunkContext context) {
int h = getEngine().getMantle().trueHeight(x, z);
int ha = getEngine().getMantle().trueHeight(x + 1, z);
int hb = getEngine().getMantle().trueHeight(x, z + 1);
@@ -136,7 +137,7 @@ public class IrisPostModifier extends EngineAssignedModifier<BlockData> {
}
// Wall Patcher
IrisBiome biome = getComplex().getTrueBiomeStream().get(x, z);
IrisBiome biome = context.getBiome().get(currentPostX, currentPostZ);
if(getDimension().isPostProcessingWalls()) {
if(!biome.getWall().getPalette().isEmpty()) {


@@ -122,6 +122,8 @@ public class IrisObjectPlacement {
@ArrayType(min = 1, type = IrisObjectLoot.class)
@Desc("The loot tables to apply to these objects")
private KList<IrisObjectLoot> loot = new KList<>();
@Desc("Whether the given loot tables override any and all other loot tables available in the dimension, region or biome.")
private boolean overrideGlobalLoot = false;
@Desc("This object / these objects override the following trees when they grow...")
@ArrayType(min = 1, type = IrisTree.class)
private KList<IrisTree> trees = new KList<>();
@@ -201,6 +203,8 @@ public class IrisObjectPlacement {
TableCache tc = new TableCache();
for(IrisObjectLoot loot : getLoot()) {
if(loot == null)
continue;
IrisLootTable table = manager.getLootLoader().load(loot.getName());
if(table == null) {
Iris.warn("Couldn't find loot table " + loot.getName());


@@ -40,6 +40,7 @@ import com.volmit.iris.util.scheduling.ChronoLatch;
import com.volmit.iris.util.scheduling.J;
import com.volmit.iris.util.scheduling.Looper;
import com.volmit.iris.util.scheduling.PrecisionStopwatch;
import com.volmit.iris.util.stream.utility.ProfiledStream;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.Setter;
@@ -276,7 +277,6 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
public @NotNull ChunkData generateChunkData(@NotNull World world, @NotNull Random ignored, int x, int z, @NotNull BiomeGrid biome) {
try {
getEngine(world);
loadLock.acquire();
computeStudioGenerator();
TerrainChunk tc = TerrainChunk.create(world, biome);
this.world.bind(world);
@@ -293,10 +293,9 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
ChunkData c = tc.getRaw();
Iris.debug("Generated " + x + " " + z);
loadLock.release();
return c;
} catch(Throwable e) {
loadLock.release();
Iris.error("======================================");
e.printStackTrace();
Iris.reportErrorChunk(x, z, e, "CHUNK");


@@ -0,0 +1,27 @@
package com.volmit.iris.util.cache;
import com.volmit.iris.util.data.ChunkCache;
import com.volmit.iris.util.function.Function2;
import java.util.concurrent.atomic.AtomicReferenceArray;
import java.util.function.Function;
public class ChunkCache2D<T> {
private final AtomicReferenceArray<T> cache;
public ChunkCache2D() {
this.cache = new AtomicReferenceArray<>(256);
}
public T get(int x, int z, Function2<Integer, Integer, T> resolver) {
int key = ((z & 15) * 16) + (x & 15);
T t = cache.get(key);
if(t == null) {
t = resolver.apply(x, z);
cache.set(key, t);
}
return t;
}
}
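
A short usage sketch of the per-chunk cache above: the 256-slot array is indexed by the block's position inside its chunk ((z & 15) * 16 + (x & 15)), the resolver still receives the original coordinates, and it only runs on a miss. The heightAt resolver below is hypothetical, not part of the diff.

// Hedged usage sketch; heightAt(x, z) is an assumed resolver.
ChunkCache2D<Double> heights = new ChunkCache2D<>();
double first = heights.get(12, 7, (x, z) -> heightAt(x, z));  // miss: resolver runs, slot (7 * 16) + 12 is filled
double again = heights.get(12, 7, (x, z) -> heightAt(x, z));  // hit: served from the AtomicReferenceArray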


@@ -0,0 +1,34 @@
package com.volmit.iris.util.cache;
import com.volmit.iris.engine.data.cache.Cache;
import com.volmit.iris.util.collection.KMap;
import com.volmit.iris.util.context.ChunkedDataCache;
import com.volmit.iris.util.data.KCache;
import com.volmit.iris.util.function.Function2;
import com.volmit.iris.util.mantle.Mantle;
import com.volmit.iris.util.scheduling.ChronoLatch;
import it.unimi.dsi.fastutil.longs.Long2LongMaps;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;
public class WorldCache2D<T> {
private final KCache<Long, ChunkCache2D<T>> chunks;
private final Function2<Integer, Integer, T> resolver;
public WorldCache2D(Function2<Integer, Integer, T> resolver) {
this.resolver = resolver;
chunks = new KCache<>((x) -> new ChunkCache2D<>(), 1024);
}
public T get(int x, int z) {
ChunkCache2D<T> chunk = chunks.get(Cache.key(x >> 4, z >> 4));
return chunk.get(x, z, resolver);
}
public long getSize() {
return chunks.getSize() * 256L;
}
}
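
WorldCache2D layers a KCache of ChunkCache2D buckets (keyed by chunk coordinate via Cache.key(x >> 4, z >> 4)) behind a single resolver, so a whole chunk's worth of values is created and evicted together. A minimal sketch, assuming a ProceduralStream<Double> named heightStream:

WorldCache2D<Double> worldHeights = new WorldCache2D<>(heightStream::get);
double h = worldHeights.get(1000, -2000);  // bucket for chunk (62, -125); inside it, slot x = 8, z = 0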


@@ -0,0 +1,53 @@
package com.volmit.iris.util.context;
import com.volmit.iris.engine.IrisComplex;
import com.volmit.iris.engine.object.IrisBiome;
import com.volmit.iris.engine.object.IrisRegion;
import com.volmit.iris.util.collection.KMap;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.parallel.BurstExecutor;
import com.volmit.iris.util.parallel.MultiBurst;
import lombok.Data;
import org.bukkit.block.data.BlockData;
@Data
public class ChunkContext {
private final int x;
private final int z;
private ChunkedDataCache<Double> height;
private ChunkedDataCache<IrisBiome> biome;
private ChunkedDataCache<IrisBiome> cave;
private ChunkedDataCache<BlockData> rock;
private ChunkedDataCache<BlockData> fluid;
private ChunkedDataCache<IrisRegion> region;
@BlockCoordinates
public ChunkContext(int x, int z, IrisComplex c) {
this(x, z, c, true);
}
@BlockCoordinates
public ChunkContext(int x, int z, IrisComplex c, boolean cache) {
this.x = x;
this.z = z;
if(cache) {
BurstExecutor b = MultiBurst.burst.burst();
height = new ChunkedDataCache<>(b, c.getHeightStream(), x, z);
biome = new ChunkedDataCache<>(b, c.getTrueBiomeStream(), x, z);
cave = new ChunkedDataCache<>(b, c.getCaveBiomeStream(), x, z);
rock = new ChunkedDataCache<>(b, c.getRockStream(), x, z);
fluid = new ChunkedDataCache<>(b, c.getFluidStream(), x, z);
region = new ChunkedDataCache<>(b, c.getRegionStream(), x, z);
b.complete();
}
else {
height = new ChunkedDataCache<>(null, c.getHeightStream(), x, z, false);
biome = new ChunkedDataCache<>(null, c.getTrueBiomeStream(), x, z, false);
cave = new ChunkedDataCache<>(null, c.getCaveBiomeStream(), x, z, false);
rock = new ChunkedDataCache<>(null, c.getRockStream(), x, z, false);
fluid = new ChunkedDataCache<>(null, c.getFluidStream(), x, z, false);
region = new ChunkedDataCache<>(null, c.getRegionStream(), x, z, false);
}
}
}
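
ChunkContext prefetches one chunk's worth of height, biome, cave-biome, rock, fluid and region values in a single MultiBurst pass before the engine stages run; with cache = false every lookup simply falls through to the underlying streams. A hedged sketch of how a caller could build and query one (chunkX, chunkZ and engine are assumed variables):

ChunkContext ctx = new ChunkContext(chunkX << 4, chunkZ << 4, engine.getComplex());
IrisBiome biome = ctx.getBiome().get(7, 7);    // chunk-local 0-15 coordinates, read from the prefetched array
double height = ctx.getHeight().get(7, 7);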


@@ -0,0 +1,63 @@
package com.volmit.iris.util.context;
import com.volmit.iris.util.collection.KSet;
import com.volmit.iris.util.documentation.BlockCoordinates;
import com.volmit.iris.util.parallel.BurstExecutor;
import com.volmit.iris.util.stream.ProceduralStream;
import lombok.Data;
import java.util.HashSet;
@Data
public class ChunkedDataCache<T> {
private final int x;
private final int z;
private final KSet<T> uniques;
private final Object[] data;
private final boolean cache;
private final ProceduralStream<T> stream;
@BlockCoordinates
public ChunkedDataCache(BurstExecutor burst, ProceduralStream<T> stream, int x, int z) {
this(burst, stream, x, z, true);
}
@BlockCoordinates
public ChunkedDataCache(BurstExecutor burst, ProceduralStream<T> stream, int x, int z, boolean cache) {
this.stream = stream;
this.cache = cache;
this.x = x;
this.z = z;
this.uniques = cache ? new KSet<>() : null;
if(cache) {
data = new Object[256];
int i,j;
for(i = 0; i < 16; i++) {
int finalI = i;
for(j = 0; j < 16; j++) {
int finalJ = j;
burst.queue(() -> {
T t = stream.get(x+ finalI, z+ finalJ);
data[(finalJ * 16) + finalI] = t;
uniques.add(t);
});
}
}
}
else {
data = new Object[0];
}
}
@SuppressWarnings("unchecked")
@BlockCoordinates
public T get(int x, int z) {
if(!cache) {
return stream.get(this.x + x, this.z + z);
}
T t = (T) data[(z * 16) + x];
return t == null ? stream.get(this.x + x, this.z + z) : t;
}
}
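
Note the coordinate convention in ChunkedDataCache: the constructor samples the stream at absolute (x + i, z + j), while get(x, z) expects chunk-local 0-15 offsets and falls back to the stream when a slot is still null (for example, before the burst has finished). A small illustration, with heightStream, burst, cx and cz assumed:

ChunkedDataCache<Double> heights = new ChunkedDataCache<>(burst, heightStream, cx << 4, cz << 4);
burst.complete();                 // wait for all 256 samples to land
double h = heights.get(3, 9);     // same value as heightStream.get((cx << 4) + 3, (cz << 4) + 9)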


@@ -28,11 +28,26 @@ import lombok.AllArgsConstructor;
import lombok.Data;
@Data
@AllArgsConstructor
public class IrisContext {
private static final KMap<Thread, IrisContext> context = new KMap<>();
private static ChronoLatch cl = new ChronoLatch(60000);
private final Engine engine;
private ChunkContext chunkContext;
public IrisContext(Engine engine) {
this.engine = engine;
}
public static IrisContext getOr(Engine engine) {
IrisContext c = get();
if(c == null) {
c = new IrisContext(engine);
touch(c);
}
return c;
}
public static IrisContext get() {
return context.get(Thread.currentThread());
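
getOr(Engine) binds a per-thread IrisContext lazily, which is what lets engine stages publish the current ChunkContext for streams to read back on the same thread. A minimal sketch of that flow, assuming the setter Lombok generates for the new chunkContext field via @Data:

IrisContext ctx = IrisContext.getOr(engine);   // reuse this thread's context, or create and touch a new one
ctx.setChunkContext(chunkContext);             // a stage publishes the prefetched chunk data
// later, on the same thread, ContextInjectingStream reads IrisContext.get().getChunkContext()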


@@ -24,6 +24,10 @@ import com.github.benmanes.caffeine.cache.LoadingCache;
import com.volmit.iris.engine.framework.MeteredCache;
import com.volmit.iris.util.math.RollingSequence;
import java.time.Duration;
import java.time.temporal.TemporalUnit;
import java.util.concurrent.TimeUnit;
public class KCache<K, V> implements MeteredCache {
private final long max;
private CacheLoader<K, V> loader;
@@ -46,7 +50,6 @@ public class KCache<K, V> implements MeteredCache {
return Caffeine
.newBuilder()
.maximumSize(max)
.softValues()
.initialCapacity((int) (max))
.build((k) -> loader == null ? null : loader.load(k));
}


@@ -65,8 +65,7 @@ public class ChunkDataHunkHolder extends AtomicHunk<BlockData> {
for(int k = 0; k < getDepth(); k++) {
BlockData b = super.getRaw(j, i, k);
if(b != null)
{
if(b != null) {
chunk.setBlock(j, i + chunk.getMinHeight(), k, b);
}
}


@@ -18,12 +18,14 @@
package com.volmit.iris.util.hunk.view;
import com.volmit.iris.util.data.B;
import com.volmit.iris.util.hunk.Hunk;
import org.bukkit.block.data.BlockData;
import org.bukkit.generator.ChunkGenerator.ChunkData;
@SuppressWarnings("ClassCanBeRecord")
public class ChunkDataHunkView implements Hunk<BlockData> {
private static final BlockData AIR = B.getAir();
private final ChunkData chunk;
public ChunkDataHunkView(ChunkData chunk) {
@@ -54,17 +56,44 @@ public class ChunkDataHunkView implements Hunk<BlockData> {
chunk.setRegion(x1, y1 + chunk.getMinHeight(), z1, x2, y2 + chunk.getMinHeight(), z2, t);
}
public BlockData get(int x, int y, int z) {
return getRaw(x, y, z);
}
public void set(int x, int y, int z, BlockData t) {
setRaw(x, y, z, t);
}
@Override
public void setRaw(int x, int y, int z, BlockData t) {
if(t == null) {
return;
}
chunk.setBlock(x, y + chunk.getMinHeight(), z, t);
try {
chunk.setBlock(x, y + chunk.getMinHeight(), z, t);
}
catch(Throwable ignored)
{
}
}
@Override
public BlockData getRaw(int x, int y, int z) {
return chunk.getBlockData(x, y + chunk.getMinHeight(), z);
try {
return chunk.getBlockData(x, y + chunk.getMinHeight(), z);
}
catch(Throwable e)
{
}
return AIR;
}
}

File diff suppressed because it is too large.


@@ -22,31 +22,7 @@ import com.volmit.iris.util.function.NoiseProvider;
public class Starcast {
public static double starcast(int x, int z, double r, double checks, boolean optimized, NoiseProvider n) {
if(optimized) {
if(checks == 3) return sc3(x, z, r, n);
else if(checks == 5) return sc5(x, z, r, n);
else if(checks == 6) return sc6(x, z, r, n);
else if(checks == 7) return sc7(x, z, r, n);
else if(checks == 9) return sc9(x, z, r, n);
else if(checks == 12) return sc12(x, z, r, n);
else if(checks == 24) return sc24(x, z, r, n);
else if(checks == 32) return sc32(x, z, r, n);
else if(checks == 48) return sc48(x, z, r, n);
else if(checks == 64) return sc64(x, z, r, n);
}
double m = 360D / checks;
double v = 0;
for(int i = 0; i < 360; i += m) {
double sin = Math.sin(Math.toRadians(i));
double cos = Math.cos(Math.toRadians(i));
double cx = x + ((r * cos) - (r * sin));
double cz = z + ((r * sin) + (r * cos));
v += n.noise(cx, cz);
}
return v / checks;
return CompiledStarcast.getStarcast((float)x, (float)z, (float)r, (float)checks, n);
}
public static double starcast(int x, int z, double r, double checks, NoiseProvider n) {


@@ -24,7 +24,6 @@ import com.volmit.iris.engine.object.IrisPosition;
import com.volmit.iris.util.collection.KSet;
import com.volmit.iris.util.hunk.Hunk;
import com.volmit.iris.util.math.BlockPosition;
import com.volmit.iris.util.scheduling.J;
import org.bukkit.World;
import org.bukkit.block.data.BlockData;
import org.bukkit.entity.Entity;


@@ -25,11 +25,15 @@ import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.engine.object.IRare;
import com.volmit.iris.engine.object.IrisStyledRange;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.function.Function2;
import com.volmit.iris.util.function.Function3;
import com.volmit.iris.util.function.Function4;
import com.volmit.iris.util.hunk.Hunk;
import com.volmit.iris.util.math.RNG;
import com.volmit.iris.util.parallel.BurstExecutor;
import com.volmit.iris.util.parallel.GridLock;
import com.volmit.iris.util.parallel.MultiBurst;
import com.volmit.iris.util.stream.arithmetic.AddingStream;
import com.volmit.iris.util.stream.arithmetic.ClampedStream;
import com.volmit.iris.util.stream.arithmetic.CoordinateBitShiftLeftStream;
@@ -59,13 +63,16 @@ import com.volmit.iris.util.stream.interpolation.Interpolated;
import com.volmit.iris.util.stream.sources.FunctionStream;
import com.volmit.iris.util.stream.utility.CachedStream2D;
import com.volmit.iris.util.stream.utility.CachedStream3D;
import com.volmit.iris.util.stream.utility.ContextInjectingStream;
import com.volmit.iris.util.stream.utility.NullSafeStream;
import com.volmit.iris.util.stream.utility.ProfiledStream;
import com.volmit.iris.util.stream.utility.SemaphoreStream;
import com.volmit.iris.util.stream.utility.SynchronizedStream;
import com.volmit.iris.util.stream.utility.WasteDetector;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
@SuppressWarnings("ALL")
@@ -111,7 +118,7 @@ public interface ProceduralStream<T> extends ProceduralLayer, Interpolated<T> {
}
default ProceduralStream<T> profile() {
return profile(10);
return profile(256);
}
default ProceduralStream<T> profile(int memory) {
@@ -130,10 +137,20 @@ public interface ProceduralStream<T> extends ProceduralLayer, Interpolated<T> {
return new AddingStream<>(this, a);
}
default ProceduralStream<T> contextInjecting(Function3<ChunkContext, Integer, Integer, T> contextAccessor) {
//return this;
return new ContextInjectingStream<>(this, contextAccessor);
}
default ProceduralStream<T> add(ProceduralStream<Double> a) {
return add2D((x, z) -> a.get(x, z));
}
default ProceduralStream<T> waste(String name) {
return this;
//return new WasteDetector<T>(this, name);
}
default ProceduralStream<T> subtract(ProceduralStream<Double> a) {
return subtract2D((x, z) -> a.get(x, z));
}
@@ -290,7 +307,7 @@ public interface ProceduralStream<T> extends ProceduralLayer, Interpolated<T> {
return new To3DStream<T>(this);
}
default ProceduralStream<T> cache2D(String name, Engine engine, int size) {
default CachedStream2D<T> cache2D(String name, Engine engine, int size) {
return new CachedStream2D<T>(name, engine, this, size);
}
@@ -406,6 +423,48 @@ public interface ProceduralStream<T> extends ProceduralLayer, Interpolated<T> {
}, Interpolated.DOUBLE);
}
default Hunk<T> fastFill2DParallel(int x, int z) {
Hunk<T> hunk = Hunk.newAtomicHunk(16, 16, 1);
BurstExecutor e = MultiBurst.burst.burst(256);
int i,j;
for(i = 0; i < 16; i++) {
for(j = 0; j < 16; j++) {
int fi = i;
int fj = j;
e.queue(() -> hunk.setRaw(fi, fj, 0, get(x+ fi, z+ fj)));
}
}
e.complete();
return hunk;
}
default void fastFill2DParallel(Hunk<T> hunk, BurstExecutor e, int x, int z) {
int i,j;
for(i = 0; i < 16; i++) {
for(j = 0; j < 16; j++) {
int fi = i;
int fj = j;
e.queue(() -> hunk.setRaw(fi, fj, 0, get(x+ fi, z+ fj)));
}
}
}
default Hunk<T> fastFill2D(int x, int z) {
Hunk<T> hunk = Hunk.newArrayHunk(16, 16, 1);
int i,j;
for(i = 0; i < 16; i++) {
for(j = 0; j < 16; j++) {
hunk.setRaw(i, j, 0, get(x+ i, z+ j));
}
}
return hunk;
}
default ProceduralStream<T> fit(double inMin, double inMax, double min, double max) {
return new FittedStream<T>(this, inMin, inMax, min, max);
}
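
The fastFill2D helpers added above materialize one chunk footprint (16 x 16) of stream values into a hunk, either serially or through the shared MultiBurst, so callers can sample a heightmap once instead of hitting the stream per block. A small usage sketch, with heightStream, chunkX and chunkZ assumed:

Hunk<Double> heights = heightStream.fastFill2D(chunkX << 4, chunkZ << 4);
double h = heights.getRaw(5, 11, 0);   // same as heightStream.get((chunkX << 4) + 5, (chunkZ << 4) + 11)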


@@ -23,20 +23,24 @@ import com.volmit.iris.core.service.PreservationSVC;
import com.volmit.iris.engine.data.cache.Cache;
import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.engine.framework.MeteredCache;
import com.volmit.iris.util.cache.WorldCache2D;
import com.volmit.iris.util.data.KCache;
import com.volmit.iris.util.hunk.Hunk;
import com.volmit.iris.util.hunk.storage.ArrayHunk;
import com.volmit.iris.util.stream.BasicStream;
import com.volmit.iris.util.stream.ProceduralStream;
public class CachedStream2D<T> extends BasicStream<T> implements ProceduralStream<T>, MeteredCache {
private final ProceduralStream<T> stream;
private final KCache<Long, T> cache;
private final WorldCache2D<T> cache;
private final Engine engine;
private boolean chunked = true;
public CachedStream2D(String name, Engine engine, ProceduralStream<T> stream, int size) {
super();
this.stream = stream;
this.engine = engine;
cache = new KCache<>(k -> stream.get(Cache.keyX(k), Cache.keyZ(k)), size);
cache = new WorldCache2D<>(stream::get);
Iris.service(PreservationSVC.class).registerCache(this);
}
@@ -52,7 +56,8 @@ public class CachedStream2D<T> extends BasicStream<T> implements ProceduralStrea
@Override
public T get(double x, double z) {
return cache.get(Cache.key((int) x, (int) z));
//return stream.get(x, z);
return cache.get((int) x, (int) z);
}
@Override
@@ -67,12 +72,12 @@ public class CachedStream2D<T> extends BasicStream<T> implements ProceduralStrea
@Override
public KCache<?, ?> getRawCache() {
return cache;
return null;
}
@Override
public long getMaxSize() {
return cache.getMaxSize();
return 256 * 32;
}
@Override


@@ -0,0 +1,53 @@
package com.volmit.iris.util.stream.utility;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.context.ChunkContext;
import com.volmit.iris.util.context.IrisContext;
import com.volmit.iris.util.function.Function3;
import com.volmit.iris.util.stream.BasicStream;
import com.volmit.iris.util.stream.ProceduralStream;
import java.util.concurrent.atomic.AtomicInteger;
public class ContextInjectingStream<T> extends BasicStream<T> {
private final Function3<ChunkContext, Integer, Integer, T> contextAccessor;
public ContextInjectingStream(ProceduralStream<T> stream, Function3<ChunkContext, Integer, Integer, T> contextAccessor) {
super(stream);
this.contextAccessor = contextAccessor;
}
@Override
public T get(double x, double z) {
IrisContext context = IrisContext.get();
if(context != null) {
ChunkContext chunkContext = context.getChunkContext();
if(chunkContext != null && (int)x >> 4 == chunkContext.getX() >> 4 && (int)z >> 4 == chunkContext.getZ() >> 4) {
T t = contextAccessor.apply(chunkContext, (int)x&15, (int)z&15);
if(t != null) {
return t;
}
}
}
return getTypedSource().get(x, z);
}
@Override
public T get(double x, double y, double z) {
return getTypedSource().get(x, y, z);
}
@Override
public double toDouble(T t) {
return getTypedSource().toDouble(t);
}
@Override
public T fromDouble(double d) {
return getTypedSource().fromDouble(d);
}
}
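
ContextInjectingStream short-circuits a lookup to the prefetched ChunkContext whenever the requested block falls inside the chunk currently bound to the thread's IrisContext, and otherwise falls through to the wrapped stream; note that the accessor receives coordinates already masked to 0-15. A hedged example of wiring it through the contextInjecting default added to ProceduralStream above (complex is an assumed IrisComplex, bx and bz assumed block coordinates):

ProceduralStream<Double> height = complex.getHeightStream()
    .contextInjecting((ctx, x, z) -> ctx.getHeight().get(x, z));
double h = height.get(bx, bz);   // served from the ChunkContext when (bx, bz) is in the active chunk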


@@ -34,11 +34,13 @@ public class ProfiledStream<T> extends BasicStream<T> {
public static final AtomicInteger ids = new AtomicInteger();
private final int id;
private final RollingSequence metrics;
public static final KList<ProfiledStream<?>> profiles = new KList<>();
public ProfiledStream(ProceduralStream<T> stream, int memory) {
super(stream);
this.metrics = new RollingSequence(memory);
this.id = ids.getAndAdd(1);
profiles.add(this);
}
public static void print(Consumer<String> printer, ProceduralStream<?> stream) {


@@ -0,0 +1,80 @@
package com.volmit.iris.util.stream.utility;
import com.volmit.iris.Iris;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.collection.KMap;
import com.volmit.iris.util.format.C;
import com.volmit.iris.util.stream.BasicStream;
import com.volmit.iris.util.stream.ProceduralStream;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
public class WasteDetector<T> extends BasicStream<T> {
public static final boolean checking = false;
private static final KMap<String, Integer> allAccesses = new KMap<>();
private static final KMap<String, List<Throwable>> allThrows = new KMap<>();
private final AtomicInteger accesses;
private final String name;
public WasteDetector(ProceduralStream<T> stream, String name) {
super(stream);
this.name = name;
accesses = new AtomicInteger(0);
}
@Override
public T get(double x, double z) {
if(checking)
{
if(x == 7 && z == 7) {
// AHHHAAA!
allAccesses.compute(name, (k, v) -> v == null ? 1 : v + 1);
try {
throw new RuntimeException();
}
catch(RuntimeException e) {
allThrows.computeIfAbsent(name, (k) -> new KList<>()).add(e);
}
}
}
return getTypedSource().get(x, z);
}
public static void printAll() {
if(checking)
{
Iris.warn("=========================================================");
for(String i : allAccesses.sortKNumber().reverse()) {
Iris.warn(i + ": " + allAccesses.get(i) + " Time(s)");
}
Iris.warn("=========================================================");
for(String i : allAccesses.sortKNumber().reverse()) {
Iris.warn("======== "+ i + " ========");
for(Throwable j : allThrows.get(i)) {
j.printStackTrace();
}
Iris.warn("---------------------------------------------------------");
}
Iris.warn("=========================================================");
}
}
@Override
public T get(double x, double y, double z) {
return getTypedSource().get(x, y, z);
}
@Override
public double toDouble(T t) {
return getTypedSource().toDouble(t);
}
@Override
public T fromDouble(double d) {
return getTypedSource().fromDouble(d);
}
}