mirror of https://github.com/VolmitSoftware/Iris.git
synced 2025-12-28 11:39:07 +00:00
Fixes
@@ -184,21 +184,21 @@ public class IrisComplex implements DataProvider {
        ).convertAware2D(ProceduralStream::get).cache2D(cacheSize);
        inferredStreams.put(InferredType.CAVE, caveBiomeStream);
        landBiomeStream = regionStream.convert((r)
                -> engine.getDimension().getLandBiomeStyle().create(rng.nextParallelRNG(InferredType.LAND.ordinal()), getData()).stream()
                .zoom(r.getLandBiomeZoom())
                .selectRarity(r.getLandBiomes(), (i) -> data.getBiomeLoader().load(i))
                .convertCached((s) -> data.getBiomeLoader().load(s)
                        .setInferredType(InferredType.LAND))
        ).convertAware2D(ProceduralStream::get)
                .cache2D(cacheSize);
        inferredStreams.put(InferredType.LAND, landBiomeStream);
        seaBiomeStream = regionStream.convert((r)
                -> engine.getDimension().getSeaBiomeStyle().create(rng.nextParallelRNG(InferredType.SEA.ordinal()), getData()).stream()
                .zoom(r.getSeaBiomeZoom())
                .selectRarity(r.getSeaBiomes(), (i) -> data.getBiomeLoader().load(i))
                .convertCached((s) -> data.getBiomeLoader().load(s)
                        .setInferredType(InferredType.SEA))
        ).convertAware2D(ProceduralStream::get)
                .cache2D(cacheSize);
        inferredStreams.put(InferredType.SEA, seaBiomeStream);
        shoreBiomeStream = regionStream.convert((r)
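Each of these chains follows the same shape: resolve a per-region style, zoom it, pick a biome by rarity, tag its inferred type, and memoize the result per column so repeated lookups are cheap. A minimal, self-contained sketch of that caching idea, built around a hypothetical Lookup2D interface (this is not the Iris ProceduralStream API):

import java.util.LinkedHashMap;
import java.util.Map;

interface Lookup2D<T> {
    T get(int x, int z);

    // Wrap this lookup in a small LRU cache keyed on the packed (x, z) coordinates.
    // Illustration only: not thread-safe, and not how Iris' cache2D is implemented.
    default Lookup2D<T> cached(int maxEntries) {
        Map<Long, T> cache = new LinkedHashMap<Long, T>(16, 0.75f, true) {
            @Override
            protected boolean removeEldestEntry(Map.Entry<Long, T> eldest) {
                return size() > maxEntries;
            }
        };
        return (x, z) -> cache.computeIfAbsent(((long) x << 32) ^ (z & 0xFFFFFFFFL),
                k -> get(x, z));
    }
}

class BiomeLookupSketch {
    public static void main(String[] args) {
        // Stand-in for a biome chosen per column; real code would select by rarity from noise.
        Lookup2D<String> biome = ((Lookup2D<String>) (x, z) ->
                ((x * 31 + z) % 2 == 0) ? "plains" : "ocean").cached(1024);

        System.out.println(biome.get(10, 20)); // computed once...
        System.out.println(biome.get(10, 20)); // ...then answered from the cache
    }
}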
@@ -240,18 +240,18 @@ public class IrisComplex implements DataProvider {
            return 1D;
        });
        trueBiomeStream = focus != null ? ProceduralStream.of((x, y) -> focus, Interpolated.of(a -> 0D,
                b -> focus)).convertAware2D((b, x, z) -> {
            for (IrisFeaturePositional i : engine.getFramework().getEngineParallax().forEachFeature(x, z)) {
                IrisBiome bx = i.filter(x, z, b, rng);

                if (bx != null) {
                    bx.setInferredType(b.getInferredType());
                    return bx;
                }
            }

            return b;
        })
                .cache2D(cacheSize) : heightStream
                .convertAware2D((h, x, z) ->
                        fixBiomeType(h, baseBiomeStream.get(x, z),
@@ -270,18 +270,18 @@ public class IrisComplex implements DataProvider {
        })
                .cache2D(cacheSize);
        trueBiomeStream = focus != null ? ProceduralStream.of((x, y) -> focus, Interpolated.of(a -> 0D,
                b -> focus)).convertAware2D((b, x, z) -> {
            for (IrisFeaturePositional i : engine.getFramework().getEngineParallax().forEachFeature(x, z)) {
                IrisBiome bx = i.filter(x, z, b, rng);

                if (bx != null) {
                    bx.setInferredType(b.getInferredType());
                    return bx;
                }
            }

            return b;
        })
                .cache2D(cacheSize) : heightStream
                .convertAware2D((h, x, z) ->
                        fixBiomeType(h, baseBiomeStream.get(x, z),
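Both IrisComplex hunks above carry the same override logic: each positional feature that touches the column gets a chance to substitute its own biome, the first non-null answer wins, and the replacement inherits the inferred type of the biome it displaces; otherwise the base biome is kept. A standalone sketch of that pattern with stand-in types (FeatureFilter and Biome here are illustrative, not Iris classes):

import java.util.List;

class BiomeOverrideSketch {
    interface FeatureFilter {
        // Return a replacement biome for (x, z), or null if this feature does not apply there.
        Biome filter(int x, int z, Biome base);
    }

    record Biome(String key, String inferredType) {
        Biome withInferredType(String type) {
            return new Biome(key, type);
        }
    }

    static Biome resolve(int x, int z, Biome base, List<FeatureFilter> features) {
        for (FeatureFilter f : features) {
            Biome replaced = f.filter(x, z, base);
            if (replaced != null) {
                // Keep the land/sea/cave/shore classification of the biome being replaced.
                return replaced.withInferredType(base.inferredType());
            }
        }
        return base; // no feature claimed this column
    }

    public static void main(String[] args) {
        FeatureFilter lake = (x, z, base) -> (x * x + z * z < 25) ? new Biome("lake", "?") : null;
        System.out.println(resolve(1, 2, new Biome("plains", "land"), List.of(lake)));
        System.out.println(resolve(50, 50, new Biome("plains", "land"), List.of(lake)));
    }
}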
@@ -357,10 +357,10 @@ public class IrisComplex implements DataProvider {
            return m;
        }, Interpolated.INT).cache2D(cacheSize);
        baseBiomeIDStream = trueBiomeStream.convertAware2D((b, x, z) -> {
            UUID d = regionIDStream.get(x, z);
            return new UUID(b.getLoadKey().hashCode() * 818223L,
                    d.hashCode());
        })
                .cache2D(cacheSize);
        islandTopStream = islandStream.convertAware2D((i, x, z) ->
                i ? heightStream.round()
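baseBiomeIDStream packs a biome-key hash and a per-region UUID hash into a single deterministic identifier. A tiny standalone version of that arithmetic; the 818223L multiplier is copied from the hunk, everything else is illustrative:

import java.util.UUID;

class CompositeIdSketch {
    // Most-significant bits from the biome load key, least-significant bits from the region id.
    static UUID biomeRegionId(String biomeLoadKey, UUID regionId) {
        return new UUID(biomeLoadKey.hashCode() * 818223L, regionId.hashCode());
    }

    public static void main(String[] args) {
        UUID region = UUID.nameUUIDFromBytes("region:example".getBytes());
        // Deterministic: the same (biome, region) pair always yields the same UUID.
        System.out.println(biomeRegionId("plains", region));
        System.out.println(biomeRegionId("plains", region));
    }
}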
@@ -179,23 +179,23 @@ public class IrisWorldManager extends EngineAssignedWorldManager {

        //@builder
        spawnRandomly(Stream.concat(Stream.concat(
                getData().getSpawnerLoader()
                        .loadAll(getDimension().getEntitySpawners())
                        .shuffleCopy(RNG.r).stream().filter(this::canSpawn),
                getData().getSpawnerLoader().streamAll(getEngine().getFramework().getEngineParallax()
                        .getFeaturesInChunk(c).stream()
                        .flatMap((o) -> o.getFeature().getEntitySpawners().stream()))
                        .filter(this::canSpawn))
                .filter((i) -> i.isValid(biome))
                .flatMap(this::stream),
                Stream.concat(getData().getSpawnerLoader()
                        .loadAll(getEngine().getRegion(c.getX() << 4, c.getZ() << 4).getEntitySpawners())
                        .shuffleCopy(RNG.r).stream().filter(this::canSpawn)
                        .flatMap(this::stream),
                        getData().getSpawnerLoader()
                                .loadAll(getEngine().getSurfaceBiome(c.getX() << 4, c.getZ() << 4).getEntitySpawners())
                                .shuffleCopy(RNG.r).stream().filter(this::canSpawn)
                                .flatMap(this::stream)))
                .collect(Collectors.toList()))
                .popRandom(RNG.r, max).forEach((i) -> spawn(c, i));
        //@done
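The pipeline above concatenates dimension, feature, region, and surface-biome spawner lists, filters them by canSpawn and isValid(biome), collects the survivors, and spawns a random subset. A compact sketch of that shape with made-up types (Spawner and the random pick below are hypothetical, not the Iris API):

import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class SpawnPipelineSketch {
    record Spawner(String name, boolean valid) {}

    public static void main(String[] args) {
        List<Spawner> dimension = List.of(new Spawner("zombie", true));
        List<Spawner> region = List.of(new Spawner("cow", true), new Spawner("ghast", false));
        List<Spawner> biome = List.of(new Spawner("sheep", true));

        // Merge all sources, drop invalid entries, and keep the survivors as candidates.
        List<Spawner> candidates = Stream.concat(
                        Stream.concat(dimension.stream(), region.stream()),
                        biome.stream())
                .filter(Spawner::valid)
                .collect(Collectors.toList());

        // popRandom-style selection: pick up to `max` distinct random candidates.
        Random rng = new Random(42);
        int max = 2;
        rng.ints(0, candidates.size())
                .distinct()
                .limit(Math.min(max, candidates.size()))
                .mapToObj(candidates::get)
                .forEach(s -> System.out.println("spawn " + s.name()));
    }
}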
@@ -109,7 +109,7 @@ public class EngineCompositeGenerator extends ChunkGenerator implements IrisAcce
                lastHotloadTime += p.getMilliseconds();
                lastHotloadTime /= 2;

                return 120 + (long) (lastHotloadTime / 2) + Math.min(hotloaderMisses * 125, 1375);
                return 120 + (lastHotloadTime / 2) + Math.min(hotloaderMisses * 125, 1375);
            }
        };
        ticker.setPriority(Thread.MIN_PRIORITY);
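The only difference between the two return lines in this hunk is the (long) cast on the halved hotload time. A tiny standalone version of the cooldown formula, assuming the smoothed time is tracked as a double (the field type is not visible in the diff):

class HotloadCooldownSketch {
    // base 120 ms, plus half of the smoothed hotload time, plus a miss penalty capped at 1375 ms
    static long cooldownMs(double lastHotloadTime, int hotloaderMisses) {
        return 120 + (long) (lastHotloadTime / 2) + Math.min(hotloaderMisses * 125, 1375);
    }

    public static void main(String[] args) {
        System.out.println(cooldownMs(800.0, 3)); // 120 + 400 + 375 = 895
    }
}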
@@ -18,7 +18,6 @@

package com.volmit.iris.engine.jigsaw;

import com.volmit.iris.Iris;
import com.volmit.iris.core.project.loader.IrisData;
import com.volmit.iris.core.tools.IrisWorlds;
import com.volmit.iris.engine.framework.Engine;
@@ -75,8 +75,7 @@ public class PlannedStructure {

        generateTerminators();

        for(PlannedPiece i : pieces)
        {
        for (PlannedPiece i : pieces) {
            Iris.debug("Place: " + i.getObject().getLoadKey() + " at @ relative " + i.getPosition().toString());
        }
    }
@@ -74,14 +74,14 @@ public class IrisCaverns {
    public ProceduralStream<Double> stream(RNG rng, IrisData data) {
        if (preThresholdInterpolation) {
            return streamCache.aquire(() -> ProceduralStream.of((xx, yy, zz)
                    -> (getZone(xx, yy, zz, rng, data)
                    .getCarved(rng, data, xx, yy, zz)), Interpolated.DOUBLE)
                    .cache3D(65535));
        }

        return streamCache.aquire(() -> ProceduralStream.of((xx, yy, zz)
                -> (getZone(xx, yy, zz, rng, data)
                .isCarved(rng, data, xx, yy, zz) ? 1D : 0D), Interpolated.DOUBLE)
                .cache3D(65535));
    }
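The two branches above build the same 3D stream in two flavors: with pre-threshold interpolation the raw carve value is kept so it can be smoothed before being thresholded, otherwise each sample collapses to 1.0 (carved) or 0.0 (solid) immediately. A hedged sketch of that distinction with made-up density math (not the Iris getZone/getCarved/isCarved implementation):

class CarveStreamSketch {
    // Stand-in for a zone's continuous carve value at a point.
    static double carveDensity(double x, double y, double z) {
        return Math.sin(x * 0.1) * Math.cos(z * 0.1) - y * 0.001;
    }

    static double sample(double x, double y, double z, boolean preThresholdInterpolation) {
        double d = carveDensity(x, y, z);
        // Continuous value when interpolation happens later; hard 1/0 when it does not.
        return preThresholdInterpolation ? d : (d > 0 ? 1D : 0D);
    }

    public static void main(String[] args) {
        System.out.println(sample(8, 40, 12, true));  // raw density, interpolate then threshold
        System.out.println(sample(8, 40, 12, false)); // already carved? 1.0 : 0.0
    }
}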
@@ -122,7 +122,7 @@ public class IrisDecorator {
    public CNG getVarianceGenerator(RNG rng, IrisData data) {
        return varianceGenerator.aquire(() ->
                variance.create(
                        rng.nextParallelRNG(getBlockData(data).size()), data)
                        .scale(1D / variance.getZoom()));
    }
@@ -348,7 +348,11 @@ public class IrisObject extends IrisRegistrant {
    }

    public void write(File file) throws IOException {
        file.getParentFile().mkdirs();
        if(file == null)
        {
            return;
        }

        FileOutputStream out = new FileOutputStream(file);
        write(out);
        out.close();
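Note that the null check in write(File) runs after file.getParentFile() has already been dereferenced, so a null argument would still throw before reaching it. Purely as an illustration (not what this commit changes), a defensive variant would test first and close the stream with try-with-resources:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

class SafeWriteSketch {
    static void writeTo(File file, byte[] payload) throws IOException {
        if (file == null) {
            return; // bail out before touching the file at all
        }

        File parent = file.getParentFile();
        if (parent != null) {
            parent.mkdirs();
        }

        try (OutputStream out = new FileOutputStream(file)) {
            out.write(payload); // stream is closed even if the write throws
        }
    }
}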