Mirror of https://github.com/VolmitSoftware/Iris.git

hopefully fix the DataContainer this time

Author: Julian Krings
Date: 2025-08-25 20:53:52 +02:00
Parent: 693a05f2cb
Commit: a7b4bf3ff2
4 changed files with 45 additions and 106 deletions

DataBits.java

@@ -19,12 +19,12 @@
package com.volmit.iris.util.hunk.bits;
import com.volmit.iris.util.data.Varint;
import lombok.Getter;
import org.apache.commons.lang3.Validate;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLongArray;
import java.util.function.IntConsumer;
@@ -52,8 +52,10 @@ public class DataBits {
0, 5};
private final AtomicLongArray data;
@Getter
private final int bits;
private final long mask;
@Getter
private final int size;
private final int valuesPerLong;
private final int divideMul;
@@ -149,18 +151,9 @@ public class DataBits {
return data;
}
public int getSize() {
return size;
}
public int getBits() {
return bits;
}
public DataBits setBits(int newBits) {
if (bits != newBits) {
DataBits newData = new DataBits(newBits, size);
AtomicInteger c = new AtomicInteger(0);
for (int i = 0; i < size; i++) {
newData.set(i, get(i));
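
The DataBits hunks above show the packed storage that backs DataContainer: every entry occupies a fixed number of bits inside an AtomicLongArray, the hand-written getSize()/getBits() accessors give way to Lombok's @Getter, and setBits widens the store by copying each entry into a new instance. A minimal sketch of that layout and the widening copy, assuming simplified illustrative names (PackedBits, withBits) and a plain long[] rather than the Iris API:

// Minimal sketch of a packed bit store in the spirit of DataBits (simplified;
// PackedBits is illustrative and uses a plain long[] where Iris uses an
// AtomicLongArray). `size` entries of `bits` width are packed into 64-bit words.
final class PackedBits {
    private final long[] words;
    private final int bits;          // width of one entry
    private final long mask;         // lowest `bits` bits set
    private final int size;          // number of entries
    private final int valuesPerLong; // how many entries fit in one word

    PackedBits(int bits, int size) {
        this.bits = bits;
        this.size = size;
        this.mask = (1L << bits) - 1;
        this.valuesPerLong = 64 / bits;
        this.words = new long[(size + valuesPerLong - 1) / valuesPerLong];
    }

    int get(int index) {
        int shift = (index % valuesPerLong) * bits;
        return (int) ((words[index / valuesPerLong] >>> shift) & mask);
    }

    void set(int index, int value) {
        int slot = index / valuesPerLong;
        int shift = (index % valuesPerLong) * bits;
        words[slot] = (words[slot] & ~(mask << shift)) | ((value & mask) << shift);
    }

    // Analogous to DataBits#setBits: growing the entry width means rebuilding
    // the whole array and copying every entry across.
    PackedBits withBits(int newBits) {
        if (newBits == bits) return this;
        PackedBits wider = new PackedBits(newBits, size);
        for (int i = 0; i < size; i++) {
            wider.set(i, get(i));
        }
        return wider;
    }
}

Rebuilding on a width change keeps get/set simple at the cost of an O(n) copy whenever the palette outgrows the current width.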

DataContainer.java

@@ -19,78 +19,32 @@
package com.volmit.iris.util.hunk.bits;
import com.volmit.iris.util.data.Varint;
import lombok.Synchronized;
import java.io.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
public class DataContainer<T> {
protected static final int INITIAL_BITS = 3;
protected static final int LINEAR_BITS_LIMIT = 4;
protected static final int LINEAR_INITIAL_LENGTH = (int) Math.pow(2, LINEAR_BITS_LIMIT) + 1;
protected static final int[] BIT = computeBitLimits();
private final AtomicReference<Palette<T>> palette;
private final AtomicReference<DataBits> data;
private final AtomicInteger bits;
private volatile Palette<T> palette;
private volatile DataBits data;
private final int length;
private final Writable<T> writer;
public DataContainer(Writable<T> writer, int length) {
this.writer = writer;
this.length = length;
this.bits = new AtomicInteger(INITIAL_BITS);
this.data = new AtomicReference<>(new DataBits(INITIAL_BITS, length));
this.palette = new AtomicReference<>(newPalette(INITIAL_BITS));
this.data = new DataBits(INITIAL_BITS, length);
this.palette = newPalette(INITIAL_BITS);
}
public DataContainer(DataInputStream din, Writable<T> writer) throws IOException {
this.writer = writer;
this.length = Varint.readUnsignedVarInt(din);
this.palette = new AtomicReference<>(newPalette(din));
this.data = new AtomicReference<>(new DataBits(palette.get().bits(), length, din));
this.bits = new AtomicInteger(palette.get().bits());
}
public static String readBitString(DataInputStream din) throws IOException {
DataContainer<Character> c = new DataContainer<>(din, new Writable<Character>() {
@Override
public Character readNodeData(DataInputStream din) throws IOException {
return din.readChar();
}
@Override
public void writeNodeData(DataOutputStream dos, Character character) throws IOException {
dos.writeChar(character);
}
});
StringBuilder sb = new StringBuilder();
for (int i = c.size() - 1; i >= 0; i--) {
sb.setCharAt(i, c.get(i));
}
return sb.toString();
}
public static void writeBitString(String s, DataOutputStream dos) throws IOException {
DataContainer<Character> c = new DataContainer<>(new Writable<Character>() {
@Override
public Character readNodeData(DataInputStream din) throws IOException {
return din.readChar();
}
@Override
public void writeNodeData(DataOutputStream dos, Character character) throws IOException {
dos.writeChar(character);
}
}, s.length());
for (int i = 0; i < s.length(); i++) {
c.set(i, s.charAt(i));
}
c.writeDos(dos);
this.palette = newPalette(din);
this.data = new DataBits(palette.bits(), length, din);
}
private static int[] computeBitLimits() {
@@ -117,17 +71,9 @@ public class DataContainer<T> {
return DataContainer.BIT.length - 1;
}
public DataBits getData() {
return data.get();
}
public Palette<T> getPalette() {
return palette.get();
}
public String toString() {
return "DataContainer <" + length + " x " + bits + " bits> -> Palette<" + palette.get().getClass().getSimpleName().replaceAll("\\QPalette\\E", "") + ">: " + palette.get().size() +
" " + data.get().toString() + " PalBit: " + palette.get().bits();
return "DataContainer <" + length + " x " + data.getBits() + " bits> -> Palette<" + palette.getClass().getSimpleName().replaceAll("\\QPalette\\E", "") + ">: " + palette.size() +
" " + data.toString() + " PalBit: " + palette.bits();
}
public byte[] write() throws IOException {
@@ -140,15 +86,14 @@ public class DataContainer<T> {
writeDos(new DataOutputStream(out));
}
@Synchronized
public void writeDos(DataOutputStream dos) throws IOException {
synchronized (this) {
Varint.writeUnsignedVarInt(length, dos);
Varint.writeUnsignedVarInt(palette.get().size(), dos);
palette.get().iterateIO((data, __) -> writer.writeNodeData(dos, data));
data.get().write(dos);
Varint.writeUnsignedVarInt(palette.size(), dos);
palette.iterateIO((data, __) -> writer.writeNodeData(dos, data));
data.write(dos);
dos.flush();
}
}
private Palette<T> newPalette(DataInputStream din) throws IOException {
int paletteSize = Varint.readUnsignedVarInt(din);
@@ -165,45 +110,44 @@ public class DataContainer<T> {
return new HashPalette<>();
}
@Synchronized
public void set(int position, T t) {
synchronized (this) {
int id = palette.get().id(t);
int id = palette.id(t);
if (id == -1) {
id = palette.get().add(t);
id = palette.add(t);
updateBits();
}
data.get().set(position, id);
}
data.set(position, id);
}
@Synchronized
@SuppressWarnings("NonAtomicOperationOnVolatileField")
private void updateBits() {
if (palette.get().bits() == bits.get())
if (palette.bits() == data.getBits())
return;
int bits = palette.get().bits();
if (this.bits.get() <= LINEAR_BITS_LIMIT != bits <= LINEAR_BITS_LIMIT) {
palette.updateAndGet(p -> newPalette(bits).from(p));
int bits = palette.bits();
if (data.getBits() <= LINEAR_BITS_LIMIT != bits <= LINEAR_BITS_LIMIT) {
palette = newPalette(bits).from(palette);
}
data.updateAndGet(d -> d.setBits(bits));
this.bits.set(bits);
data = data.setBits(bits);
}
@Synchronized
public T get(int position) {
synchronized (this) {
int id = data.get().get(position);
int id = data.get(position);
if (id <= 0) {
return null;
}
return palette.get().get(id);
}
return palette.get(id);
}
public int size() {
return getData().getSize();
return data.getSize();
}
}
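
The DataContainer change above swaps the AtomicReference<Palette<T>>, AtomicReference<DataBits> and AtomicInteger fields for plain volatile fields and drops the readBitString/writeBitString helpers. Compound updates such as updateBits (grow the palette, then widen the bits) now happen behind a single lock, while volatile publication keeps readers that skip the lock (toString, for example) from seeing a half-built object. A minimal sketch of that pattern, assuming illustrative names (PalettedStore) rather than the Iris classes:

// Sketch of the pattern the commit moves DataContainer to (PalettedStore and
// its fields are illustrative, not the Iris classes): related structures are
// plain volatile fields, and every compound update runs under one lock.
import java.util.ArrayList;
import java.util.List;

final class PalettedStore<T> {
    private volatile List<T> palette = new ArrayList<>();
    private volatile int[] ids;

    PalettedStore(int length) {
        this.ids = new int[length];
    }

    // Growing the palette and writing the id that refers to the new entry must
    // be observed together, so both happen inside the same synchronized method.
    public synchronized void set(int position, T value) {
        int id = palette.indexOf(value);
        if (id == -1) {
            List<T> grown = new ArrayList<>(palette);
            grown.add(value);
            id = grown.size() - 1;
            palette = grown;   // volatile write: publishes a fully built list
        }
        ids[position] = id;
    }

    public synchronized T get(int position) {
        int id = ids[position];
        return id < palette.size() ? palette.get(id) : null;
    }

    @Override public String toString() {
        // like DataContainer#toString, reads the volatile fields without the lock
        return "PalettedStore<" + ids.length + "> palette=" + palette.size();
    }
}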

Mantle.java

@@ -468,8 +468,8 @@ public class Mantle {
ioTectonicUnload.acquireUninterruptibly(LOCK_SIZE);
try {
for (long id : toUnload) {
double unloadTime = M.ms() - adjustedIdleDuration.get();
for (long id : toUnload) {
burst.queue(() -> hyperLock.withLong(id, () -> {
TectonicPlate m = loadedRegions.get(id);
if (m == null) {
@@ -490,6 +490,7 @@ public class Mantle {
}
try {
m.close();
worker.write(fileForRegion(dataFolder, id, false).getName(), m);
oldFileForRegion(dataFolder, id).delete();
loadedRegions.remove(id, m);
@@ -497,7 +498,7 @@ public class Mantle {
toUnload.remove(id);
i.incrementAndGet();
Iris.debug("Unloaded Tectonic Plate " + C.DARK_GREEN + Cache.keyX(id) + " " + Cache.keyZ(id));
} catch (IOException e) {
} catch (IOException | InterruptedException e) {
Iris.reportError(e);
}
}));
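
In the Mantle hunk above, each region queued for unload is processed on the burst executor under a per-region lock: the plate is closed, handed to a worker via worker.write(...), the old region file is deleted, and the catch widens to IOException | InterruptedException, presumably because the hand-off to the writer can now block and be interrupted. A rough sketch of that shape, assuming stand-in types (RegionStore, Plate) rather than the Iris classes:

// Sketch of the unload path in the Mantle hunk (RegionStore, Plate and the
// file naming are illustrative stand-ins, not the Iris types): each region is
// unloaded on a worker pool under a per-region lock.
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

final class RegionStore {
    interface Plate {
        void close();
        void write(String fileName) throws IOException, InterruptedException;
    }

    private final Map<Long, Plate> loaded = new ConcurrentHashMap<>();
    private final Map<Long, Object> locks = new ConcurrentHashMap<>();
    private final ExecutorService burst = Executors.newWorkStealingPool();

    void unload(Iterable<Long> toUnload) {
        for (long id : toUnload) {
            burst.submit(() -> {
                // per-region lock, analogous to hyperLock.withLong(id, ...)
                synchronized (locks.computeIfAbsent(id, k -> new Object())) {
                    Plate plate = loaded.get(id);
                    if (plate == null) return;
                    try {
                        plate.close();
                        plate.write("r." + id + ".bin"); // hand-off to the writer, may block
                        loaded.remove(id, plate);
                    } catch (IOException | InterruptedException e) {
                        // report and move on, as the Iris code does with Iris.reportError(e)
                        e.printStackTrace();
                    }
                }
            });
        }
    }
}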

Matter.java

@@ -141,6 +141,7 @@ public interface Matter {
long size = din.readInt();
if (size == 0) continue;
long start = din.count();
long end = start + size;
Iris.addPanic("read.matter.slice", i + "");
try {
@@ -150,9 +151,9 @@ public interface Matter {
Class<?> type = Class.forName(cn);
MatterSlice<?> slice = matter.createSlice(type, matter);
slice.read(din);
if (din.count() < end) throw new IOException("Matter slice read size mismatch!");
matter.putSlice(type, slice);
} catch (Throwable e) {
long end = start + size;
if (!(e instanceof ClassNotFoundException)) {
Iris.error("Failed to read matter slice, skipping it.");
Iris.addPanic("read.byte.range", start + " " + end);
@@ -165,7 +166,7 @@ public interface Matter {
din.skipTo(end);
}
if (din.count() != start + size) {
if (din.count() != end) {
throw new IOException("Matter slice read size mismatch!");
}
}
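
The Matter hunk above tightens the slice framing: each slice is preceded by a byte-length prefix, end = start + size is computed once up front, a slice that consumes less than its declared size now fails fast inside the try, and the final cursor check compares against end. A minimal sketch of length-framed reading over a counting stream, assuming illustrative names (CountingInput, SliceReader, readFramed); Iris uses its own counting stream with count() and skipTo():

// Sketch of length-prefixed frame reading with a byte counter (illustrative,
// not the Iris API): verify the cursor lands exactly on start + size, and skip
// forward to resynchronize when a slice fails to parse.
import java.io.*;

final class FramedReader {
    interface SliceReader {
        void read(DataInputStream din) throws IOException;
    }

    static final class CountingInput extends FilterInputStream {
        private long count;

        CountingInput(InputStream in) { super(in); }

        @Override public int read() throws IOException {
            int b = super.read();
            if (b >= 0) count++;
            return b;
        }

        @Override public int read(byte[] buf, int off, int len) throws IOException {
            int n = super.read(buf, off, len);
            if (n > 0) count += n;
            return n;
        }

        long count() { return count; }

        void skipTo(long target) throws IOException {
            while (count < target) {
                if (read() < 0) throw new EOFException("cannot reach offset " + target);
            }
        }
    }

    static void readFramed(CountingInput in, SliceReader slice) throws IOException {
        DataInputStream din = new DataInputStream(in);
        int size = din.readInt();          // length prefix written ahead of the slice
        long start = in.count();
        long end = start + size;
        try {
            slice.read(din);
            if (in.count() < end) throw new IOException("slice read too little");
        } catch (IOException e) {
            // report the broken slice, then resynchronize on the next frame
            in.skipTo(end);
        }
        if (in.count() != end) throw new IOException("Matter slice read size mismatch!");
    }
}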