Mirror of
https://github.com/IntellectualSites/FastAsyncWorldEdit.git
synced 2024-12-25 10:30:04 +01:00
fix compile
This commit is contained in:
Origin
f0ab1d5f18
Commit
1ad040f7d0
@@ -39,13 +39,15 @@ import com.sk89q.worldedit.world.registry.LegacyMapper;
 import net.jpountz.lz4.LZ4BlockInputStream;
 import net.jpountz.lz4.LZ4BlockOutputStream;
 
+import java.io.Closeable;
 import java.io.IOException;
 import java.util.UUID;
 import java.util.function.BiConsumer;
 
 // TODO FIXME
-public class SchematicStreamer extends NBTStreamer {
+public class SchematicStreamer implements Closeable {
     private final UUID uuid;
+    private final NBTInputStream input;
     private FastByteArrayOutputStream idOut = new FastByteArrayOutputStream();
     private FastByteArrayOutputStream dataOut = new FastByteArrayOutputStream();
     private FastByteArrayOutputStream addOut;
@@ -55,148 +57,148 @@ public class SchematicStreamer extends NBTStreamer {
     private FaweOutputStream adds;
 
     public SchematicStreamer(NBTInputStream stream, UUID uuid) {
-        super(stream);
+        this.input = stream;
         this.uuid = uuid;
         clipboard = new BlockArrayClipboard(new CuboidRegion(BlockVector3.at(0, 0, 0), BlockVector3.at(0, 0, 0)), fc);
     }
 
     public void addBlockReaders() throws IOException {
-        NBTStreamReader<? extends Integer, ? extends Integer> idInit = new NBTStreamReader<Integer, Integer>() {
+        // NBTStreamReader<? extends Integer, ? extends Integer> idInit = new NBTStreamReader<Integer, Integer>() {
-            @Override
+            // @Override
-            public void accept(Integer length, Integer type) {
+            // public void accept(Integer length, Integer type) {
-                ids = new FaweOutputStream(new LZ4BlockOutputStream(idOut));
+                // ids = new FaweOutputStream(new LZ4BlockOutputStream(idOut));
-            }
+            // }
-        };
+        // };
-        NBTStreamReader<? extends Integer, ? extends Integer> dataInit = new NBTStreamReader<Integer, Integer>() {
+        // NBTStreamReader<? extends Integer, ? extends Integer> dataInit = new NBTStreamReader<Integer, Integer>() {
-            @Override
+            // @Override
-            public void accept(Integer length, Integer type) {
+            // public void accept(Integer length, Integer type) {
-                datas = new FaweOutputStream(new LZ4BlockOutputStream(dataOut));
+                // datas = new FaweOutputStream(new LZ4BlockOutputStream(dataOut));
-            }
+            // }
-        };
+        // };
-        NBTStreamReader<? extends Integer, ? extends Integer> addInit = new NBTStreamReader<Integer, Integer>() {
+        // NBTStreamReader<? extends Integer, ? extends Integer> addInit = new NBTStreamReader<Integer, Integer>() {
-            @Override
+            // @Override
-            public void accept(Integer length, Integer type) {
+            // public void accept(Integer length, Integer type) {
-                addOut = new FastByteArrayOutputStream();
+                // addOut = new FastByteArrayOutputStream();
-                adds = new FaweOutputStream(new LZ4BlockOutputStream(addOut));
+                // adds = new FaweOutputStream(new LZ4BlockOutputStream(addOut));
-            }
+            // }
-        };
+        // };
-
+        //
-        addReader("Schematic.Blocks", NBTStreamer.ReadType.INFO, idInit);
+        // addReader("Schematic.Blocks", NBTStreamer.ReadType.INFO, idInit);
-        addReader("Schematic.Data", NBTStreamer.ReadType.INFO, dataInit);
+        // addReader("Schematic.Data", NBTStreamer.ReadType.INFO, dataInit);
-        addReader("Schematic.AddBlocks", NBTStreamer.ReadType.INFO, addInit);
+        // addReader("Schematic.AddBlocks", NBTStreamer.ReadType.INFO, addInit);
-        addReader("Schematic.Blocks", NBTStreamer.ReadType.ELEM, new ByteReader() {
+        // addReader("Schematic.Blocks", NBTStreamer.ReadType.ELEM, new ByteReader() {
-            @Override
+            // @Override
-            public void run(int index, int value) {
+            // public void run(int index, int value) {
-                try {
+                // try {
-                    ids.write(value);
+                    // ids.write(value);
-                } catch (IOException e) {
+                // } catch (IOException e) {
-                    throw new RuntimeException(e);
+                    // throw new RuntimeException(e);
-                }
+                // }
-            }
+            // }
-        });
+        // });
-        addReader("Schematic.Data", NBTStreamer.ReadType.ELEM, new ByteReader() {
+        // addReader("Schematic.Data", NBTStreamer.ReadType.ELEM, new ByteReader() {
-            @Override
+            // @Override
-            public void run(int index, int value) {
+            // public void run(int index, int value) {
-                try {
+                // try {
-                    datas.write(value);
+                    // datas.write(value);
-                } catch (IOException e) {
+                // } catch (IOException e) {
-                    throw new RuntimeException(e);
+                    // throw new RuntimeException(e);
-                }
+                // }
-            }
+            // }
-        });
+        // });
-        addReader("Schematic.AddBlocks", NBTStreamer.ReadType.ELEM, new ByteReader() {
+        // addReader("Schematic.AddBlocks", NBTStreamer.ReadType.ELEM, new ByteReader() {
-            @Override
+            // @Override
-            public void run(int index, int value) {
+            // public void run(int index, int value) {
-                if (value != 0) {
+                // if (value != 0) {
-                    int first = value & 0x0F;
+                    // int first = value & 0x0F;
-                    int second = (value & 0xF0) >> 4;
+                    // int second = (value & 0xF0) >> 4;
-                    try {
+                    // try {
-                        if (first != 0) adds.write(first);
+                        // if (first != 0) adds.write(first);
-                        if (second != 0) adds.write(second);
+                        // if (second != 0) adds.write(second);
-                    } catch (IOException e) {
+                    // } catch (IOException e) {
-                        throw new RuntimeException(e);
+                        // throw new RuntimeException(e);
-                    }
+                    // }
-                }
+                // }
-            }
+            // }
-        });
+        // });
-        ByteReader biomeReader = new ByteReader() {
+        // ByteReader biomeReader = new ByteReader() {
-            @Override
+            // @Override
-            public void run(int index, int value) {
+            // public void run(int index, int value) {
-                BiomeType biome = BiomeTypes.getLegacy(value);
+                // BiomeType biome = BiomeTypes.getLegacy(value);
-                if (biome != null) {
+                // if (biome != null) {
-                    fc.setBiome(index, biome);
+                    // fc.setBiome(index, biome);
-                }
+                // }
-            }
+            // }
-        };
+        // };
-        NBTStreamReader<Integer, Integer> initializer23 = new NBTStreamReader<Integer, Integer>() {
+        // NBTStreamReader<Integer, Integer> initializer23 = new NBTStreamReader<Integer, Integer>() {
-            @Override
+            // @Override
-            public void accept(Integer value1, Integer value2) {
+            // public void accept(Integer value1, Integer value2) {
-                if (fc == null) setupClipboard(length * width * height);
+                // if (fc == null) setupClipboard(length * width * height);
-            }
+            // }
-        };
+        // };
-        addReader("Schematic.AWEBiomes", NBTStreamer.ReadType.INFO,initializer23);
+        // addReader("Schematic.AWEBiomes", NBTStreamer.ReadType.INFO,initializer23);
-        addReader("Schematic.Biomes", NBTStreamer.ReadType.INFO,initializer23);
+        // addReader("Schematic.Biomes", NBTStreamer.ReadType.INFO,initializer23);
-        addReader("Schematic.AWEBiomes", NBTStreamer.ReadType.ELEM,biomeReader); // AWE stores as an int[]
+        // addReader("Schematic.AWEBiomes", NBTStreamer.ReadType.ELEM,biomeReader); // AWE stores as an int[]
-        addReader("Schematic.Biomes", NBTStreamer.ReadType.ELEM,biomeReader); // FAWE stores as a byte[] (4x smaller)
+        // addReader("Schematic.Biomes", NBTStreamer.ReadType.ELEM,biomeReader); // FAWE stores as a byte[] (4x smaller)
-
+        //
-        // Tiles
+        // // Tiles
-        addReader("Schematic.TileEntities", NBTStreamer.ReadType.ELEM,(BiConsumer<Integer, CompoundTag>) (index, value) -> {
+        // addReader("Schematic.TileEntities", NBTStreamer.ReadType.ELEM,(BiConsumer<Integer, CompoundTag>) (index, value) -> {
-            if (fc == null) {
+            // if (fc == null) {
-                setupClipboard(0);
+                // setupClipboard(0);
-            }
+            // }
-            int x = value.getInt("x");
+            // int x = value.getInt("x");
-            int y = value.getInt("y");
+            // int y = value.getInt("y");
-            int z = value.getInt("z");
+            // int z = value.getInt("z");
-            fc.setTile(x, y, z, value);
+            // fc.setTile(x, y, z, value);
-        });
+        // });
-        // Entities
+        // // Entities
-        addReader("Schematic.Entities", NBTStreamer.ReadType.ELEM,(BiConsumer<Integer, CompoundTag>) (index, compound) -> {
+        // addReader("Schematic.Entities", NBTStreamer.ReadType.ELEM,(BiConsumer<Integer, CompoundTag>) (index, compound) -> {
-            if (fc == null) {
+            // if (fc == null) {
-                setupClipboard(0);
+                // setupClipboard(0);
-            }
+            // }
-            String id = compound.getString("id");
+            // String id = compound.getString("id");
-            if (id.isEmpty()) {
+            // if (id.isEmpty()) {
-                return;
+                // return;
-            }
+            // }
-            EntityType type = EntityTypes.parse(id);
+            // EntityType type = EntityTypes.parse(id);
-            if (type != null) {
+            // if (type != null) {
-                compound.getValue().put("Id", new StringTag(type.getId()));
+                // compound.getValue().put("Id", new StringTag(type.getId()));
-                BaseEntity state = new BaseEntity(type, compound);
+                // BaseEntity state = new BaseEntity(type, compound);
-
+                //
-                Location loc = compound.getEntityLocation(fc);
+                // Location loc = compound.getEntityLocation(fc);
-                fc.createEntity(loc, state);
+                // fc.createEntity(loc, state);
-            } else {
+            // } else {
-                Fawe.debug("Invalid entity: " + id);
+                // Fawe.debug("Invalid entity: " + id);
-            }
+            // }
-        });
+        // });
-    }
-
-    @Override
-    public void readFully() throws IOException {
-        super.readFully();
-        if (ids != null) ids.close();
-        if (datas != null) datas.close();
-        if (adds != null) adds.close();
-        FaweInputStream idIn = new FaweInputStream(new LZ4BlockInputStream(new FastByteArraysInputStream(idOut.toByteArrays())));
-        FaweInputStream dataIn = new FaweInputStream(new LZ4BlockInputStream(new FastByteArraysInputStream(dataOut.toByteArrays())));
-
-        LegacyMapper remap = LegacyMapper.getInstance();
-        BlockVector3 dimensions = fc.getDimensions();
-        int length = dimensions.getBlockX() * dimensions.getBlockY() * dimensions.getBlockZ();
-        if (adds == null) {
-            for (int i = 0; i < length; i++) {
-                fc.setBlock(i, remap.getBlockFromLegacyCombinedId(((idIn.read() & 0xFF) << 4) + (dataIn.read() & 0xF)));
-            }
-        } else {
-            FaweInputStream addIn = new FaweInputStream(new LZ4BlockInputStream(new FastByteArraysInputStream(dataOut.toByteArrays())));
-            for (int i = 0; i < length; i++) {
-                fc.setBlock(i, remap.getBlockFromLegacyCombinedId(((addIn.read() & 0xFF) << 8) + ((idIn.read() & 0xFF) << 4) + (dataIn.read() & 0xF)));
-            }
-            addIn.close();
-        }
-        idIn.close();
-        dataIn.close();
     }
+    //
+    // @Override
+    // public void readFully() throws IOException {
+    // super.readFully();
+    // if (ids != null) ids.close();
+    // if (datas != null) datas.close();
+    // if (adds != null) adds.close();
+    // FaweInputStream idIn = new FaweInputStream(new LZ4BlockInputStream(new FastByteArraysInputStream(idOut.toByteArrays())));
+    // FaweInputStream dataIn = new FaweInputStream(new LZ4BlockInputStream(new FastByteArraysInputStream(dataOut.toByteArrays())));
+    //
+    // LegacyMapper remap = LegacyMapper.getInstance();
+    // BlockVector3 dimensions = fc.getDimensions();
+    // int length = dimensions.getBlockX() * dimensions.getBlockY() * dimensions.getBlockZ();
+    // if (adds == null) {
+    // for (int i = 0; i < length; i++) {
+    // fc.setBlock(i, remap.getBlockFromLegacyCombinedId(((idIn.read() & 0xFF) << 4) + (dataIn.read() & 0xF)));
+    // }
+    // } else {
+    // FaweInputStream addIn = new FaweInputStream(new LZ4BlockInputStream(new FastByteArraysInputStream(dataOut.toByteArrays())));
+    // for (int i = 0; i < length; i++) {
+    // fc.setBlock(i, remap.getBlockFromLegacyCombinedId(((addIn.read() & 0xFF) << 8) + ((idIn.read() & 0xFF) << 4) + (dataIn.read() & 0xF)));
+    // }
+    // addIn.close();
+    // }
+    // idIn.close();
+    // dataIn.close();
+    // }
 
     private void fixStates() {
         for (BlockVector3 pos : fc) {
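The readFully() body removed above decodes blocks from the legacy MCEdit schematic layout, in which each block is stored as a "combined id" made of an 8-bit block id and a 4-bit data value. The following is a minimal, dependency-free sketch of that packing for the simple case without the AddBlocks extension; the class and helper names here are illustrative and not part of FAWE.

public class LegacyCombinedIdExample {

    // Pack an 8-bit legacy block id and a 4-bit data value the same way the
    // removed readFully() combined them before passing the result to
    // LegacyMapper#getBlockFromLegacyCombinedId.
    static int combine(int id, int data) {
        return ((id & 0xFF) << 4) + (data & 0xF);
    }

    public static void main(String[] args) {
        int combined = combine(35, 14);      // legacy wool (id 35) with data value 14
        int id = (combined >> 4) & 0xFF;     // recover the block id
        int data = combined & 0xF;           // recover the data value
        System.out.println(combined + " -> id=" + id + ", data=" + data); // 574 -> id=35, data=14
    }
}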
@@ -330,23 +332,23 @@ public class SchematicStreamer extends NBTStreamer {
     }
 
     public void addDimensionReaders() {
-        addReader("Schematic.Height",
+        // addReader("Schematic.Height",
-                (BiConsumer<Integer, Short>) (index, value) -> height = (value));
+                // (BiConsumer<Integer, Short>) (index, value) -> height = (value));
-        addReader("Schematic.Width", (BiConsumer<Integer, Short>) (index, value) -> width = (value));
+        // addReader("Schematic.Width", (BiConsumer<Integer, Short>) (index, value) -> width = (value));
-        addReader("Schematic.Length",
+        // addReader("Schematic.Length",
-                (BiConsumer<Integer, Short>) (index, value) -> length = (value));
+                // (BiConsumer<Integer, Short>) (index, value) -> length = (value));
-        addReader("Schematic.WEOriginX",
+        // addReader("Schematic.WEOriginX",
-                (BiConsumer<Integer, Integer>) (index, value) -> originX = (value));
+                // (BiConsumer<Integer, Integer>) (index, value) -> originX = (value));
-        addReader("Schematic.WEOriginY",
+        // addReader("Schematic.WEOriginY",
-                (BiConsumer<Integer, Integer>) (index, value) -> originY = (value));
+                // (BiConsumer<Integer, Integer>) (index, value) -> originY = (value));
-        addReader("Schematic.WEOriginZ",
+        // addReader("Schematic.WEOriginZ",
-                (BiConsumer<Integer, Integer>) (index, value) -> originZ = (value));
+                // (BiConsumer<Integer, Integer>) (index, value) -> originZ = (value));
-        addReader("Schematic.WEOffsetX",
+        // addReader("Schematic.WEOffsetX",
-                (BiConsumer<Integer, Integer>) (index, value) -> offsetX = (value));
+                // (BiConsumer<Integer, Integer>) (index, value) -> offsetX = (value));
-        addReader("Schematic.WEOffsetY",
+        // addReader("Schematic.WEOffsetY",
-                (BiConsumer<Integer, Integer>) (index, value) -> offsetY = (value));
+                // (BiConsumer<Integer, Integer>) (index, value) -> offsetY = (value));
-        addReader("Schematic.WEOffsetZ",
+        // addReader("Schematic.WEOffsetZ",
-                (BiConsumer<Integer, Integer>) (index, value) -> offsetZ = (value));
+                // (BiConsumer<Integer, Integer>) (index, value) -> offsetZ = (value));
     }
 
     private int height;
@@ -401,7 +403,7 @@ public class SchematicStreamer extends NBTStreamer {
             setupClipboard(0);
             addDimensionReaders();
             addBlockReaders();
-            readFully();
+            // readFully();
             BlockVector3 min = BlockVector3.at(originX, originY, originZ);
             BlockVector3 offset = BlockVector3.at(offsetX, offsetY, offsetZ);
             BlockVector3 origin = min.subtract(offset);
@@ -419,4 +421,9 @@ public class SchematicStreamer extends NBTStreamer {
             throw e;
         }
     }
+
+    @Override
+    public void close() throws IOException {
+        this.input.close();
+    }
 }
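With SchematicStreamer now implementing Closeable and owning the NBTInputStream it is given, callers can release the underlying stream via try-with-resources. Below is a minimal usage sketch, assuming the FAWE/WorldEdit classes shown above are on the classpath and that the caller lives in the same package as SchematicStreamer (its package declaration is not shown in this diff); note that the reader wiring is still commented out (TODO FIXME) in this commit, so the addXxxReaders() calls are effectively no-ops here.

import com.sk89q.jnbt.NBTInputStream;

import java.io.FileInputStream;
import java.io.IOException;
import java.util.UUID;
import java.util.zip.GZIPInputStream;

public class SchematicStreamerUsage {
    public static void main(String[] args) throws IOException {
        // Legacy .schematic files are GZIP-compressed NBT.
        try (NBTInputStream nbt = new NBTInputStream(
                new GZIPInputStream(new FileInputStream("example.schematic")));
             SchematicStreamer streamer = new SchematicStreamer(nbt, UUID.randomUUID())) {
            streamer.addDimensionReaders();
            streamer.addBlockReaders();
            // close() on the streamer now closes the wrapped NBTInputStream,
            // so the file handle is released even if reading fails.
        }
    }
}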
@@ -2,8 +2,6 @@ package com.boydti.fawe.object.brush.visualization.cfi;
 
 import com.boydti.fawe.FaweCache;
 import com.boydti.fawe.beta.IChunkSet;
-import com.boydti.fawe.jnbt.NBTStreamer;
-import com.boydti.fawe.object.RunnableVal2;
 import com.boydti.fawe.object.collection.BitArray4096;
 import com.boydti.fawe.object.collection.BlockVector3ChunkMap;
 import com.boydti.fawe.object.io.FastByteArrayOutputStream;
@@ -11,10 +9,7 @@ import com.boydti.fawe.util.MathMan;
 import com.sk89q.jnbt.CompoundTag;
 import com.sk89q.jnbt.ListTag;
 import com.sk89q.jnbt.NBTConstants;
-import com.sk89q.jnbt.NBTInputStream;
 import com.sk89q.jnbt.NBTOutputStream;
-import com.sk89q.jnbt.StringTag;
-import com.sk89q.jnbt.Tag;
 import com.sk89q.worldedit.math.BlockVector2;
 import com.sk89q.worldedit.math.BlockVector3;
 import com.sk89q.worldedit.registry.state.Property;
@@ -26,14 +21,12 @@ import com.sk89q.worldedit.world.block.BlockStateHolder;
 import com.sk89q.worldedit.world.block.BlockType;
 import com.sk89q.worldedit.world.block.BlockTypes;
 
-import java.io.DataInputStream;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
@@ -76,112 +69,6 @@ public class WritableMCAChunk implements IChunkSet {
         public BlockState[] palette;
     }
 
-    public WritableMCAChunk(NBTInputStream nis, int chunkX, int chunkZ, boolean readPos) throws IOException {
-        this.chunkX = chunkX;
-        this.chunkZ = chunkZ;
-        NBTStreamer streamer = new NBTStreamer(nis);
-        streamer.addReader(".Level.InhabitedTime", new RunnableVal2<Integer, Long>() {
-            @Override
-            public void run(Integer index, Long value) {
-                inhabitedTime = value;
-            }
-        });
-        streamer.addReader(".Level.LastUpdate", new RunnableVal2<Integer, Long>() {
-            @Override
-            public void run(Integer index, Long value) {
-                lastUpdate = value;
-            }
-        });
-
-
-        Section section = new Section();
-        streamer.addReader(".Level.Sections.Y", new RunnableVal2<Integer, Byte>() {
-            @Override
-            public void run(Integer index, Byte y) {
-                section.layer = y;
-                readLayer(section);
-            }
-        });
-        streamer.addReader(".Level.Sections.Palette", NBTStreamer.ReadType.ELEM,new RunnableVal2<Integer, CompoundTag>() {
-            @Override
-            public void run(Integer index, CompoundTag compound) {
-                String name = compound.getString("Name");
-                BlockType type = BlockTypes.get(name);
-                BlockState state = type.getDefaultState();
-                CompoundTag properties = (CompoundTag) compound.getValue().get("Properties");
-                if (properties != null) {
-                    for (Map.Entry<String, Tag> entry : properties.getValue().entrySet()) {
-                        String key = entry.getKey();
-                        String value = ((StringTag) entry.getValue()).getValue();
-                        Property property = type.getProperty(key);
-                        state = state.with(property, property.getValueFor(value));
-                    }
-                }
-                section.palette[index] = state;
-                readLayer(section);
-            }
-        });
-        streamer.addReader(".Level.Sections", NBTStreamer.ReadType.INFO,new RunnableVal2<Integer, Integer>() {
-            @Override
-            public void run(Integer value1, Integer value2) {
-                section.layer = -1;
-            }
-        });
-        streamer.addReader(".Level.Sections.BlockStates", new RunnableVal2<Integer, long[]>() {
-            @Override
-            public void run(Integer value1, long[] values) {
-                section.blocks = values;
-                readLayer(section);
-            }
-        });
-        streamer.addReader(".Level.TileEntities", NBTStreamer.ReadType.ELEM,new RunnableVal2<Integer, CompoundTag>() {
-            @Override
-            public void run(Integer index, CompoundTag tile) {
-                int x = tile.getInt("x") & 15;
-                int y = tile.getInt("y");
-                int z = tile.getInt("z") & 15;
-                tiles.put(x, y, z, tile);
-            }
-        });
-        streamer.addReader(".Level.Entities", NBTStreamer.ReadType.ELEM,new RunnableVal2<Integer, CompoundTag>() {
-            @Override
-            public void run(Integer index, CompoundTag entityTag) {
-                long least = entityTag.getLong("UUIDLeast");
-                long most = entityTag.getLong("UUIDMost");
-                entities.put(new UUID(most, least), entityTag);
-            }
-        });
-        streamer.addReader(".Level.Biomes", new RunnableVal2<Integer, byte[]>() {
-            @Override
-            public void run(Integer index, byte[] value) {
-                for (int i = 0; i < 256; i++) {
-                    biomes[i] = value[i];
-                }
-            }
-        });
-        // streamer.addReader(".Level.HeightMap", new RunnableVal2<Integer, int[]>() {
-        // @Override
-        // public void run(Integer index, int[] value) {
-        // heightMap = value;
-        // }
-        // });
-        if (readPos) {
-            streamer.addReader(".Level.xPos", new RunnableVal2<Integer, Integer>() {
-                @Override
-                public void run(Integer index, Integer value) {
-                    WritableMCAChunk.this.chunkX = value;
-                }
-            });
-            streamer.addReader(".Level.zPos", new RunnableVal2<Integer, Integer>() {
-                @Override
-                public void run(Integer index, Integer value) {
-                    WritableMCAChunk.this.chunkZ = value;
-                }
-            });
-        }
-        streamer.readFully();
-    }
-
     public int getX() {
         return chunkX;
     }
@@ -1,681 +0,0 @@
-package com.sk89q.jnbt.anvil;
-
-import com.boydti.fawe.Fawe;
-import com.boydti.fawe.beta.IBlocks;
-import com.boydti.fawe.jnbt.NBTStreamer;
-import com.boydti.fawe.object.RunnableVal;
-import com.boydti.fawe.object.RunnableVal4;
-import com.boydti.fawe.object.brush.visualization.cfi.WritableMCAChunk;
-import com.boydti.fawe.object.collection.CleanableThreadLocal;
-import com.boydti.fawe.object.exception.FaweException;
-import com.boydti.fawe.object.io.BufferedRandomAccessFile;
-import com.boydti.fawe.object.io.FastByteArrayInputStream;
-import com.boydti.fawe.util.MainUtil;
-import com.boydti.fawe.util.MathMan;
-import com.sk89q.jnbt.NBTInputStream;
-import com.sk89q.worldedit.world.World;
-import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
-import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ForkJoinPool;
-import java.util.concurrent.TimeUnit;
-import java.util.zip.Inflater;
-import java.util.zip.InflaterInputStream;
-
-/**
- * Chunk format: http://minecraft.gamepedia.com/Chunk_format#Entity_format
- * e.g.: `.Level.Entities.#` (Starts with a . as the root tag is unnamed)
- */
-public class MCAFile {
-
-    private static Field fieldBuf2;
-    private static Field fieldBuf3;
-
-    static {
-        try {
-            fieldBuf2 = InflaterInputStream.class.getDeclaredField("buf");
-            fieldBuf2.setAccessible(true);
-            fieldBuf3 = NBTInputStream.class.getDeclaredField("buf");
-            fieldBuf3.setAccessible(true);
-        } catch (Throwable e) {
-            e.printStackTrace();
-        }
-    }
-
-    private final World world;
-    private final File file;
-    private RandomAccessFile raf;
-    private byte[] locations;
-    private boolean deleted;
-    private final int X, Z;
-    private final Int2ObjectOpenHashMap<WritableMCAChunk> chunks = new Int2ObjectOpenHashMap<>();
-
-    final ThreadLocal<byte[]> byteStore1 = new ThreadLocal<byte[]>() {
-        @Override
-        protected byte[] initialValue() {
-            return new byte[4096];
-        }
-    };
-    final ThreadLocal<byte[]> byteStore2 = new ThreadLocal<byte[]>() {
-        @Override
-        protected byte[] initialValue() {
-            return new byte[4096];
-        }
-    };
-    final ThreadLocal<byte[]> byteStore3 = new ThreadLocal<byte[]>() {
-        @Override
-        protected byte[] initialValue() {
-            return new byte[1024];
-        }
-    };
-
-    public MCAFile(World world, File file) throws FileNotFoundException {
-        this.world = world;
-        this.file = file;
-        if (!file.exists()) {
-            throw new FileNotFoundException(file.getName());
-        }
-        String[] split = file.getName().split("\\.");
-        X = Integer.parseInt(split[1]);
-        Z = Integer.parseInt(split[2]);
-    }
-
-    public MCAFile(World world, int mcrX, int mcrZ) {
-        this(world, mcrX, mcrZ, new File(world.getStoragePath().toFile(), "r." + mcrX + "." + mcrZ + ".mca"));
-    }
-
-    public MCAFile(World world, int mcrX, int mcrZ, File file) {
-        this.world = world;
-        this.file = file;
-        X = mcrX;
-        Z = mcrZ;
-    }
-
-    public void clear() {
-        if (raf != null) {
-            try {
-                raf.close();
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }
-        synchronized (chunks) {
-            chunks.clear();
-        }
-        locations = null;
-        CleanableThreadLocal.clean(byteStore1);
-        CleanableThreadLocal.clean(byteStore2);
-        CleanableThreadLocal.clean(byteStore3);
-    }
-
-    @Override
-    protected void finalize() throws Throwable {
-        CleanableThreadLocal.clean(byteStore1);
-        CleanableThreadLocal.clean(byteStore2);
-        CleanableThreadLocal.clean(byteStore3);
-        super.finalize();
-    }
-
-    public void setDeleted(boolean deleted) {
-        this.deleted = deleted;
-    }
-
-    public boolean isDeleted() {
-        return deleted;
-    }
-
-    public World getWorld() {
-        return world;
-    }
-
-    /**
-     * Loads the location header from disk
-     */
-    public void init() {
-        try {
-            if (raf == null) {
-                this.locations = new byte[4096];
-                if (file != null) {
-                    this.raf = new RandomAccessFile(file, "rw");
-                    if (raf.length() < 8192) {
-                        raf.setLength(8192);
-                    } else {
-                        raf.seek(0);
-                        raf.readFully(locations);
-                    }
-                }
-            }
-        } catch (Throwable e) {
-            e.printStackTrace();
-        }
-    }
-
-    public int getX() {
-        return X;
-    }
-
-    public int getZ() {
-        return Z;
-    }
-
-    public RandomAccessFile getRandomAccessFile() {
-        return raf;
-    }
-
-    public File getFile() {
-        return file;
-    }
-
-    public WritableMCAChunk getCachedChunk(int cx, int cz) {
-        int pair = MathMan.pair((short) (cx & 31), (short) (cz & 31));
-        synchronized (chunks) {
-            return chunks.get(pair);
-        }
-    }
-
-    public void setChunk(WritableMCAChunk chunk) {
-        int cx = chunk.getX();
-        int cz = chunk.getZ();
-        int pair = MathMan.pair((short) (cx & 31), (short) (cz & 31));
-        synchronized (chunks) {
-            chunks.put(pair, chunk);
-        }
-    }
-
-    public WritableMCAChunk getChunk(int cx, int cz) throws IOException {
-        WritableMCAChunk cached = getCachedChunk(cx, cz);
-        if (cached != null) {
-            return cached;
-        } else {
-            return readChunk(cx, cz);
-        }
-    }
-
-    public WritableMCAChunk readChunk(int cx, int cz) throws IOException {
-        int i = ((cx & 31) << 2) + ((cz & 31) << 7);
-        int offset = (((locations[i] & 0xFF) << 16) + ((locations[i + 1] & 0xFF) << 8) + ((locations[i + 2] & 0xFF))) << 12;
-        int size = (locations[i + 3] & 0xFF) << 12;
-        if (offset == 0) {
-            return null;
-        }
-        NBTInputStream nis = getChunkIS(offset);
-        WritableMCAChunk chunk = new WritableMCAChunk(nis, cx, cz, false);
-        nis.close();
-        int pair = MathMan.pair((short) (cx & 31), (short) (cz & 31));
-        synchronized (chunks) {
-            chunks.put(pair, chunk);
-        }
-        return chunk;
-    }
-
-    /**
-     * CX, CZ, OFFSET, SIZE
-     *
-     * @param onEach
-     * @throws IOException
-     */
-    public void forEachSortedChunk(RunnableVal4<Integer, Integer, Integer, Integer> onEach) throws IOException {
-        char[] offsets = new char[(int) (raf.length() / 4096) - 2];
-        Arrays.fill(offsets, Character.MAX_VALUE);
-        char i = 0;
-        for (int z = 0; z < 32; z++) {
-            for (int x = 0; x < 32; x++, i += 4) {
-                int offset = (((locations[i] & 0xFF) << 16) + ((locations[i + 1] & 0xFF) << 8) + ((locations[i + 2] & 0xFF))) - 2;
-                int size = locations[i + 3] & 0xFF;
-                if (size != 0) {
-                    if (offset < offsets.length) {
-                        offsets[offset] = i;
-                    } else {
-                        Fawe.debug("Ignoring invalid offset " + offset);
-                    }
-                }
-            }
-        }
-        for (i = 0; i < offsets.length; i++) {
-            int index = offsets[i];
-            if (index != Character.MAX_VALUE) {
-                int offset = i + 2;
-                int size = locations[index + 3] & 0xFF;
-                int index2 = index >> 2;
-                int x = (index2) & 31;
-                int z = (index2) >> 5;
-                onEach.run(x, z, offset << 12, size << 12);
-            }
-        }
-    }
-
-    /**
-     * @param onEach cx, cz, offset, size
-     */
-    public void forEachChunk(RunnableVal4<Integer, Integer, Integer, Integer> onEach) {
-        int i = 0;
-        for (int z = 0; z < 32; z++) {
-            for (int x = 0; x < 32; x++, i += 4) {
-                int offset = (((locations[i] & 0xFF) << 16) + ((locations[i + 1] & 0xFF) << 8) + ((locations[i + 2] & 0xFF)));
-                int size = locations[i + 3] & 0xFF;
-                if (size != 0) {
-                    onEach.run(x, z, offset << 12, size << 12);
-                }
-            }
-        }
-    }
-
-    public void forEachChunk(RunnableVal<WritableMCAChunk> onEach) {
-        int i = 0;
-        for (int z = 0; z < 32; z++) {
-            for (int x = 0; x < 32; x++, i += 4) {
-                int offset = (((locations[i] & 0xFF) << 16) + ((locations[i + 1] & 0xFF) << 8) + ((locations[i + 2] & 0xFF)));
-                int size = locations[i + 3] & 0xFF;
-                if (size != 0) {
-                    try {
-                        onEach.run(getChunk(x, z));
-                    } catch (Throwable ignore) {
-                    }
-                }
-            }
-        }
-    }
-
-    public int getOffset(int cx, int cz) {
-        int i = ((cx & 31) << 2) + ((cz & 31) << 7);
-        int offset = (((locations[i] & 0xFF) << 16) + ((locations[i + 1] & 0xFF) << 8) + ((locations[i + 2] & 0xFF)));
-        return offset << 12;
-    }
-
-    public int getSize(int cx, int cz) {
-        int i = ((cx & 31) << 2) + ((cz & 31) << 7);
-        return (locations[i + 3] & 0xFF) << 12;
-    }
-
-    public List<Integer> getChunks() {
-        final List<Integer> values;
-        synchronized (chunks) {
-            values = new ArrayList<>(chunks.size());
-        }
-        for (int i = 0; i < locations.length; i += 4) {
-            int offset = (((locations[i] & 0xFF) << 16) + ((locations[i + 1] & 0xFF) << 8) + ((locations[i + 2] & 0xFF)));
-            values.add(offset);
-        }
-        return values;
-    }
-
-    public byte[] getChunkCompressedBytes(int offset) throws IOException {
-        if (offset == 0) {
-            return null;
-        }
-        synchronized (raf) {
-            raf.seek(offset);
-            int size = raf.readInt();
-            int compression = raf.read();
-            byte[] data = new byte[size];
-            raf.readFully(data);
-            return data;
-        }
-    }
-
-    private NBTInputStream getChunkIS(int offset) throws IOException {
-        try {
-            byte[] data = getChunkCompressedBytes(offset);
-            FastByteArrayInputStream bais = new FastByteArrayInputStream(data);
-            InflaterInputStream iis = new InflaterInputStream(bais, new Inflater(), 1);
-            fieldBuf2.set(iis, byteStore2.get());
-            BufferedInputStream bis = new BufferedInputStream(iis);
-            NBTInputStream nis = new NBTInputStream(bis);
-            fieldBuf3.set(nis, byteStore3.get());
-            return nis;
-        } catch (IllegalAccessException unlikely) {
-            unlikely.printStackTrace();
-            return null;
-        }
-    }
-
-    public void streamChunk(int cx, int cz, RunnableVal<NBTStreamer> addReaders) throws IOException {
-        streamChunk(getOffset(cx, cz), addReaders);
-    }
-
-    public void streamChunk(int offset, RunnableVal<NBTStreamer> withStream) throws IOException {
-        byte[] data = getChunkCompressedBytes(offset);
-        streamChunk(data, withStream);
-    }
-
-    public void streamChunk(byte[] data, RunnableVal<NBTStreamer> withStream) throws IOException {
-        if (data != null) {
-            try {
-                FastByteArrayInputStream nbtIn = new FastByteArrayInputStream(data);
-                FastByteArrayInputStream bais = new FastByteArrayInputStream(data);
-                InflaterInputStream iis = new InflaterInputStream(bais, new Inflater(), 1);
-                fieldBuf2.set(iis, byteStore2.get());
-                BufferedInputStream bis = new BufferedInputStream(iis);
-                NBTInputStream nis = new NBTInputStream(bis);
-                fieldBuf3.set(nis, byteStore3.get());
-                NBTStreamer streamer = new NBTStreamer(nis);
-                withStream.run(streamer);
-                streamer.readQuick();
-            } catch (IllegalAccessException unlikely) {
-                unlikely.printStackTrace();
-            }
-        }
-    }
-
-    /**
-     * @param onEach chunk
-     */
-    public void forEachCachedChunk(RunnableVal<WritableMCAChunk> onEach) {
-        synchronized (chunks) {
-            for (Map.Entry<Integer, WritableMCAChunk> entry : chunks.entrySet()) {
-                onEach.run(entry.getValue());
-            }
-        }
-    }
-
-    public List<WritableMCAChunk> getCachedChunks() {
-        synchronized (chunks) {
-            return new ArrayList<>(chunks.values());
-        }
-    }
-
-    public void uncache(int cx, int cz) {
-        int pair = MathMan.pair((short) (cx & 31), (short) (cz & 31));
-        synchronized (chunks) {
-            chunks.remove(pair);
-        }
-    }
-
-    private byte[] toBytes(WritableMCAChunk chunk) throws Exception {
-        if (chunk.isDeleted()) {
-            return null;
-        }
-        byte[] uncompressed = chunk.toBytes(byteStore3.get());
-        byte[] compressed = MainUtil.compress(uncompressed, byteStore2.get(), null);
-        return compressed;
-    }
-
-    private byte[] getChunkBytes(int cx, int cz) throws Exception {
-        WritableMCAChunk mca = getCachedChunk(cx, cz);
-        if (mca == null) {
-            int offset = getOffset(cx, cz);
-            if (offset == 0) {
-                return null;
-            }
-            return getChunkCompressedBytes(offset);
-        }
-        return toBytes(mca);
-    }
-
-
-    private void writeSafe(RandomAccessFile raf, int offset, byte[] data) throws IOException {
-        int len = data.length + 5;
-        raf.seek(offset);
-        if (raf.length() - offset < len) {
-            raf.setLength(((offset + len + 4095) / 4096) * 4096);
-        }
-        // Length of remaining data
-        raf.writeInt(data.length + 1);
-        // Compression type
-        raf.write(2);
-        raf.write(data);
-    }
-
-    private void writeHeader(RandomAccessFile raf, int cx, int cz, int offsetMedium, int sizeByte, boolean writeTime) throws IOException {
-        int i = ((cx & 31) << 2) + ((cz & 31) << 7);
-        locations[i] = (byte) (offsetMedium >> 16);
-        locations[i + 1] = (byte) (offsetMedium >> 8);
-        locations[i + 2] = (byte) (offsetMedium);
-        locations[i + 3] = (byte) sizeByte;
-        raf.seek(i);
-        raf.write((offsetMedium >> 16));
-        raf.write((offsetMedium >> 8));
-        raf.write((offsetMedium >> 0));
-        raf.write(sizeByte);
-        raf.seek(i + 4096);
-        if (offsetMedium == 0 && sizeByte == 0) {
-            raf.writeInt(0);
-        } else {
-            raf.writeInt((int) (System.currentTimeMillis() / 1000L));
-        }
-    }
-
-    public void close(ForkJoinPool pool) {
-        if (raf == null) return;
-        synchronized (raf) {
-            if (raf != null) {
-                flush(pool);
-                try {
-                    raf.close();
-                } catch (IOException e) {
-                    e.printStackTrace();
-                }
-                raf = null;
-                locations = null;
-            }
-        }
-    }
-
-    public boolean isModified() {
-        if (isDeleted()) {
-            return true;
-        }
-        synchronized (chunks) {
-            for (Int2ObjectMap.Entry<WritableMCAChunk> entry : chunks.int2ObjectEntrySet()) {
-                WritableMCAChunk chunk = entry.getValue();
-                if (chunk.isModified() || chunk.isDeleted()) {
-                    return true;
-                }
-            }
-        }
-        return false;
-    }
-
-    /**
-     * Write the chunk to the file
-     * @param pool
-     */
-    public void flush(ForkJoinPool pool) {
-        synchronized (raf) {
-            // If the file is marked as deleted, nothing is written
-            if (isDeleted()) {
-                clear();
-                file.delete();
-                return;
-            }
-
-            boolean wait; // If the flush method needs to wait for the pool
-            if (pool == null) {
-                wait = true;
-                pool = new ForkJoinPool();
-            } else wait = false;
-
-            // Chunks that need to be relocated
-            Int2ObjectOpenHashMap<byte[]> relocate = new Int2ObjectOpenHashMap<>();
-            // The position of each chunk
-            final Int2ObjectOpenHashMap<Integer> offsetMap = new Int2ObjectOpenHashMap<>(); // Offset -> <byte cx, byte cz, short size>
-            // The data of each modified chunk
-            final Int2ObjectOpenHashMap<byte[]> compressedMap = new Int2ObjectOpenHashMap<>();
-            // The data of each chunk that needs to be moved
-            final Int2ObjectOpenHashMap<byte[]> append = new Int2ObjectOpenHashMap<>();
-            boolean modified = false;
-            // Get the current time for the chunk timestamp
-            long now = System.currentTimeMillis();
-
-            // Load the chunks into the append or compressed map
-            for (WritableMCAChunk chunk : getCachedChunks()) {
-                if (chunk.isModified() || chunk.isDeleted()) {
-                    modified = true;
-                    chunk.setLastUpdate(now);
-                    if (!chunk.isDeleted()) {
-                        pool.submit(new Runnable() {
-                            @Override
-                            public void run() {
-                                try {
-                                    byte[] compressed = toBytes(chunk);
-                                    int pair = MathMan.pair((short) (chunk.getX() & 31), (short) (chunk.getZ() & 31));
-                                    Int2ObjectOpenHashMap map;
-                                    if (getOffset(chunk.getX(), chunk.getZ()) == 0) {
-                                        map = append;
-                                    } else {
-                                        map = compressedMap;
-                                    }
-                                    synchronized (map) {
-                                        map.put(pair, compressed);
-                                    }
-                                } catch (Throwable e) {
-                                    e.printStackTrace();
-                                }
-                            }
-                        });
-                    }
-                }
-            }
-
-            // If any changes were detected
-            if (modified) {
-                file.setLastModified(now);
-
-                // Load the offset data into the offset map
-                forEachChunk(new RunnableVal4<Integer, Integer, Integer, Integer>() {
-                    @Override
-                    public void run(Integer cx, Integer cz, Integer offset, Integer size) {
-                        short pair1 = MathMan.pairByte((byte) (cx & 31), (byte) (cz & 31));
-                        short pair2 = (short) (size >> 12);
-                        offsetMap.put((int) offset, (Integer) MathMan.pair(pair1, pair2));
-                    }
-                });
-                // Wait for previous tasks
-                pool.awaitQuiescence(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
-
-
-                int start = 8192;
-                int written = start;
-                int end = 8192;
-                int nextOffset = 8192;
-                try {
-                    for (int count = 0; count < offsetMap.size(); count++) {
-                        // Get the previous position of the next chunk
-                        Integer loc = offsetMap.get(nextOffset);
-                        while (loc == null) {
-                            nextOffset += 4096;
-                            loc = offsetMap.get(nextOffset);
-                        }
-                        int offset = nextOffset;
-
-                        // Get the x/z from the paired location
-                        short cxz = MathMan.unpairX(loc);
-                        int cx = MathMan.unpairShortX(cxz);
-                        int cz = MathMan.unpairShortY(cxz);
-
-                        // Get the size from the pair
-                        int size = MathMan.unpairY(loc) << 12;
-
-                        nextOffset += size;
-                        end = Math.min(start + size, end);
-                        int pair = MathMan.pair((short) (cx & 31), (short) (cz & 31));
-                        byte[] newBytes = relocate.get(pair);
-
-                        // newBytes is null if the chunk isn't modified or marked for moving
-                        if (newBytes == null) {
-                            WritableMCAChunk cached = getCachedChunk(cx, cz);
-                            // If the previous offset marks the current write position (start) then we only write the header
-                            if (offset == start) {
-                                if (cached == null || !cached.isModified()) {
-                                    writeHeader(raf, cx, cz, start >> 12, size >> 12, true);
-                                    start += size;
-                                    written = start + size;
-                                    continue;
-                                } else {
-                                    newBytes = compressedMap.get(pair);
-                                }
-                            } else {
-                                // The chunk needs to be moved, fetch the data if necessary
-                                newBytes = compressedMap.get(pair);
-                                if (newBytes == null) {
-                                    if (cached == null || !cached.isDeleted()) {
-                                        newBytes = getChunkCompressedBytes(getOffset(cx, cz));
-                                    }
-                                }
-                            }
-                        }
-
-                        if (newBytes == null) {
-                            writeHeader(raf, cx, cz, 0, 0, false);
-                            continue;
-                        }
-
-                        // The length to be written (compressed data + 5 byte chunk header)
-                        int len = newBytes.length + 5;
-                        int oldSize = (size + 4095) >> 12;
-                        int newSize = (len + 4095) >> 12;
-                        int nextOffset2 = end;
-
-                        // If the current write position (start) + length of data to write (len) are longer than the position of the next chunk, we need to move the next chunks
-                        while (start + len > end) {
-                            Integer nextLoc = offsetMap.get(nextOffset2);
-                            if (nextLoc != null) {
-                                short nextCXZ = MathMan.unpairX(nextLoc);
-                                int nextCX = MathMan.unpairShortX(nextCXZ);
-                                int nextCZ = MathMan.unpairShortY(nextCXZ);
-                                WritableMCAChunk cached = getCachedChunk(nextCX, nextCZ);
-                                if (cached == null || !cached.isModified()) {
-                                    byte[] nextBytes = getChunkCompressedBytes(nextOffset2);
-                                    relocate.put(MathMan.pair((short) (nextCX & 31), (short) (nextCZ & 31)), nextBytes);
-                                }
-                                int nextSize = MathMan.unpairY(nextLoc) << 12;
-                                end += nextSize;
-                                nextOffset2 += nextSize;
-                            } else {
-                                end += 4096;
-                                nextOffset2 += 4096;
-                            }
-                        }
-                        // Write the chunk + chunk header
-                        writeSafe(raf, start, newBytes);
-                        // Write the location data (beginning of file)
-                        writeHeader(raf, cx, cz, start >> 12, newSize, true);
-
-                        written = start + newBytes.length + 5;
-                        start += newSize << 12;
-                    }
-
-                    // Write all the chunks which need to be appended
-                    if (!append.isEmpty()) {
-                        for (Int2ObjectMap.Entry<byte[]> entry : append.int2ObjectEntrySet()) {
-                            int pair = entry.getIntKey();
-                            short cx = MathMan.unpairX(pair);
-                            short cz = MathMan.unpairY(pair);
-                            byte[] bytes = entry.getValue();
-                            int len = bytes.length + 5;
-                            int newSize = (len + 4095) >> 12;
-                            writeSafe(raf, start, bytes);
-                            writeHeader(raf, cx, cz, start >> 12, newSize, true);
-                            written = start + bytes.length + 5;
-                            start += newSize << 12;
-                        }
-                    }
-                    // Round the file length, since the vanilla server doesn't like it for some reason
-                    raf.setLength(4096 * ((written + 4095) / 4096));
-                    if (raf instanceof BufferedRandomAccessFile) {
-                        ((BufferedRandomAccessFile) raf).flush();
-                    }
-                    raf.close();
-                } catch (Throwable e) {
-                    e.printStackTrace();
-                }
-                if (wait) {
-                    pool.shutdown();
-                    pool.awaitQuiescence(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
-                }
-            }
-        }
-        CleanableThreadLocal.clean(byteStore1);
-        CleanableThreadLocal.clean(byteStore2);
-        CleanableThreadLocal.clean(byteStore3);
-    }
-}
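For reference, the region-file lookup that the deleted MCAFile.readChunk performed follows the standard Anvil header layout: 1024 four-byte location entries, each holding a 3-byte sector offset and a 1-byte sector count, with 4 KiB sectors. Below is a small, dependency-free sketch of that decoding; the class and variable names are illustrative, not part of FAWE.

public class RegionHeaderLookup {

    // Returns {offsetInBytes, sizeInBytes} for chunk (cx, cz) using the same
    // location-table math as the deleted MCAFile: 4 bytes per entry,
    // 3 bytes of sector offset followed by 1 byte of sector count.
    static int[] locate(byte[] locations, int cx, int cz) {
        int i = ((cx & 31) << 2) + ((cz & 31) << 7);          // 4 * (x + 32 * z)
        int offset = (((locations[i] & 0xFF) << 16)
                + ((locations[i + 1] & 0xFF) << 8)
                + (locations[i + 2] & 0xFF)) << 12;           // sectors -> bytes
        int size = (locations[i + 3] & 0xFF) << 12;           // sectors -> bytes
        return new int[]{offset, size};
    }

    public static void main(String[] args) {
        byte[] header = new byte[4096];
        // Hypothetical entry: chunk (1, 0) stored at sector 2, one sector long.
        int i = (1 & 31) << 2;
        header[i + 2] = 2;   // offset = 2 sectors
        header[i + 3] = 1;   // size   = 1 sector
        int[] loc = locate(header, 1, 0);
        System.out.println("offset=" + loc[0] + " size=" + loc[1]);  // offset=8192 size=4096
    }
}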
@@ -19,10 +19,7 @@
 
 package com.sk89q.worldedit.extent.clipboard.io;
 
-import static com.google.common.base.Preconditions.checkNotNull;
+import com.boydti.fawe.jnbt.streamer.IntValueReader;
-
-import com.boydti.fawe.jnbt.NBTStreamer;
-import com.boydti.fawe.object.clipboard.LinearClipboard;
 import com.boydti.fawe.util.IOUtil;
 import com.google.common.collect.Maps;
 import com.sk89q.jnbt.CompoundTag;
@@ -36,7 +33,6 @@ import com.sk89q.worldedit.WorldEdit;
 import com.sk89q.worldedit.entity.BaseEntity;
 import com.sk89q.worldedit.entity.Entity;
 import com.sk89q.worldedit.extension.platform.Capability;
-import com.sk89q.worldedit.extent.clipboard.BlockArrayClipboard;
 import com.sk89q.worldedit.extent.clipboard.Clipboard;
 import com.sk89q.worldedit.math.BlockVector2;
 import com.sk89q.worldedit.math.BlockVector3;
@@ -45,8 +41,10 @@ import com.sk89q.worldedit.world.biome.BiomeType;
 import com.sk89q.worldedit.world.biome.BiomeTypes;
 import com.sk89q.worldedit.world.block.BaseBlock;
 import com.sk89q.worldedit.world.block.BlockState;
-import com.sk89q.worldedit.world.block.BlockStateHolder;
 import com.sk89q.worldedit.world.block.BlockTypes;
+import net.jpountz.lz4.LZ4BlockInputStream;
+import net.jpountz.lz4.LZ4BlockOutputStream;
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutput;
@@ -59,8 +57,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.stream.Collectors;
-import net.jpountz.lz4.LZ4BlockInputStream;
-import net.jpountz.lz4.LZ4BlockOutputStream;
+import static com.google.common.base.Preconditions.checkNotNull;
 
 /**
  * Writes schematic files using the Sponge schematic format.
@@ -242,9 +240,9 @@ public class SpongeSchematicWriter implements ClipboardWriter {
         int[] palette = new int[BiomeTypes.getMaxId() + 1];
         Arrays.fill(palette, Integer.MAX_VALUE);
         int[] paletteMax = {0};
-        NBTStreamer.ByteReader task = new NBTStreamer.ByteReader() {
+        IntValueReader task = new IntValueReader() {
             @Override
-            public void run(int index, int ordinal) {
+            public void applyInt(int index, int ordinal) {
                 try {
                     int value = palette[ordinal];
                     if (value == Integer.MAX_VALUE) {
@@ -268,7 +266,7 @@ public class SpongeSchematicWriter implements ClipboardWriter {
                 int x0 = min.getBlockX() + x;
                 BlockVector2 pt = BlockVector2.at(x0, z0);
                 BiomeType biome = clipboard.getBiome(pt);
-                task.run(i, biome.getInternalId());
+                task.applyInt(i, biome.getInternalId());
             }
         }
         biomesOut.close();