Mirror of https://github.com/IntellectualSites/FastAsyncWorldEdit.git, synced 2024-11-05 19:10:07 +01:00
This commit is contained in:
Jesse Boyd 2019-10-31 21:04:02 +01:00
Parent b533408022
Commit 9cf02fe654
No GPG key could be found for this signature
GPG key ID: 59F1DE6293AF6E1F
20 changed files with 928 additions and 298 deletions

View File

@@ -58,7 +58,7 @@ public class BukkitGetBlocks_1_14 extends CharGetBlocks
public Chunk nmsChunk;
public CraftWorld world;
public int X, Z;
private boolean forceLoad;
// private boolean forceLoad;
public BukkitGetBlocks_1_14(World world, int X, int Z, boolean forceLoad) {
this.world = (CraftWorld) world;
@@ -76,6 +76,15 @@ public class BukkitGetBlocks_1_14 extends CharGetBlocks
// }
// }
public int getX() {
return X;
}
public int getZ() {
return Z;
}
@Override
public BiomeType getBiomeType(int x, int z) {
BiomeBase base = getChunk().getBiomeIndex()[(z << 4) + x];

View File

@@ -57,6 +57,8 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import javax.annotation.Nullable;
import org.bukkit.Bukkit;
import org.bukkit.Effect;
import org.bukkit.TreeType;
import org.bukkit.World;

View File

@@ -193,13 +193,13 @@ public interface IQueueExtent extends Flushable, Trimable, Extent, IBatchProcess
*/
boolean isEmpty();
default ChunkFilterBlock apply(ChunkFilterBlock block, Filter filter, Region region, int X, int Z) {
if (!filter.appliesChunk(X, Z)) {
default ChunkFilterBlock apply(ChunkFilterBlock block, Filter filter, Region region, int chunkX, int chunkZ) {
if (!filter.appliesChunk(chunkX, chunkZ)) {
return block;
}
IChunk chunk = this.getOrCreateChunk(X, Z);
IChunk chunk = this.getOrCreateChunk(chunkX, chunkZ);
// Initialize
chunk.init(this, X, Z);
chunk.init(this, chunkX, chunkZ);
IChunk newChunk = filter.applyChunk(chunk, region);
if (newChunk != null) {
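The default apply(...) above funnels a Filter through appliesChunk, getOrCreateChunk, init and applyChunk. Purely as an illustration (not part of this commit), a filter that only overrides applyBlock — the same shape as the anonymous Filter used in Schematic.paste later in this diff — might look like the sketch below; it assumes Filter's remaining methods have usable defaults.

import com.boydti.fawe.beta.Filter;
import com.boydti.fawe.beta.FilterBlock;
import java.util.concurrent.atomic.AtomicLong;

// Hypothetical sketch: a Filter that simply counts the blocks it is handed.
public class CountingFilter implements Filter {
    private final AtomicLong visited = new AtomicLong();

    @Override
    public void applyBlock(FilterBlock block) {
        visited.incrementAndGet(); // called once per block the queue extent visits
    }

    public long getVisited() {
        return visited.get();
    }
}

A queue extent would then drive such a filter chunk by chunk via apply(block, filter, region, chunkX, chunkZ).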

View File

@@ -1,105 +0,0 @@
package com.boydti.fawe.beta;
import com.sk89q.jnbt.CompoundTag;
import com.sk89q.worldedit.extent.Extent;
import com.sk89q.worldedit.math.BlockVector3;
import com.sk89q.worldedit.world.biome.BiomeType;
import com.sk89q.worldedit.world.block.BaseBlock;
import com.sk89q.worldedit.world.block.BlockState;
public class NorthVector extends BlockVector3 {
private final BlockVector3 parent;
public NorthVector(BlockVector3 parent) {
this.parent = parent;
}
// @Override
// public BlockVector3 south(BlockVector3 orDefault) {
// return parent;
// }
@Override
public int getX() {
return parent.getX();
}
@Override
public int getY() {
return parent.getY();
}
@Override
public int getZ() {
return parent.getZ();
}
@Override
public boolean setOrdinal(Extent orDefault, int ordinal) {
return orDefault.setBlock(this, BlockState.getFromOrdinal(ordinal));
}
@Override
public boolean setBlock(Extent orDefault, BlockState state) {
return orDefault.setBlock(this, state);
}
@Override
public boolean setFullBlock(Extent orDefault, BaseBlock block) {
return orDefault.setBlock(this, block);
}
@Override
public boolean setBiome(Extent orDefault, BiomeType biome) {
return orDefault.setBiome(getX(), getY(), getZ(), biome);
}
@Override
public int getOrdinal(Extent orDefault) {
return getBlock(orDefault).getOrdinal();
}
@Override
public char getOrdinalChar(Extent orDefault) {
return (char) getOrdinal(orDefault);
}
@Override
public BlockState getBlock(Extent orDefault) {
return orDefault.getBlock(this);
}
@Override
public BaseBlock getFullBlock(Extent orDefault) {
return orDefault.getFullBlock(this);
}
@Override
public CompoundTag getNbtData(Extent orDefault) {
return orDefault.getFullBlock(getX(), getY(), getZ()).getNbtData();
}
@Override
public BlockState getOrdinalBelow(Extent orDefault) {
return getStateRelative(orDefault, 0, -1, 0);
}
@Override
public BlockState getStateAbove(Extent orDefault) {
return getStateRelative(orDefault, 0, 1, 0);
}
@Override
public BlockState getStateRelativeY(Extent orDefault, int y) {
return getStateRelative(orDefault, 0, y, 0);
}
public BlockState getStateRelative(Extent orDefault, int x, int y, int z) {
return getFullBlockRelative(orDefault, x, y, z).toBlockState();
}
public BaseBlock getFullBlockRelative(Extent orDefault, int x, int y, int z) {
return orDefault.getFullBlock(x + getX(), y + getY(), z + getZ());
}
}

View File

@@ -64,21 +64,18 @@ public class SchematicStreamer extends NBTStreamer {
NBTStreamReader<? extends Integer, ? extends Integer> idInit = new NBTStreamReader<Integer, Integer>() {
@Override
public void accept(Integer length, Integer type) {
setupClipboard(length);
ids = new FaweOutputStream(new LZ4BlockOutputStream(idOut));
}
};
NBTStreamReader<? extends Integer, ? extends Integer> dataInit = new NBTStreamReader<Integer, Integer>() {
@Override
public void accept(Integer length, Integer type) {
setupClipboard(length);
datas = new FaweOutputStream(new LZ4BlockOutputStream(dataOut));
}
};
NBTStreamReader<? extends Integer, ? extends Integer> addInit = new NBTStreamReader<Integer, Integer>() {
@Override
public void accept(Integer length, Integer type) {
setupClipboard(length*2);
addOut = new FastByteArrayOutputStream();
adds = new FaweOutputStream(new LZ4BlockOutputStream(addOut));
}
@@ -203,7 +200,7 @@ public class SchematicStreamer extends NBTStreamer {
private void fixStates() {
for (BlockVector3 pos : fc) {
BlockStateHolder block = pos.getBlock(fc);
BlockState block = pos.getBlock(fc);
if (block.getMaterial().isAir()) continue;
int x = pos.getX();
@@ -262,7 +259,7 @@ public class SchematicStreamer extends NBTStreamer {
} else {
int group = group(type);
if (group == -1) return;
BlockStateHolder set = block;
BlockState set = block;
if (set.getState(PropertyKey.NORTH) == Boolean.FALSE && merge(group, x, y, z - 1)) set = set.with(PropertyKey.NORTH, true);
if (set.getState(PropertyKey.EAST) == Boolean.FALSE && merge(group, x + 1, y, z)) set = set.with(PropertyKey.EAST, true);
@@ -370,7 +367,7 @@ public class SchematicStreamer extends NBTStreamer {
private LinearClipboard setupClipboard(int size) {
if (fc != null) {
if (fc.getDimensions().getX() == 0) {
fc.setDimensions(BlockVector3.at(size, 1, 1));
// fc.setDimensions(BlockVector3.at(size, 1, 1));
}
return fc;
}
@@ -409,10 +406,10 @@ public class SchematicStreamer extends NBTStreamer {
BlockVector3 offset = BlockVector3.at(offsetX, offsetY, offsetZ);
BlockVector3 origin = min.subtract(offset);
BlockVector3 dimensions = BlockVector3.at(width, height, length);
fc.setDimensions(dimensions);
// fc.setDimensions(dimensions);
fixStates();
CuboidRegion region = new CuboidRegion(min, min.add(width, height, length).subtract(BlockVector3.ONE));
clipboard.init(region, fc);
// clipboard.init(region, fc);
clipboard.setOrigin(origin);
return clipboard;
} catch (Throwable e) {
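The readers above stage the Blocks/Data/AddBlocks arrays in LZ4-compressed buffers before they are replayed into the clipboard. A minimal round trip with the same lz4-java stream classes (the byte values are made up):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import net.jpountz.lz4.LZ4BlockInputStream;
import net.jpountz.lz4.LZ4BlockOutputStream;

// Minimal LZ4 block-stream round trip, the same buffering idea the streamer
// uses for the schematic's id/data/add arrays.
public class Lz4RoundTrip {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream compressed = new ByteArrayOutputStream();
        try (LZ4BlockOutputStream out = new LZ4BlockOutputStream(compressed)) {
            out.write(new byte[]{1, 2, 3, 4});   // write some block ids
        }
        try (LZ4BlockInputStream in =
                     new LZ4BlockInputStream(new ByteArrayInputStream(compressed.toByteArray()))) {
            System.out.println(in.read());        // reads back 1
        }
    }
}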

View File

@@ -2,6 +2,8 @@ package com.boydti.fawe.object.brush.visualization.cfi;
import com.boydti.fawe.FaweCache;
import com.boydti.fawe.beta.IChunkSet;
import com.boydti.fawe.jnbt.NBTStreamer;
import com.boydti.fawe.object.RunnableVal2;
import com.boydti.fawe.object.collection.BitArray4096;
import com.boydti.fawe.object.collection.BlockVector3ChunkMap;
import com.boydti.fawe.object.io.FastByteArrayOutputStream;
@@ -9,6 +11,7 @@ import com.boydti.fawe.util.MathMan;
import com.sk89q.jnbt.CompoundTag;
import com.sk89q.jnbt.ListTag;
import com.sk89q.jnbt.NBTConstants;
import com.sk89q.jnbt.NBTInputStream;
import com.sk89q.jnbt.NBTOutputStream;
import com.sk89q.worldedit.math.BlockVector2;
import com.sk89q.worldedit.math.BlockVector3;
@@ -20,6 +23,8 @@ import com.sk89q.worldedit.world.block.BlockState;
import com.sk89q.worldedit.world.block.BlockStateHolder;
import com.sk89q.worldedit.world.block.BlockType;
import com.sk89q.worldedit.world.block.BlockTypes;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -38,8 +43,8 @@ public class WritableMCAChunk implements IChunkSet {
public final char[] blocks = new char[65536];
public BlockVector3ChunkMap<CompoundTag> tiles = new BlockVector3ChunkMap<CompoundTag>();
public Map<UUID, CompoundTag> entities = new HashMap<>();
public final BlockVector3ChunkMap<CompoundTag> tiles = new BlockVector3ChunkMap<CompoundTag>();
public final Map<UUID, CompoundTag> entities = new HashMap<>();
public long inhabitedTime = System.currentTimeMillis();
public long lastUpdate;
@@ -48,7 +53,82 @@ public class WritableMCAChunk implements IChunkSet {
public int chunkX, chunkZ;
protected WritableMCAChunk() {
public WritableMCAChunk() {}
public WritableMCAChunk(NBTInputStream nis, int chunkX, int chunkZ, boolean readPos) throws IOException {
NBTStreamer streamer = new NBTStreamer(nis);
streamer.addReader(".Level.InhabitedTime", new RunnableVal2<Integer, Long>() {
@Override
public void run(Integer index, Long value) {
inhabitedTime = value;
}
});
streamer.addReader(".Level.LastUpdate", new RunnableVal2<Integer, Long>() {
@Override
public void run(Integer index, Long value) {
lastUpdate = value;
}
});
streamer.addReader(".Level.Sections.#", new RunnableVal2<Integer, CompoundTag>() {
@Override
public void run(Integer index, CompoundTag tag) {
int layer = tag.getByte("Y");
// "Palette"
}
});
streamer.addReader(".Level.Sections.Palette.#", new RunnableVal2<Integer, CompoundTag>() {
@Override
public void run(Integer index, CompoundTag entry) {
String name = entry.getString("Name");
entry.
}
});
streamer.addReader(".Level.TileEntities.#", new RunnableVal2<Integer, CompoundTag>() {
@Override
public void run(Integer index, CompoundTag tile) {
int x = tile.getInt("x") & 15;
int y = tile.getInt("y");
int z = tile.getInt("z") & 15;
tiles.put(x, y, z, tile);
}
});
streamer.addReader(".Level.Entities.#", new RunnableVal2<Integer, CompoundTag>() {
@Override
public void run(Integer index, CompoundTag entityTag) {
long least = entityTag.getLong("UUIDLeast");
long most = entityTag.getLong("UUIDMost");
entities.put(new UUID(most, least), entityTag);
}
});
streamer.addReader(".Level.Biomes", new RunnableVal2<Integer, byte[]>() {
@Override
public void run(Integer index, byte[] value) {
for (int i = 0; i < 256; i++) {
biomes[i] = value[i];
}
}
});
// streamer.addReader(".Level.HeightMap", new RunnableVal2<Integer, int[]>() {
// @Override
// public void run(Integer index, int[] value) {
// heightMap = value;
// }
// });
if (readPos) {
streamer.addReader(".Level.xPos", new RunnableVal2<Integer, Integer>() {
@Override
public void run(Integer index, Integer value) {
WritableMCAChunk.this.chunkX = value;
}
});
streamer.addReader(".Level.zPos", new RunnableVal2<Integer, Integer>() {
@Override
public void run(Integer index, Integer value) {
WritableMCAChunk.this.chunkZ = value;
}
});
}
streamer.readFully();
}
public int getX() {
@@ -71,6 +151,10 @@ public class WritableMCAChunk implements IChunkSet {
@Override
public IChunkSet reset() {
return this.reset(true);
}
public IChunkSet reset(boolean full) {
if (!tiles.isEmpty()) {
tiles.clear();
}
@@ -80,9 +164,10 @@ public class WritableMCAChunk implements IChunkSet {
modified = 0;
deleted = false;
hasBiomes = false;
// TODO optimize
for (int i = 0; i < 65536; i++) {
blocks[i] = BlockID.AIR;
if (full) {
for (int i = 0; i < 65536; i++) {
blocks[i] = BlockID.AIR;
}
}
Arrays.fill(hasSections, false);
return this;
@@ -137,11 +222,10 @@ public class WritableMCAChunk implements IChunkSet {
int num_palette = 0;
try {
for (int i = blockIndexStart, j = 0; i < blockIndexEnd; i++, j++) {
int stateId = blocks[i];
int ordinal = BlockState.getFromInternalId(stateId).getOrdinal(); // TODO fixme Remove all use of BlockTypes.BIT_OFFSET so that this conversion isn't necessary
int ordinal = blocks[i];
int palette = blockToPalette[ordinal];
if (palette == Integer.MAX_VALUE) {
BlockState state = BlockTypes.states[ordinal];
// BlockState state = BlockTypes.states[ordinal];
blockToPalette[ordinal] = palette = num_palette;
paletteToBlock[num_palette] = ordinal;
num_palette++;
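The hunk above replaces the internal-id lookup with the block ordinal and builds the per-section palette on the fly. A standalone sketch of that palette step, with illustrative array sizes and names (not from the commit):

import java.util.Arrays;

// Illustrative sketch of the palette-building loop above: every distinct
// ordinal in a section gets a small palette index, so the section can be
// stored as a palette plus per-block indices.
public class PaletteSketch {
    /** Returns the palette size; fills blockToPalette and paletteToBlock. */
    static int buildPalette(char[] blocks, int from, int to,
                            int[] blockToPalette, int[] paletteToBlock) {
        int numPalette = 0;
        for (int i = from; i < to; i++) {
            int ordinal = blocks[i];
            int palette = blockToPalette[ordinal];
            if (palette == Integer.MAX_VALUE) {          // first time this ordinal appears
                blockToPalette[ordinal] = palette = numPalette;
                paletteToBlock[numPalette] = ordinal;
                numPalette++;
            }
        }
        return numPalette;
    }

    public static void main(String[] args) {
        char[] blocks = {1, 1, 7, 1, 7, 42};
        int[] blockToPalette = new int[4096];
        int[] paletteToBlock = new int[4096];
        Arrays.fill(blockToPalette, Integer.MAX_VALUE);  // "unassigned" marker, as above
        int size = buildPalette(blocks, 0, blocks.length, blockToPalette, paletteToBlock);
        System.out.println(size);                        // 3 distinct ordinals -> palette of 3
    }
}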

View File

@@ -29,6 +29,7 @@ import com.sk89q.worldedit.world.block.BlockStateHolder;
import com.sk89q.worldedit.world.block.BlockType;
import javax.annotation.Nullable;
import java.net.URI;
import java.util.List;
import java.util.Set;
import java.util.UUID;
@@ -44,6 +45,11 @@ public class DelegateClipboard implements Clipboard {
return parent;
}
@Override
public URI getURI() {
return parent.getURI();
}
@Override
public void setOrigin(BlockVector3 offset) {
parent.setOrigin(offset);

View File

@@ -35,6 +35,7 @@ import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URI;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
@@ -109,6 +110,11 @@ public class DiskOptimizedClipboard extends LinearClipboard implements Closeable
}
}
@Override
public URI getURI() {
return file.toURI();
}
private static BlockVector3 readSize(File file) {
try (DataInputStream is = new DataInputStream(new FileInputStream(file))) {
is.skipBytes(2);

View File

@@ -2,6 +2,7 @@ package com.boydti.fawe.object.clipboard;
import com.sk89q.jnbt.CompoundTag;
import com.sk89q.worldedit.WorldEditException;
import com.sk89q.worldedit.entity.Entity;
import com.sk89q.worldedit.extent.clipboard.Clipboard;
import com.sk89q.worldedit.math.BlockVector2;
import com.sk89q.worldedit.math.BlockVector3;
@@ -39,6 +40,11 @@ public class EmptyClipboard implements Clipboard {
public void setOrigin(BlockVector3 origin) {
}
@Override
public void removeEntity(Entity entity) {
}
@Override
public BlockVector3 getMinimumPoint() {
return BlockVector3.ZERO;

View File

@@ -63,20 +63,25 @@ public abstract class LinearClipboard extends SimpleClipboard implements Clipboa
@Override
public Iterator<BlockVector3> iterator() {
Iterator<BlockVector3> iter = getRegion().iterator_old();
LinearFilter filter = new LinearFilter();
Region region = getRegion();
if (region instanceof CuboidRegion) {
Iterator<BlockVector3> iter = ((CuboidRegion) region).iterator_old();
LinearFilter filter = new LinearFilter();
return new ForwardingIterator<BlockVector3>() {
@Override
protected Iterator<BlockVector3> delegate() {
return iter;
}
@Override
public BlockVector3 next() {
return filter.next(super.next());
}
};
} else {
return super.iterator();
}
}
private class LinearFilter extends AbstractFilterBlock {
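The new iterator wraps the cuboid iterator in Guava's ForwardingIterator and post-processes each position in next(). A self-contained sketch of the same pattern, using strings purely for illustration:

import com.google.common.collect.ForwardingIterator;
import java.util.Iterator;
import java.util.List;

// Minimal sketch of the ForwardingIterator pattern used above: delegate() hands
// back the wrapped iterator, and next() transforms each element on the fly.
public class UppercasingIterator extends ForwardingIterator<String> {
    private final Iterator<String> delegate;

    public UppercasingIterator(Iterator<String> delegate) {
        this.delegate = delegate;
    }

    @Override
    protected Iterator<String> delegate() {
        return delegate;
    }

    @Override
    public String next() {
        return super.next().toUpperCase();
    }

    public static void main(String[] args) {
        new UppercasingIterator(List.of("a", "b").iterator())
                .forEachRemaining(System.out::println); // prints A, B
    }
}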

View File

@@ -35,11 +35,8 @@ public class MultiClipboardHolder extends URIClipboardHolder {
super(URI.create(""), EmptyClipboard.INSTANCE);
holders = new ArrayList<>();
URI uri = URI.create("");
if (clipboard instanceof BlockArrayClipboard) {
LinearClipboard fc = ((BlockArrayClipboard) clipboard).IMP;
if (fc instanceof DiskOptimizedClipboard) {
uri = ((DiskOptimizedClipboard) fc).getFile().toURI();
}
if (clipboard.getURI() != null) {
uri = clipboard.getURI();
}
add(uri, clipboard);
}

View File

@@ -2,6 +2,8 @@ package com.boydti.fawe.object.schematic;
import static com.google.common.base.Preconditions.checkNotNull;
import com.boydti.fawe.beta.Filter;
import com.boydti.fawe.beta.FilterBlock;
import com.boydti.fawe.object.clipboard.LinearClipboard;
import com.boydti.fawe.object.clipboard.ReadOnlyClipboard;
import com.boydti.fawe.util.EditSessionBuilder;
@@ -178,8 +180,7 @@ public class Schematic {
if (transform != null) {
copy.setTransform(transform);
}
copy.setCopyingBiomes(!(clipboard instanceof BlockArrayClipboard) || ((BlockArrayClipboard) clipboard).IMP
.hasBiomes());
copy.setCopyingBiomes(clipboard.hasBiomes());
if (extent instanceof EditSession) {
EditSession editSession = (EditSession) extent;
Mask sourceMask = editSession.getSourceMask();
@@ -200,89 +201,47 @@ public class Schematic {
final BlockVector3 bot = clipboard.getMinimumPoint();
final BlockVector3 origin = clipboard.getOrigin();
final boolean copyBiomes =
!(clipboard instanceof BlockArrayClipboard) || ((BlockArrayClipboard) clipboard).IMP
.hasBiomes();
final boolean copyBiomes = clipboard.hasBiomes();
clipboard.apply(clipboard, new Filter() {
@Override
public void applyBlock(FilterBlock block) {
// Optimize for BlockArrayClipboard
if (clipboard instanceof BlockArrayClipboard && region instanceof CuboidRegion) {
// To is relative to the world origin (player loc + small clipboard offset) (As the positions supplied are relative to the clipboard min)
final int relx = to.getBlockX() + bot.getBlockX() - origin.getBlockX();
final int rely = to.getBlockY() + bot.getBlockY() - origin.getBlockY();
final int relz = to.getBlockZ() + bot.getBlockZ() - origin.getBlockZ();
BlockArrayClipboard bac = (BlockArrayClipboard) clipboard;
if (copyBiomes) {
bac.IMP.forEach(new LinearClipboard.BlockReader() {
MutableBlockVector2 mpos2d = new MutableBlockVector2();
{
mpos2d.setComponents(Integer.MIN_VALUE, Integer.MIN_VALUE);
}
@Override
public <B extends BlockStateHolder<B>> void run(int x, int y, int z, B block) {
try {
int xx = x + relx;
int zz = z + relz;
if (xx != mpos2d.getBlockX() || zz != mpos2d.getBlockZ()) {
mpos2d.setComponents(xx, zz);
extent.setBiome(mpos2d, bac.IMP.getBiome(x, z));
}
if (!pasteAir && block.getBlockType().getMaterial().isAir()) {
return;
}
extent.setBlock(xx, y + rely, zz, block);
} catch (WorldEditException e) {
throw new RuntimeException(e);
}
}
}, true);
} else {
bac.IMP.forEach(new LinearClipboard.BlockReader() {
@Override
public <B extends BlockStateHolder<B>> void run(int x, int y, int z, B block) {
try {
extent.setBlock(x + relx, y + rely, z + relz, block);
} catch (WorldEditException e) {
throw new RuntimeException(e);
}
}
}, pasteAir);
}
} else {
// To must be relative to the clipboard origin ( player location - clipboard origin ) (as the locations supplied are relative to the world origin)
final int relx = to.getBlockX() - origin.getBlockX();
final int rely = to.getBlockY() - origin.getBlockY();
final int relz = to.getBlockZ() - origin.getBlockZ();
Operation visitor = new RegionVisitor(region, new RegionFunction() {
// MutableBlockVector2 mpos2d_2 = new MutableBlockVector2();
MutableBlockVector2 mpos2d = new MutableBlockVector2();
});
{
mpos2d.setComponents(Integer.MIN_VALUE, Integer.MIN_VALUE);
}
System.out.println("TODO optimize paste using above apply");
// Optimize for BlockArrayClipboard
// To must be relative to the clipboard origin ( player location - clipboard origin ) (as the locations supplied are relative to the world origin)
final int relx = to.getBlockX() - origin.getBlockX();
final int rely = to.getBlockY() - origin.getBlockY();
final int relz = to.getBlockZ() - origin.getBlockZ();
Operation visitor = new RegionVisitor(region, new RegionFunction() {
// MutableBlockVector2 mpos2d_2 = new MutableBlockVector2();
MutableBlockVector2 mpos2d = new MutableBlockVector2();
@Override
public boolean apply(BlockVector3 mutable) throws WorldEditException {
BlockState block = clipboard.getBlock(mutable);
int xx = mutable.getBlockX() + relx;
int zz = mutable.getBlockZ() + relz;
if (copyBiomes && xx != mpos2d.getBlockX() && zz != mpos2d.getBlockZ()) {
mpos2d.setComponents(xx, zz);
{
mpos2d.setComponents(Integer.MIN_VALUE, Integer.MIN_VALUE);
}
@Override
public boolean apply(BlockVector3 mutable) throws WorldEditException {
BlockState block = clipboard.getBlock(mutable);
int xx = mutable.getBlockX() + relx;
int zz = mutable.getBlockZ() + relz;
if (copyBiomes && xx != mpos2d.getBlockX() && zz != mpos2d.getBlockZ()) {
mpos2d.setComponents(xx, zz);
// extent.setBiome(mpos2d, clipboard.getBiome(mpos2d_2.setComponents(mutable.getBlockX(), mutable.getBlockZ())));
extent.setBiome(mpos2d, clipboard
.getBiome(BlockVector2.at(mutable.getBlockX(), mutable.getBlockZ())));
}
if (!pasteAir && block.getBlockType().getMaterial().isAir()) {
return false;
}
extent.setBlock(xx, mutable.getBlockY() + rely, zz, block);
}
if (!pasteAir && block.getBlockType().getMaterial().isAir()) {
return false;
}
});
Operations.completeBlindly(visitor);
}
extent.setBlock(xx, mutable.getBlockY() + rely, zz, block);
return false;
}
});
Operations.completeBlindly(visitor);
// Entity offset is the paste location subtract the clipboard origin (entity's location is already relative to the world origin)
final int entityOffsetX = to.getBlockX() - origin.getBlockX();
final int entityOffsetY = to.getBlockY() - origin.getBlockY();
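As the comments above note, the paste offset is the target position minus the clipboard origin, so a position recorded in the clipboard's source world lands at position + (to - origin) in the destination. A small illustrative calculation with made-up coordinates:

import com.sk89q.worldedit.math.BlockVector3;

// Illustrative only: how the relx/rely/relz offsets above translate a position
// from the clipboard's source coordinates into the destination world.
public class PasteOffsetExample {
    public static void main(String[] args) {
        BlockVector3 to = BlockVector3.at(100, 64, 100);    // paste target in the destination world
        BlockVector3 origin = BlockVector3.at(10, 64, 10);  // clipboard origin (where the copy was made)
        BlockVector3 copied = BlockVector3.at(12, 65, 13);  // a block position from the copy

        BlockVector3 rel = to.subtract(origin);             // (90, 0, 90), i.e. relx/rely/relz
        BlockVector3 pasted = copied.add(rel);              // lands at (102, 65, 103)
        System.out.println(pasted);
    }
}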

View File

@@ -0,0 +1,681 @@
package com.sk89q.jnbt.anvil;
import com.boydti.fawe.Fawe;
import com.boydti.fawe.beta.IBlocks;
import com.boydti.fawe.jnbt.NBTStreamer;
import com.boydti.fawe.object.RunnableVal;
import com.boydti.fawe.object.RunnableVal4;
import com.boydti.fawe.object.brush.visualization.cfi.WritableMCAChunk;
import com.boydti.fawe.object.collection.CleanableThreadLocal;
import com.boydti.fawe.object.exception.FaweException;
import com.boydti.fawe.object.io.BufferedRandomAccessFile;
import com.boydti.fawe.object.io.FastByteArrayInputStream;
import com.boydti.fawe.util.MainUtil;
import com.boydti.fawe.util.MathMan;
import com.sk89q.jnbt.NBTInputStream;
import com.sk89q.worldedit.world.World;
import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;
/**
* Chunk format: http://minecraft.gamepedia.com/Chunk_format#Entity_format
* e.g.: `.Level.Entities.#` (Starts with a . as the root tag is unnamed)
*/
public class MCAFile {
private static Field fieldBuf2;
private static Field fieldBuf3;
static {
try {
fieldBuf2 = InflaterInputStream.class.getDeclaredField("buf");
fieldBuf2.setAccessible(true);
fieldBuf3 = NBTInputStream.class.getDeclaredField("buf");
fieldBuf3.setAccessible(true);
} catch (Throwable e) {
e.printStackTrace();
}
}
private final World world;
private final File file;
private RandomAccessFile raf;
private byte[] locations;
private boolean deleted;
private final int X, Z;
private final Int2ObjectOpenHashMap<WritableMCAChunk> chunks = new Int2ObjectOpenHashMap<>();
final ThreadLocal<byte[]> byteStore1 = new ThreadLocal<byte[]>() {
@Override
protected byte[] initialValue() {
return new byte[4096];
}
};
final ThreadLocal<byte[]> byteStore2 = new ThreadLocal<byte[]>() {
@Override
protected byte[] initialValue() {
return new byte[4096];
}
};
final ThreadLocal<byte[]> byteStore3 = new ThreadLocal<byte[]>() {
@Override
protected byte[] initialValue() {
return new byte[1024];
}
};
public MCAFile(World world, File file) throws FileNotFoundException {
this.world = world;
this.file = file;
if (!file.exists()) {
throw new FileNotFoundException(file.getName());
}
String[] split = file.getName().split("\\.");
X = Integer.parseInt(split[1]);
Z = Integer.parseInt(split[2]);
}
public MCAFile(World world, int mcrX, int mcrZ) {
this(world, mcrX, mcrZ, new File(world.getStoragePath().toFile(), "r." + mcrX + "." + mcrZ + ".mca"));
}
public MCAFile(World world, int mcrX, int mcrZ, File file) {
this.world = world;
this.file = file;
X = mcrX;
Z = mcrZ;
}
public void clear() {
if (raf != null) {
try {
raf.close();
} catch (IOException e) {
e.printStackTrace();
}
}
synchronized (chunks) {
chunks.clear();
}
locations = null;
CleanableThreadLocal.clean(byteStore1);
CleanableThreadLocal.clean(byteStore2);
CleanableThreadLocal.clean(byteStore3);
}
@Override
protected void finalize() throws Throwable {
CleanableThreadLocal.clean(byteStore1);
CleanableThreadLocal.clean(byteStore2);
CleanableThreadLocal.clean(byteStore3);
super.finalize();
}
public void setDeleted(boolean deleted) {
this.deleted = deleted;
}
public boolean isDeleted() {
return deleted;
}
public World getWorld() {
return world;
}
/**
* Loads the location header from disk
*/
public void init() {
try {
if (raf == null) {
this.locations = new byte[4096];
if (file != null) {
this.raf = new RandomAccessFile(file, "rw");
if (raf.length() < 8192) {
raf.setLength(8192);
} else {
raf.seek(0);
raf.readFully(locations);
}
}
}
} catch (Throwable e) {
e.printStackTrace();
}
}
public int getX() {
return X;
}
public int getZ() {
return Z;
}
public RandomAccessFile getRandomAccessFile() {
return raf;
}
public File getFile() {
return file;
}
public WritableMCAChunk getCachedChunk(int cx, int cz) {
int pair = MathMan.pair((short) (cx & 31), (short) (cz & 31));
synchronized (chunks) {
return chunks.get(pair);
}
}
public void setChunk(WritableMCAChunk chunk) {
int cx = chunk.getX();
int cz = chunk.getZ();
int pair = MathMan.pair((short) (cx & 31), (short) (cz & 31));
synchronized (chunks) {
chunks.put(pair, chunk);
}
}
public WritableMCAChunk getChunk(int cx, int cz) throws IOException {
WritableMCAChunk cached = getCachedChunk(cx, cz);
if (cached != null) {
return cached;
} else {
return readChunk(cx, cz);
}
}
public WritableMCAChunk readChunk(int cx, int cz) throws IOException {
int i = ((cx & 31) << 2) + ((cz & 31) << 7);
int offset = (((locations[i] & 0xFF) << 16) + ((locations[i + 1] & 0xFF) << 8) + ((locations[i + 2] & 0xFF))) << 12;
int size = (locations[i + 3] & 0xFF) << 12;
if (offset == 0) {
return null;
}
NBTInputStream nis = getChunkIS(offset);
WritableMCAChunk chunk = new WritableMCAChunk(nis, cx, cz, false);
nis.close();
int pair = MathMan.pair((short) (cx & 31), (short) (cz & 31));
synchronized (chunks) {
chunks.put(pair, chunk);
}
return chunk;
}
/**
* CX, CZ, OFFSET, SIZE
*
* @param onEach
* @throws IOException
*/
public void forEachSortedChunk(RunnableVal4<Integer, Integer, Integer, Integer> onEach) throws IOException {
char[] offsets = new char[(int) (raf.length() / 4096) - 2];
Arrays.fill(offsets, Character.MAX_VALUE);
char i = 0;
for (int z = 0; z < 32; z++) {
for (int x = 0; x < 32; x++, i += 4) {
int offset = (((locations[i] & 0xFF) << 16) + ((locations[i + 1] & 0xFF) << 8) + ((locations[i + 2] & 0xFF))) - 2;
int size = locations[i + 3] & 0xFF;
if (size != 0) {
if (offset < offsets.length) {
offsets[offset] = i;
} else {
Fawe.debug("Ignoring invalid offset " + offset);
}
}
}
}
for (i = 0; i < offsets.length; i++) {
int index = offsets[i];
if (index != Character.MAX_VALUE) {
int offset = i + 2;
int size = locations[index + 3] & 0xFF;
int index2 = index >> 2;
int x = (index2) & 31;
int z = (index2) >> 5;
onEach.run(x, z, offset << 12, size << 12);
}
}
}
/**
* @param onEach cx, cz, offset, size
*/
public void forEachChunk(RunnableVal4<Integer, Integer, Integer, Integer> onEach) {
int i = 0;
for (int z = 0; z < 32; z++) {
for (int x = 0; x < 32; x++, i += 4) {
int offset = (((locations[i] & 0xFF) << 16) + ((locations[i + 1] & 0xFF) << 8) + ((locations[i + 2] & 0xFF)));
int size = locations[i + 3] & 0xFF;
if (size != 0) {
onEach.run(x, z, offset << 12, size << 12);
}
}
}
}
public void forEachChunk(RunnableVal<WritableMCAChunk> onEach) {
int i = 0;
for (int z = 0; z < 32; z++) {
for (int x = 0; x < 32; x++, i += 4) {
int offset = (((locations[i] & 0xFF) << 16) + ((locations[i + 1] & 0xFF) << 8) + ((locations[i + 2] & 0xFF)));
int size = locations[i + 3] & 0xFF;
if (size != 0) {
try {
onEach.run(getChunk(x, z));
} catch (Throwable ignore) {
}
}
}
}
}
public int getOffset(int cx, int cz) {
int i = ((cx & 31) << 2) + ((cz & 31) << 7);
int offset = (((locations[i] & 0xFF) << 16) + ((locations[i + 1] & 0xFF) << 8) + ((locations[i + 2] & 0xFF)));
return offset << 12;
}
public int getSize(int cx, int cz) {
int i = ((cx & 31) << 2) + ((cz & 31) << 7);
return (locations[i + 3] & 0xFF) << 12;
}
public List<Integer> getChunks() {
final List<Integer> values;
synchronized (chunks) {
values = new ArrayList<>(chunks.size());
}
for (int i = 0; i < locations.length; i += 4) {
int offset = (((locations[i] & 0xFF) << 16) + ((locations[i + 1] & 0xFF) << 8) + ((locations[i + 2] & 0xFF)));
values.add(offset);
}
return values;
}
public byte[] getChunkCompressedBytes(int offset) throws IOException {
if (offset == 0) {
return null;
}
synchronized (raf) {
raf.seek(offset);
int size = raf.readInt();
int compression = raf.read();
byte[] data = new byte[size];
raf.readFully(data);
return data;
}
}
private NBTInputStream getChunkIS(int offset) throws IOException {
try {
byte[] data = getChunkCompressedBytes(offset);
FastByteArrayInputStream bais = new FastByteArrayInputStream(data);
InflaterInputStream iis = new InflaterInputStream(bais, new Inflater(), 1);
fieldBuf2.set(iis, byteStore2.get());
BufferedInputStream bis = new BufferedInputStream(iis);
NBTInputStream nis = new NBTInputStream(bis);
fieldBuf3.set(nis, byteStore3.get());
return nis;
} catch (IllegalAccessException unlikely) {
unlikely.printStackTrace();
return null;
}
}
public void streamChunk(int cx, int cz, RunnableVal<NBTStreamer> addReaders) throws IOException {
streamChunk(getOffset(cx, cz), addReaders);
}
public void streamChunk(int offset, RunnableVal<NBTStreamer> withStream) throws IOException {
byte[] data = getChunkCompressedBytes(offset);
streamChunk(data, withStream);
}
public void streamChunk(byte[] data, RunnableVal<NBTStreamer> withStream) throws IOException {
if (data != null) {
try {
FastByteArrayInputStream nbtIn = new FastByteArrayInputStream(data);
FastByteArrayInputStream bais = new FastByteArrayInputStream(data);
InflaterInputStream iis = new InflaterInputStream(bais, new Inflater(), 1);
fieldBuf2.set(iis, byteStore2.get());
BufferedInputStream bis = new BufferedInputStream(iis);
NBTInputStream nis = new NBTInputStream(bis);
fieldBuf3.set(nis, byteStore3.get());
NBTStreamer streamer = new NBTStreamer(nis);
withStream.run(streamer);
streamer.readQuick();
} catch (IllegalAccessException unlikely) {
unlikely.printStackTrace();
}
}
}
/**
* @param onEach chunk
*/
public void forEachCachedChunk(RunnableVal<WritableMCAChunk> onEach) {
synchronized (chunks) {
for (Map.Entry<Integer, WritableMCAChunk> entry : chunks.entrySet()) {
onEach.run(entry.getValue());
}
}
}
public List<WritableMCAChunk> getCachedChunks() {
synchronized (chunks) {
return new ArrayList<>(chunks.values());
}
}
public void uncache(int cx, int cz) {
int pair = MathMan.pair((short) (cx & 31), (short) (cz & 31));
synchronized (chunks) {
chunks.remove(pair);
}
}
private byte[] toBytes(WritableMCAChunk chunk) throws Exception {
if (chunk.isDeleted()) {
return null;
}
byte[] uncompressed = chunk.toBytes(byteStore3.get());
byte[] compressed = MainUtil.compress(uncompressed, byteStore2.get(), null);
return compressed;
}
private byte[] getChunkBytes(int cx, int cz) throws Exception {
WritableMCAChunk mca = getCachedChunk(cx, cz);
if (mca == null) {
int offset = getOffset(cx, cz);
if (offset == 0) {
return null;
}
return getChunkCompressedBytes(offset);
}
return toBytes(mca);
}
private void writeSafe(RandomAccessFile raf, int offset, byte[] data) throws IOException {
int len = data.length + 5;
raf.seek(offset);
if (raf.length() - offset < len) {
raf.setLength(((offset + len + 4095) / 4096) * 4096);
}
// Length of remaining data
raf.writeInt(data.length + 1);
// Compression type
raf.write(2);
raf.write(data);
}
private void writeHeader(RandomAccessFile raf, int cx, int cz, int offsetMedium, int sizeByte, boolean writeTime) throws IOException {
int i = ((cx & 31) << 2) + ((cz & 31) << 7);
locations[i] = (byte) (offsetMedium >> 16);
locations[i + 1] = (byte) (offsetMedium >> 8);
locations[i + 2] = (byte) (offsetMedium);
locations[i + 3] = (byte) sizeByte;
raf.seek(i);
raf.write((offsetMedium >> 16));
raf.write((offsetMedium >> 8));
raf.write((offsetMedium >> 0));
raf.write(sizeByte);
raf.seek(i + 4096);
if (offsetMedium == 0 && sizeByte == 0) {
raf.writeInt(0);
} else {
raf.writeInt((int) (System.currentTimeMillis() / 1000L));
}
}
public void close(ForkJoinPool pool) {
if (raf == null) return;
synchronized (raf) {
if (raf != null) {
flush(pool);
try {
raf.close();
} catch (IOException e) {
e.printStackTrace();
}
raf = null;
locations = null;
}
}
}
public boolean isModified() {
if (isDeleted()) {
return true;
}
synchronized (chunks) {
for (Int2ObjectMap.Entry<WritableMCAChunk> entry : chunks.int2ObjectEntrySet()) {
WritableMCAChunk chunk = entry.getValue();
if (chunk.isModified() || chunk.isDeleted()) {
return true;
}
}
}
return false;
}
/**
* Write the chunk to the file
* @param pool
*/
public void flush(ForkJoinPool pool) {
synchronized (raf) {
// If the file is marked as deleted, nothing is written
if (isDeleted()) {
clear();
file.delete();
return;
}
boolean wait; // If the flush method needs to wait for the pool
if (pool == null) {
wait = true;
pool = new ForkJoinPool();
} else wait = false;
// Chunks that need to be relocated
Int2ObjectOpenHashMap<byte[]> relocate = new Int2ObjectOpenHashMap<>();
// The position of each chunk
final Int2ObjectOpenHashMap<Integer> offsetMap = new Int2ObjectOpenHashMap<>(); // Offset -> <byte cx, byte cz, short size>
// The data of each modified chunk
final Int2ObjectOpenHashMap<byte[]> compressedMap = new Int2ObjectOpenHashMap<>();
// The data of each chunk that needs to be moved
final Int2ObjectOpenHashMap<byte[]> append = new Int2ObjectOpenHashMap<>();
boolean modified = false;
// Get the current time for the chunk timestamp
long now = System.currentTimeMillis();
// Load the chunks into the append or compressed map
for (WritableMCAChunk chunk : getCachedChunks()) {
if (chunk.isModified() || chunk.isDeleted()) {
modified = true;
chunk.setLastUpdate(now);
if (!chunk.isDeleted()) {
pool.submit(new Runnable() {
@Override
public void run() {
try {
byte[] compressed = toBytes(chunk);
int pair = MathMan.pair((short) (chunk.getX() & 31), (short) (chunk.getZ() & 31));
Int2ObjectOpenHashMap map;
if (getOffset(chunk.getX(), chunk.getZ()) == 0) {
map = append;
} else {
map = compressedMap;
}
synchronized (map) {
map.put(pair, compressed);
}
} catch (Throwable e) {
e.printStackTrace();
}
}
});
}
}
}
// If any changes were detected
if (modified) {
file.setLastModified(now);
// Load the offset data into the offset map
forEachChunk(new RunnableVal4<Integer, Integer, Integer, Integer>() {
@Override
public void run(Integer cx, Integer cz, Integer offset, Integer size) {
short pair1 = MathMan.pairByte((byte) (cx & 31), (byte) (cz & 31));
short pair2 = (short) (size >> 12);
offsetMap.put((int) offset, (Integer) MathMan.pair(pair1, pair2));
}
});
// Wait for previous tasks
pool.awaitQuiescence(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
int start = 8192;
int written = start;
int end = 8192;
int nextOffset = 8192;
try {
for (int count = 0; count < offsetMap.size(); count++) {
// Get the previous position of the next chunk
Integer loc = offsetMap.get(nextOffset);
while (loc == null) {
nextOffset += 4096;
loc = offsetMap.get(nextOffset);
}
int offset = nextOffset;
// Get the x/z from the paired location
short cxz = MathMan.unpairX(loc);
int cx = MathMan.unpairShortX(cxz);
int cz = MathMan.unpairShortY(cxz);
// Get the size from the pair
int size = MathMan.unpairY(loc) << 12;
nextOffset += size;
end = Math.min(start + size, end);
int pair = MathMan.pair((short) (cx & 31), (short) (cz & 31));
byte[] newBytes = relocate.get(pair);
// newBytes is null if the chunk isn't modified or marked for moving
if (newBytes == null) {
WritableMCAChunk cached = getCachedChunk(cx, cz);
// If the previous offset marks the current write position (start) then we only write the header
if (offset == start) {
if (cached == null || !cached.isModified()) {
writeHeader(raf, cx, cz, start >> 12, size >> 12, true);
start += size;
written = start + size;
continue;
} else {
newBytes = compressedMap.get(pair);
}
} else {
// The chunk needs to be moved, fetch the data if necessary
newBytes = compressedMap.get(pair);
if (newBytes == null) {
if (cached == null || !cached.isDeleted()) {
newBytes = getChunkCompressedBytes(getOffset(cx, cz));
}
}
}
}
if (newBytes == null) {
writeHeader(raf, cx, cz, 0, 0, false);
continue;
}
// The length to be written (compressed data + 5 byte chunk header)
int len = newBytes.length + 5;
int oldSize = (size + 4095) >> 12;
int newSize = (len + 4095) >> 12;
int nextOffset2 = end;
// If the current write position (start) + length of data to write (len) are longer than the position of the next chunk, we need to move the next chunks
while (start + len > end) {
Integer nextLoc = offsetMap.get(nextOffset2);
if (nextLoc != null) {
short nextCXZ = MathMan.unpairX(nextLoc);
int nextCX = MathMan.unpairShortX(nextCXZ);
int nextCZ = MathMan.unpairShortY(nextCXZ);
WritableMCAChunk cached = getCachedChunk(nextCX, nextCZ);
if (cached == null || !cached.isModified()) {
byte[] nextBytes = getChunkCompressedBytes(nextOffset2);
relocate.put(MathMan.pair((short) (nextCX & 31), (short) (nextCZ & 31)), nextBytes);
}
int nextSize = MathMan.unpairY(nextLoc) << 12;
end += nextSize;
nextOffset2 += nextSize;
} else {
end += 4096;
nextOffset2 += 4096;
}
}
// Write the chunk + chunk header
writeSafe(raf, start, newBytes);
// Write the location data (beginning of file)
writeHeader(raf, cx, cz, start >> 12, newSize, true);
written = start + newBytes.length + 5;
start += newSize << 12;
}
// Write all the chunks which need to be appended
if (!append.isEmpty()) {
for (Int2ObjectMap.Entry<byte[]> entry : append.int2ObjectEntrySet()) {
int pair = entry.getIntKey();
short cx = MathMan.unpairX(pair);
short cz = MathMan.unpairY(pair);
byte[] bytes = entry.getValue();
int len = bytes.length + 5;
int newSize = (len + 4095) >> 12;
writeSafe(raf, start, bytes);
writeHeader(raf, cx, cz, start >> 12, newSize, true);
written = start + bytes.length + 5;
start += newSize << 12;
}
}
// Round the file length, since the vanilla server doesn't like it for some reason
raf.setLength(4096 * ((written + 4095) / 4096));
if (raf instanceof BufferedRandomAccessFile) {
((BufferedRandomAccessFile) raf).flush();
}
raf.close();
} catch (Throwable e) {
e.printStackTrace();
}
if (wait) {
pool.shutdown();
pool.awaitQuiescence(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
}
}
}
CleanableThreadLocal.clean(byteStore1);
CleanableThreadLocal.clean(byteStore2);
CleanableThreadLocal.clean(byteStore3);
}
}
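MCAFile reads and writes the standard Anvil region header that readChunk, writeSafe and writeHeader assume: 1024 four-byte location entries (a 3-byte offset plus a 1-byte size, both counted in 4 KiB sectors), followed by 1024 four-byte timestamps, with each chunk payload starting with a 4-byte length and a 1-byte compression type (2 = zlib). A standalone sketch that only dumps that header (class and method names are illustrative, not from the commit):

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;

// Standalone sketch of the region-file header layout MCAFile works with.
public class McaHeaderDump {
    public static void main(String[] args) throws IOException {
        try (RandomAccessFile raf = new RandomAccessFile(new File(args[0]), "r")) {
            byte[] locations = new byte[4096];                    // 1024 entries of 4 bytes
            raf.readFully(locations);
            for (int cz = 0; cz < 32; cz++) {
                for (int cx = 0; cx < 32; cx++) {
                    int i = ((cx & 31) << 2) + ((cz & 31) << 7);  // same index math as MCAFile
                    int offsetSectors = ((locations[i] & 0xFF) << 16)
                            | ((locations[i + 1] & 0xFF) << 8)
                            | (locations[i + 2] & 0xFF);
                    int sizeSectors = locations[i + 3] & 0xFF;
                    if (sizeSectors != 0) {
                        // offset/size are in 4096-byte sectors; the payload at that offset
                        // starts with a 4-byte length and a 1-byte compression type.
                        System.out.printf("chunk (%d, %d): offset %d bytes, %d sector(s)%n",
                                cx, cz, offsetSectors << 12, sizeSectors);
                    }
                }
            }
        }
    }
}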

View File

@@ -0,0 +1,4 @@
package com.sk89q.jnbt.anvil;
public class MCAWorld {
}

View File

@@ -34,6 +34,7 @@ import com.sk89q.worldedit.regions.Regions;
import com.sk89q.worldedit.util.Location;
import javax.annotation.Nullable;
import java.net.URI;
import java.util.Iterator;
import java.util.UUID;
@@ -127,6 +128,14 @@ public interface Clipboard extends Extent, Iterable<BlockVector3> {
return Regions.asFlatRegion(getRegion()).asFlatRegion().iterator();
}
default URI getURI() {
return null;
}
// default void paste(Extent other, BlockVector3 to) {
// TODO FIXME
// }
@Override
default <T extends Filter> T apply(Region region, T filter) {
if (region.equals(getRegion())) {

View File

@@ -0,0 +1,4 @@
package com.sk89q.worldedit.extent.clipboard.io;
public class FaweFormat {
}

View File

@@ -48,6 +48,7 @@ import com.sk89q.worldedit.extent.clipboard.Clipboard;
import com.sk89q.worldedit.math.BlockVector2;
import com.sk89q.worldedit.math.BlockVector3;
import com.sk89q.worldedit.regions.CuboidRegion;
import com.sk89q.worldedit.util.Location;
import com.sk89q.worldedit.world.DataFixer;
import com.sk89q.worldedit.world.biome.BiomeType;
import com.sk89q.worldedit.world.biome.BiomeTypes;
@@ -113,7 +114,7 @@ public class SpongeSchematicReader extends NBTSchematicReader {
private LinearClipboard setupClipboard(int size, UUID uuid) {
if (fc != null) {
if (fc.getDimensions().getX() == 0) {
fc.setDimensions(BlockVector3.at(size, 1, 1));
// fc.setDimensions(BlockVector3.at(size, 1, 1));
}
return fc;
}
@@ -237,20 +238,19 @@ public class SpongeSchematicReader extends NBTSchematicReader {
value.remove("Id");
}
ListTag positionTag = compound.getListTag("Pos");
ListTag directionTag = compound.getListTag("Rotation");
EntityType type = EntityTypes.parse(id.getValue());
if (type != null) {
compound = fixEntity(compound);
BaseEntity state = new BaseEntity(type, compound);
fc.createEntity(clipboard, positionTag.asDouble(0), positionTag.asDouble(1), positionTag.asDouble(2), (float) directionTag.asDouble(0), (float) directionTag.asDouble(1), state);
Location loc = compound.getEntityLocation(fc);
fc.createEntity(loc, state);
} else {
Fawe.debug("Invalid entity: " + id);
}
});
streamer.readFully();
if (fc == null) setupClipboard(length * width * height, uuid);
fc.setDimensions(BlockVector3.at(width, height, length));
// fc.setDimensions(BlockVector3.at(width, height, length));
BlockVector3 origin = min;
CuboidRegion region;
if (offsetX != Integer.MIN_VALUE && offsetY != Integer.MIN_VALUE && offsetZ != Integer.MIN_VALUE) {
@@ -281,7 +281,7 @@ public class SpongeSchematicReader extends NBTSchematicReader {
}
}
}
clipboard.init(region, fc);
// clipboard.init(region, fc);
clipboard.setOrigin(origin);
return clipboard;
}

View File

@@ -143,58 +143,29 @@ public class SpongeSchematicWriter implements ClipboardWriter {
char[] palette = new char[BlockTypes.states.length];
Arrays.fill(palette, Character.MAX_VALUE);
int[] paletteMax = {0};
int numTiles = 0;
for (BlockVector3 pos : clipboard) {
BaseBlock block = pos.getFullBlock(clipboard);
CompoundTag nbt = block.getNbtData();
if (nbt != null) {
Map<String, Tag> values = nbt.getValue();
values.remove("id"); // Remove 'id' if it exists. We want 'Id'
int[] numTiles = {0};
LinearClipboard.BlockReader reader = new LinearClipboard.BlockReader() {
@Override
public <B extends BlockStateHolder<B>> void run(int x, int y, int z, B block) {
try {
if (block.hasNbtData()) {
CompoundTag nbt = block.getNbtData();
if (nbt != null) {
Map<String, Tag> values = nbt.getValue();
values.remove("id"); // Remove 'id' if it exists. We want 'Id'
// Positions are kept in NBT, we don't want that.
values.remove("x");
values.remove("y");
values.remove("z");
if (!values.containsKey("Id")) {
values.put("Id", new StringTag(block.getNbtId()));
}
values.put("Pos", new IntArrayTag(new int[]{
x,
y,
z
}));
numTiles[0]++;
tilesOut.writeTagPayload(block.getNbtData());
}
}
int ordinal = block.getOrdinal();
char value = palette[ordinal];
if (value == Character.MAX_VALUE) {
int size = paletteMax[0]++;
palette[ordinal] = value = (char) size;
paletteList.add(ordinal);
}
IOUtil.writeVarInt(blocksOut, value);
} catch (IOException e) {
throw new RuntimeException(e);
// Positions are kept in NBT, we don't want that.
values.remove("x");
values.remove("y");
values.remove("z");
if (!values.containsKey("Id")) {
values.put("Id", new StringTag(block.getNbtId()));
}
}
};
if (clipboard instanceof BlockArrayClipboard) {
((BlockArrayClipboard) clipboard).IMP.forEach(reader, true);
} else {
for (BlockVector3 pt : region) {
BaseBlock block = clipboard.getFullBlock(pt);
int x = pt.getBlockX() - min.getBlockX();
int y = pt.getBlockY() - min.getBlockY();
int z = pt.getBlockZ() - min.getBlockY();
reader.run(x, y, z, block);
values.put("Pos", new IntArrayTag(new int[]{
pos.getX(),
pos.getY(),
pos.getZ()
}));
numTiles++;
tilesOut.writeTagPayload(block.getNbtData());
}
}
// close
@@ -217,10 +188,10 @@ public class SpongeSchematicWriter implements ClipboardWriter {
IOUtil.copy(in, rawStream);
}
if (numTiles[0] != 0) {
if (numTiles != 0) {
out.writeNamedTagName("TileEntities", NBTConstants.TYPE_LIST);
rawStream.write(NBTConstants.TYPE_COMPOUND);
rawStream.writeInt(numTiles[0]);
rawStream.writeInt(numTiles);
try (LZ4BlockInputStream in = new LZ4BlockInputStream(new ByteArrayInputStream(tilesCompressed.toByteArray()))) {
IOUtil.copy(in, rawStream);
}
@@ -287,20 +258,17 @@ public class SpongeSchematicWriter implements ClipboardWriter {
}
}
};
if (clipboard instanceof BlockArrayClipboard) {
((BlockArrayClipboard) clipboard).IMP.streamBiomes(task);
} else {
BlockVector3 min = clipboard.getMinimumPoint();
int width = clipboard.getRegion().getWidth();
int length = clipboard.getRegion().getLength();
for (int z = 0, i = 0; z < length; z++) {
int z0 = min.getBlockZ() + z;
for (int x = 0; x < width; x++, i++) {
int x0 = min.getBlockX() + x;
BlockVector2 pt = BlockVector2.at(x0, z0);
BiomeType biome = clipboard.getBiome(pt);
task.run(i, biome.getInternalId());
}
System.out.println("TODO Optimize biome write");
BlockVector3 min = clipboard.getMinimumPoint();
int width = clipboard.getRegion().getWidth();
int length = clipboard.getRegion().getLength();
for (int z = 0, i = 0; z < length; z++) {
int z0 = min.getBlockZ() + z;
for (int x = 0; x < width; x++, i++) {
int x0 = min.getBlockX() + x;
BlockVector2 pt = BlockVector2.at(x0, z0);
BiomeType biome = clipboard.getBiome(pt);
task.run(i, biome.getInternalId());
}
}
biomesOut.close();
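The block data above is written as var-int palette indices via IOUtil.writeVarInt. A self-contained sketch of that encoding (standard unsigned LEB128, as used by the Sponge schematic format; writeVarInt here is a local stand-in, not the project's IOUtil):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

// Sketch of the var-int encoding used for the BlockData palette indices:
// 7 data bits per byte, high bit set while more bytes follow.
public class VarIntSketch {
    static void writeVarInt(OutputStream out, int value) throws IOException {
        while ((value & ~0x7F) != 0) {
            out.write((value & 0x7F) | 0x80);   // low 7 bits + continuation flag
            value >>>= 7;
        }
        out.write(value);                        // last byte, continuation flag clear
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        writeVarInt(buf, 5);                     // small palette index -> 1 byte
        writeVarInt(buf, 300);                   // index 300 -> 2 bytes (0xAC 0x02)
        System.out.println(buf.size());          // 3
    }
}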

View File

@@ -34,6 +34,7 @@ import com.sk89q.worldedit.entity.Entity;
import com.sk89q.worldedit.entity.metadata.EntityProperties;
import com.sk89q.worldedit.extent.Extent;
import com.sk89q.worldedit.extent.clipboard.BlockArrayClipboard;
import com.sk89q.worldedit.extent.clipboard.Clipboard;
import com.sk89q.worldedit.function.CombinedRegionFunction;
import com.sk89q.worldedit.function.RegionFunction;
import com.sk89q.worldedit.function.RegionMaskTestFunction;
@@ -300,7 +301,7 @@ public class ForwardExtentCopy implements Operation {
new MaskTraverser(sourceMask).reset(transExt);
copy = new RegionMaskingFilter(sourceMask, copy);
}
if (copyingBiomes && (!(source instanceof BlockArrayClipboard) || ((BlockArrayClipboard) source).IMP.hasBiomes())) {
if (copyingBiomes && source.isWorld() || (source instanceof Clipboard && ((Clipboard) source).hasBiomes())) {
copy = CombinedRegionFunction.combine(copy, new BiomeCopy(source, finalDest));
}
blockCopy = new BackwardsExtentBlockCopy(region, from, transform, copy);
@@ -354,7 +355,7 @@ public class ForwardExtentCopy implements Operation {
if (maskFunc != null) copy = new RegionMaskTestFunction(sourceMask, copy, maskFunc);
else copy = new RegionMaskingFilter(sourceMask, copy);
}
if (copyingBiomes && (!(source instanceof BlockArrayClipboard) || ((BlockArrayClipboard) source).IMP.hasBiomes())) {
if (copyingBiomes && source.isWorld() || (source instanceof Clipboard && ((Clipboard) source).hasBiomes())) {
copy = CombinedRegionFunction.combine(copy, new BiomeCopy(source, finalDest));
}
blockCopy = new RegionVisitor(region, copy);

View File

@@ -31,8 +31,6 @@ import com.sk89q.worldedit.math.transform.AffineTransform;
import com.sk89q.worldedit.world.biome.BiomeType;
import com.sk89q.worldedit.world.block.BaseBlock;
import com.sk89q.worldedit.world.block.BlockState;
import com.sk89q.worldedit.world.block.BlockStateHolder;
import java.util.Comparator;
/**
@@ -699,8 +697,7 @@ public abstract class BlockVector3 {
return orDefault.setBlock(this, BlockState.getFromOrdinal(ordinal));
}
public <T extends BlockStateHolder<T>> boolean setBlock(Extent orDefault, T state) {
public boolean setBlock(Extent orDefault, BlockState state) {
return orDefault.setBlock(this, state);
}