From ebc232efe1e4404af8cd75cdd8d42c1e3762396f Mon Sep 17 00:00:00 2001 From: dordsor21 Date: Sun, 20 Aug 2023 15:18:32 +0100 Subject: [PATCH] MVP for anvil --- .../adapter/ext/fawe/PaperweightAdapter.java | 10 + .../ext/fawe/v1_18_R2/PaperweightAdapter.java | 10 + .../ext/fawe/v1_19_R3/PaperweightAdapter.java | 10 + .../ext/fawe/v1_20_R1/PaperweightAdapter.java | 10 + .../fastasyncworldedit/bukkit/FaweBukkit.java | 11 + .../bukkit/adapter/CachedBukkitAdapter.java | 4 - .../bukkit/adapter/IBukkitAdapter.java | 10 + .../bukkit/adapter/SimpleBukkitAdapter.java | 4 +- .../sk89q/worldedit/bukkit/BukkitAdapter.java | 16 + .../bukkit/BukkitServerInterface.java | 12 + .../com/fastasyncworldedit/core/IFawe.java | 5 + .../core/anvil/MCAChunk.java | 1143 +++++++++++++++++ .../core/anvil/MCAFile.java | 595 +++++++++ .../core/anvil/MCAWorld.java | 202 +++ .../internal/io/BufferedRandomAccessFile.java | 434 +++++++ .../core/jnbt/streamer/StreamDelegate.java | 96 +- .../core/math/FastBitSet.java | 39 +- .../fastasyncworldedit/core/queue/IChunk.java | 2 - .../core/queue/IChunkGet.java | 1 + .../core/queue/IQueueExtent.java | 2 + .../ParallelAnvilQueueExtent.java | 279 ++++ .../queue/implementation/QueueHandler.java | 39 + .../SingleThreadQueueExtent.java | 16 + .../blocks/ThreadUnsafeCharBlocks.java | 2 +- .../implementation/chunk/ChunkHolder.java | 5 +- .../core/regions/WorldRegionsRegion.java | 281 ++++ .../java/com/sk89q/jnbt/NBTInputStream.java | 366 +++--- .../java/com/sk89q/jnbt/NBTOutputStream.java | 27 +- .../worldedit}/command/AnvilCommands.java | 78 +- .../command/argument/MCAWorldConverter.java | 96 ++ .../extension/platform/Platform.java | 16 + .../platform/PlatformCommandManager.java | 11 + .../worldedit/regions/AbstractRegion.java | 2 - .../src/main/resources/lang/strings.json | 1 + 34 files changed, 3581 insertions(+), 254 deletions(-) create mode 100644 worldedit-core/src/main/java/com/fastasyncworldedit/core/anvil/MCAChunk.java create mode 100644 worldedit-core/src/main/java/com/fastasyncworldedit/core/anvil/MCAFile.java create mode 100644 worldedit-core/src/main/java/com/fastasyncworldedit/core/anvil/MCAWorld.java create mode 100644 worldedit-core/src/main/java/com/fastasyncworldedit/core/internal/io/BufferedRandomAccessFile.java create mode 100644 worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/ParallelAnvilQueueExtent.java create mode 100644 worldedit-core/src/main/java/com/fastasyncworldedit/core/regions/WorldRegionsRegion.java rename worldedit-core/src/main/java/com/{fastasyncworldedit/core => sk89q/worldedit}/command/AnvilCommands.java (92%) create mode 100644 worldedit-core/src/main/java/com/sk89q/worldedit/command/argument/MCAWorldConverter.java diff --git a/worldedit-bukkit/adapters/adapter-1_17_1/src/main/java/com/sk89q/worldedit/bukkit/adapter/ext/fawe/PaperweightAdapter.java b/worldedit-bukkit/adapters/adapter-1_17_1/src/main/java/com/sk89q/worldedit/bukkit/adapter/ext/fawe/PaperweightAdapter.java index 52974fb02..1b012fe1c 100644 --- a/worldedit-bukkit/adapters/adapter-1_17_1/src/main/java/com/sk89q/worldedit/bukkit/adapter/ext/fawe/PaperweightAdapter.java +++ b/worldedit-bukkit/adapters/adapter-1_17_1/src/main/java/com/sk89q/worldedit/bukkit/adapter/ext/fawe/PaperweightAdapter.java @@ -541,6 +541,16 @@ public final class PaperweightAdapter implements BukkitImplAdapter tiles = new HashMap<>(); // Stored with the world coordinates + // Cache as may be unused + private char[][] sectionPalettes; + private BiomeType[][] biomePalettes; + private 
long[][] sectionLongs; + private long[][] biomeLongs; + private char[][] blocks; + private BiomeType[][] biomes; + private long lastUpdate; + private long inhabitedTime; + private Status status; + + private final Map retained = new HashMap<>(); + private boolean modified = false; + private boolean deleted = false; + private int minSectionPosition = Integer.MAX_VALUE; + private int maxSectionPosition = Integer.MIN_VALUE; + private int sectionCount; + private Map[] sectionsRetained; + private boolean emptyChunk = false; + + private boolean loadedFromFile = false; + + public MCAChunk(MCAFile mcaFile, NBTInputStream nis, int x, int z, boolean readPos) throws IOException { + this.mcaFile = mcaFile; + this.chunkX = x; + this.chunkZ = z; + loadFromNIS(nis, readPos); + } + + public MCAChunk(MCAFile mcaFile, int x, int z) { + this.mcaFile = mcaFile; + this.chunkX = x; + this.chunkZ = z; + } + + public synchronized void loadFromNIS(NBTInputStream inputStream, boolean readPos) throws IOException { + if (emptyChunk) { + return; + } + try (NBTInputStream nis = inputStream) { + nis.mark(Integer.MAX_VALUE); + StreamDelegate initial = new StreamDelegate(); + StreamDelegate empty = initial.add(null); + StreamDelegate initialSectionsDelegate = empty.add("sections"); + initialSectionsDelegate.withInfo((length, type) -> { + sectionCount = length; + sectionPalettes = new char[length][]; + biomePalettes = new BiomeType[length][]; + sectionLongs = new long[length][]; + biomeLongs = new long[length][]; + blocks = new char[length][]; + biomes = new BiomeType[length][]; + sectionsRetained = (HashMap[]) new HashMap[length]; + }); + empty.add("yPos").withInt((i, v) -> minSectionPosition = v); + nis.readNamedTagLazyExceptionally(initial); + // -2 because indices and empty section at bottom + maxSectionPosition = minSectionPosition + sectionCount - 2; + nis.reset(); + + // reset + StreamDelegate first = new StreamDelegate(); + StreamDelegate root = first.add(null).retainOthers(); + root.add("InhabitedTime").withLong((i, v) -> inhabitedTime = v); + root.add("LastUpdate").withLong((i, v) -> lastUpdate = v); + root.add("Status").withValue((ValueReader) (i, v) -> status = Status.valueOf(v + .substring(v.indexOf(":") + 1) + .toUpperCase(Locale.ROOT))); + root.add("xPos").withInt((i, v) -> { + if (!readPos) { + if (chunkX != v) { + throw new IllegalStateException("Stored x position + `" + v + "` doesn't equal given x position `" + chunkX + + "`!"); + } + } else { + chunkX = v; + } + }); + root.add("zPos").withInt((i, v) -> { + if (!readPos) { + if (chunkZ != v) { + throw new IllegalStateException("Stored z position doesn't equal given z position!"); + } + } else { + chunkZ = v; + } + }); + StreamDelegate sectionsDelegate = root.add("sections"); + sectionsDelegate.withElem((ValueReader>) (i, v) -> { + CompoundTag sectionTag = FaweCache.INSTANCE.asTag(v); + + Object yValue = sectionTag.getValue().get("Y").getValue(); // sometimes a byte, sometimes an int + if (!(yValue instanceof Number)) { + LOGGER.warn("Y is not numeric: {}. 
Skipping.", yValue); + return; + } + int y = ((Number) yValue).intValue(); + if (y < minSectionPosition) { + // Bottom, empty, chunk section + return; + } + + blocks: + { + Tag rawBlockStatesTag = sectionTag + .getValue() + .get("block_states"); // null for sections outside of the world limits + if (rawBlockStatesTag instanceof CompoundTag blockStatesTag) { + // parse palette + List paletteEntries = blockStatesTag.getList("palette", CompoundTag.class); + int paletteSize = paletteEntries.size(); + if (paletteSize == 0) { + blocks[y - minSectionPosition] = new char[]{BlockTypesCache.ReservedIDs.AIR}; + break blocks; + } + char[] palette = new char[paletteSize]; + for (int paletteEntryId = 0; paletteEntryId < paletteSize; paletteEntryId++) { + CompoundTag paletteEntry = paletteEntries.get(paletteEntryId); + BlockType type = BlockTypes.get(paletteEntry.getString("Name")); + if (type == null) { + LOGGER.warn("Invalid block type: {}. Using air", paletteEntry.getString("Name")); + palette[paletteEntryId] = BlockTypes.AIR.getDefaultState().getOrdinalChar(); + continue; + } + BlockState blockState = type.getDefaultState(); + if (paletteEntry.containsKey("Properties")) { + CompoundTag properties; + try { + properties = NBTUtils.getChildTag(paletteEntry.getValue(), "Properties", CompoundTag.class); + for (Property property : blockState.getStates().keySet()) { + if (properties.containsKey(property.getName())) { + String value = properties.getString(property.getName()); + blockState = getBlockStateWith(blockState, property, value); + } + } + } catch (InvalidFormatException e) { + LOGGER.warn(e.getMessage()); + } + } + palette[paletteEntryId] = blockState.getOrdinalChar(); + } + + if (paletteSize == 1) { + blocks[y - minSectionPosition] = palette; + } else { + sectionPalettes[y - minSectionPosition] = palette; + sectionLongs[y - minSectionPosition] = blockStatesTag.getLongArray("data"); + } + } + } + + biomes: + { + Tag rawBlockStatesTag = sectionTag.getValue().get("biomes"); // null for sections outside of the world limits + if (rawBlockStatesTag instanceof CompoundTag biomesTag) { + // parse palette + List paletteEntries = biomesTag.getList("palette"); + int paletteSize = paletteEntries.size(); + if (paletteSize == 0) { + break biomes; + } + BiomeType[] palette = new BiomeType[paletteSize]; + for (int paletteEntryId = 0; paletteEntryId < paletteSize; paletteEntryId++) { + String paletteEntry = paletteEntries.get(paletteEntryId).getValue().toString(); + BiomeType type = BiomeType.REGISTRY.get(paletteEntry); + if (type == null) { + LOGGER.warn("Invalid biome type: {}. 
Defaulting to plains.", paletteEntry); + palette[paletteEntryId] = BiomeTypes.PLAINS; + continue; + } + palette[paletteEntryId] = type; + } + + if (paletteSize == 1) { + biomes[y - minSectionPosition] = palette; + } else { + biomePalettes[y - minSectionPosition] = palette; + biomeLongs[y - minSectionPosition] = biomesTag.getLongArray("data"); + } + } + } + + int index = y - minSectionPosition; + HashMap tmp = new HashMap<>(); + if (sectionsRetained[index] != null) { + tmp.putAll(sectionsRetained[index]); + } + sectionTag.getValue().forEach((key, val) -> { + if ("biomes".equals(key) || "block_states".equals(key) || "Y".equals(key)) { + return; + } + tmp.put(key, val); + }); + sectionsRetained[index] = tmp; + }); + StreamDelegate blockEntitiesDelegate = root.add("block_entities"); + blockEntitiesDelegate.withElem((ValueReader>) (i, v) -> { + CompoundTag tag = FaweCache.INSTANCE.asTag(v); + int ex = tag.getInt("x"); + int ey = tag.getInt("y"); + int ez = tag.getInt("z"); + + BlockVector3 vec = BlockVector3.at(ex & 15, ey, ez & 15); + tiles.put(vec, tag); + }); + StreamDelegate heightmaps = root.add("Heightmaps"); + for (HeightMapType type : HeightMapType.values()) { + heightmaps.add(type.name()).withValue((ValueReader) (i, v) -> new BitArrayUnstretched(9, 256, v).toRaw( + heightMaps[type.ordinal()])); + } + nis.readNamedTagLazyExceptionally(first); + if (root.getRetained() != null) { + retained.putAll(root.getRetained()); + } + loadedFromFile = true; + } catch (IOException e) { + LOGGER.error("Couldn't read chunk data for {}:{},{}", mcaFile.getFile().getFileName(), chunkX, chunkZ, e); + this.emptyChunk = true; + throw e; + } + } + + public void setEmpty(boolean emptyChunk) { + this.emptyChunk = emptyChunk; + } + + private BlockState getBlockStateWith(BlockState source, Property property, String value) { + return source.with(property, property.getValueFor(value)); + } + + private boolean populateBlocks(int y) { + if (!loadedFromFile) { + try { + mcaFile.loadIntoChunkFromFile(this); + if (this.emptyChunk) { + return false; + } + } catch (IOException e) { + e.printStackTrace(); + return false; + } + } + int index = y - minSectionPosition; + if (sectionLongs[index] == null || sectionPalettes[index] == null) { + return blocks[index] != null; + } + PackedIntArrayReader reader = new PackedIntArrayReader(sectionLongs[index]); + blocks[index] = new char[4096]; + for (int blockPos = 0; blockPos < 4096; blockPos++) { + int i = reader.get(blockPos); + blocks[index][blockPos] = sectionPalettes[index][i]; + } + sectionPalettes[index] = null; + sectionLongs[index] = null; + return true; + } + + private boolean populateBiomes(int y) { + int index = y - minSectionPosition; + if (biomeLongs[index] == null || biomePalettes[index] == null) { + return biomes[index] != null; + } + + PackedIntArrayReader reader = new PackedIntArrayReader(biomeLongs[index], 64); + biomes[index] = new BiomeType[64]; + for (int biomePos = 0; biomePos < 64; biomePos++) { + int i = reader.get(biomePos); + biomes[index][biomePos] = biomePalettes[index][i]; + } + biomePalettes[index] = null; + biomeLongs[index] = null; + return true; + } + + public byte[] toBytes(byte[] buffer) throws IOException { + if (buffer == null) { + buffer = new byte[8192]; + } + FastByteArrayOutputStream buffered = new FastByteArrayOutputStream(buffer); + DataOutputStream dataOut = new DataOutputStream(new DeflaterOutputStream(buffered)); + try (NBTOutputStream nbtOut = new NBTOutputStream((DataOutput) dataOut)) { + write(nbtOut); + } + return 
buffered.toByteArray(); + } + + public void write(NBTOutputStream nbtOut) throws IOException { + nbtOut.writeLazyCompoundTag("", out -> { + for (Map.Entry entry : retained.entrySet()) { + out.writeNamedTag(entry.getKey(), entry.getValue()); + } + out.writeNamedTag("V", (byte) 1); + out.writeNamedTag("xPos", getX()); + out.writeNamedTag("yPos", minSectionPosition); + out.writeNamedTag("zPos", getZ()); + if (tiles.isEmpty()) { + out.writeNamedEmptyList("block_entities"); + } else { + out.writeNamedTag("block_entities", new ListTag(CompoundTag.class, new ArrayList<>(tiles.values()))); + } + out.writeNamedTag("InhabitedTime", inhabitedTime); + out.writeNamedTag("LastUpdate", lastUpdate); + out.writeNamedTag("Status", "minecraft:" + status.name().toLowerCase()); + out.writeLazyCompoundTag("HeightMaps", heightMapOut -> { + for (int i = 0; i < heightMaps.length; i++) { + BitArrayUnstretched bitArray = new BitArrayUnstretched(9, 256); + bitArray.fromRaw(heightMaps[i]); + heightMapOut.writeNamedTag(HeightMapType.values()[i].name(), bitArray.getData()); + } + }); + out.writeNamedTagName("sections", NBTConstants.TYPE_LIST); + nbtOut.getOutputStream().writeByte(NBTConstants.TYPE_COMPOUND); + int len = 0; + for (int index = 0; index < sectionCount; index++) { + if (hasSection(index + minSectionPosition)) { + len++; + } + } + nbtOut.getOutputStream().writeInt(len); + for (int i = 0; i < sectionCount; i++) { + final int layer = i + minSectionPosition; + final int index = i; + if (!hasSection(layer)) { + continue; + } + out.writeLazyListedCompoundTag(sectionOut -> { + for (Map.Entry entry : sectionsRetained[index].entrySet()) { + sectionOut.writeNamedTag(entry.getKey(), entry.getValue()); + } + sectionOut.writeNamedTag("Y", layer); + if (biomes[index] != null && biomes[index].length == 1) { + sectionOut.writeLazyCompoundTag("biomes", biomesOut -> { + biomesOut.writeNamedTagName("palette", NBTConstants.TYPE_LIST); + nbtOut.getOutputStream().writeByte(NBTConstants.TYPE_STRING); + nbtOut.getOutputStream().writeInt(1); + biomesOut.writeUTF(biomes[index][0].getId()); + }); + } else { + + } + sectionOut.writeLazyCompoundTag("block_states", blocksOut -> { + if (blocks == null || blocks[index] == null) { + blocksOut.writeNamedTagName("palette", NBTConstants.TYPE_LIST); + nbtOut.getOutputStream().writeByte(NBTConstants.TYPE_COMPOUND); + nbtOut.getOutputStream().writeInt(sectionPalettes[index].length); + for (int paletteIndex = 0; paletteIndex < sectionPalettes[index].length; paletteIndex++) { + final int finalPaletteIndex = paletteIndex; + blocksOut.writeLazyListedCompoundTag(paletteOut -> { + int ordinal = sectionPalettes[index][finalPaletteIndex]; + final BlockState state = BlockTypesCache.states[ordinal]; + blocksOut.writeNamedTag("Name", state.getBlockType().getId()); + if (!state.getStates().isEmpty()) { + blocksOut.writeLazyCompoundTag("Properties", propertiesOut -> { + for (Map.Entry, Object> entry : state.getStates().entrySet()) { + propertiesOut.writeNamedTag( + entry.getKey().getName(), + String.valueOf(entry.getValue()) + ); + } + }); + } + }); + } + blocksOut.writeNamedTag("data", sectionLongs[index]); + } else if (blocks[index].length == 1) { + blocksOut.writeNamedTagName("palette", NBTConstants.TYPE_LIST); + nbtOut.getOutputStream().writeByte(NBTConstants.TYPE_COMPOUND); + nbtOut.getOutputStream().writeInt(1); + setSinglePalette(index, blocksOut); + } else { + final int[] blockToPalette = FaweCache.INSTANCE.BLOCK_TO_PALETTE.get(); + final int[] paletteToBlock = 
FaweCache.INSTANCE.PALETTE_TO_BLOCK.get(); + final long[] blockStates = FaweCache.INSTANCE.BLOCK_STATES.get(); + final int[] blocksCopy = FaweCache.INSTANCE.SECTION_BLOCKS.get(); + try { + int num_palette = createPalette(blockToPalette, paletteToBlock, blocks[index], blocksCopy); + blocksOut.writeNamedTagName("palette", NBTConstants.TYPE_LIST); + nbtOut.getOutputStream().writeByte(NBTConstants.TYPE_COMPOUND); + nbtOut.getOutputStream().writeInt(num_palette); + + if (num_palette == 1) { + setSinglePalette(index, blocksOut); + return; + } + + int bitsPerEntry = MathMan.log2nlz(num_palette - 1); + if (bitsPerEntry > 0 && bitsPerEntry < 5) { + bitsPerEntry = 4; + } + + int bitsPerEntryNonZero = Math.max(bitsPerEntry, 1); // We do want to use zero sometimes + final int blocksPerLong = MathMan.floorZero((double) 64 / bitsPerEntryNonZero); + final int blockBitArrayEnd = MathMan.ceilZero((float) 4096 / blocksPerLong); + + final BitArrayUnstretched bitArray = new BitArrayUnstretched( + bitsPerEntryNonZero, + 4096, + blockStates); + bitArray.fromRaw(blocksCopy); + + //if (bitsPerEntry < 9) { + for (int paletteIndex = 0; paletteIndex < num_palette; paletteIndex++) { + int finalPaletteIndex = paletteIndex; + blocksOut.writeLazyListedCompoundTag(paletteOut -> { + int ordinal = paletteToBlock[finalPaletteIndex]; + blockToPalette[ordinal] = Integer.MAX_VALUE; + final BlockState state = BlockTypesCache.states[ordinal]; + blocksOut.writeNamedTag("Name", state.getBlockType().getId()); + if (!state.getStates().isEmpty()) { + blocksOut.writeLazyCompoundTag("Properties", propertiesOut -> { + for (Map.Entry, Object> entry : state.getStates().entrySet()) { + propertiesOut.writeNamedTag(entry.getKey().getName(), + String.valueOf(entry.getValue())); + } + }); + } + }); + } + //} + + if (bitsPerEntry > 0) { + final long[] bits = Arrays.copyOfRange(blockStates, 0, blockBitArrayEnd); + blocksOut.writeNamedTag("data", bits); + } + } finally { + Arrays.fill(blockToPalette, Integer.MAX_VALUE); + Arrays.fill(paletteToBlock, Integer.MAX_VALUE); + Arrays.fill(blockStates, 0); + Arrays.fill(blocksCopy, 0); + } + } + + }); + }); + } + }); + nbtOut.writeEndTag(); + } + + private void setSinglePalette(int index, NBTOutputStream blocksOut) throws IOException { + BlockState state = BlockState.getFromOrdinal(blocks[index][0]); + blocksOut.writeLazyListedCompoundTag(paletteOut -> { + paletteOut.writeNamedTag("Name", state.getBlockType().getId()); + if (!state.getStates().isEmpty()) { + paletteOut.writeLazyCompoundTag("Properties", propertiesOut -> { + for (Map.Entry, Object> entry : state.getStates().entrySet()) { + propertiesOut.writeNamedTag( + entry.getKey().getName(), + String.valueOf(entry.getValue()) + ); + } + }); + } + }); + } + + private int createPalette(int[] blockToPalette, int[] paletteToBlock, char[] data, int[] dataCopy) { + int num_palette = 0; + for (int i = 0; i < 4096; i++) { + char ordinal = data[i]; + if (ordinal == BlockTypesCache.ReservedIDs.__RESERVED__) { + ordinal = BlockTypesCache.ReservedIDs.AIR; + } + int palette = blockToPalette[ordinal]; + if (palette == Integer.MAX_VALUE) { + blockToPalette[ordinal] = num_palette; + paletteToBlock[num_palette] = ordinal; + num_palette++; + } + } + int bitsPerEntry = MathMan.log2nlz(num_palette - 1); + // If bits per entry is over 8, the game uses the global palette. 
+ if (bitsPerEntry > 8 && WorldEdit + .getInstance() + .getPlatformManager() + .queryCapability(Capability.WORLD_EDITING) + .getIbdToStateOrdinal() != null) { + // Cannot System#array copy char[] -> int[]; + char[] ibdToStateOrdinal = WorldEdit + .getInstance() + .getPlatformManager() + .queryCapability(Capability.WORLD_EDITING) + .getIbdToStateOrdinal(); + //noinspection ConstantConditions - not null from if statement + for (int i = 0; i < ibdToStateOrdinal.length; i++) { + paletteToBlock[i] = ibdToStateOrdinal[i]; + } + //noinspection ConstantConditions - not null if ibdToStateOrdinal is not null + System.arraycopy( + WorldEdit + .getInstance() + .getPlatformManager() + .queryCapability(Capability.WORLD_EDITING) + .getOrdinalToIbdID(), + 0, + blockToPalette, + 0, + WorldEdit + .getInstance() + .getPlatformManager() + .queryCapability(Capability.WORLD_EDITING) + .getOrdinalToIbdID().length + ); + } + for (int i = 0; i < 4096; i++) { + char ordinal = data[i]; + if (ordinal == BlockTypesCache.ReservedIDs.__RESERVED__) { + ordinal = BlockTypesCache.ReservedIDs.AIR; + } + int palette = blockToPalette[ordinal]; + dataCopy[i] = palette; + } + return num_palette; + } + + /** + * Set the chunk as having been modified + */ + public void setModified() { + modified = true; + } + + public boolean isModified() { + return modified; + } + + public boolean isDeleted() { + return deleted; + } + + public Status getStatus() { + return status; + } + + private void checkLoaded() { + if (!loadedFromFile) { + try { + mcaFile.loadIntoChunkFromFile(this); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + private char getOrdinal(int x, int y, int z) { + int layer = y >> 4; + int index = layer - minSectionPosition; + return blocks[index].length == 1 ? 
blocks[index][0] : blocks[index][(y & 15) << 8 | (z & 15) << 4 | (x & 15)]; + } + + @Override + public boolean hasSection(final int layer) { + checkLoaded(); + if (emptyChunk) { + return false; + } + if (layer < minSectionPosition || layer > maxSectionPosition) { + return false; + } + int index = layer - minSectionPosition; + if (blocks[index] != null) { + return true; + } + if (sectionLongs[index] != null) { + return true; + } + if (biomes[index] != null) { + return true; + } + return biomeLongs[index] != null; + } + + @Override + public char[] load(final int layer) { + checkLoaded(); + if (emptyChunk || layer < minSectionPosition || layer > maxSectionPosition) { + return new char[4096]; + } + int index = layer - minSectionPosition; + if (blocks[index] != null) { + if (blocks[index].length == 4096) { + return blocks[index]; + } else { + char[] result = new char[4096]; + Arrays.fill(result, blocks[index][0]); + return result; + } + } + if (sectionLongs[index] == null) { + return new char[4096]; + } + populateBlocks(layer); + if (blocks[index] != null) { + if (blocks[index].length == 4096) { + return blocks[index]; + } else { + char[] result = new char[4096]; + Arrays.fill(result, blocks[index][0]); + return result; + } + } + return new char[4096]; + } + + @Nullable + @Override + public char[] loadIfPresent(final int layer) { + checkLoaded(); + if (emptyChunk || layer < minSectionPosition || layer > maxSectionPosition) { + return null; + } + int index = layer - minSectionPosition; + if (blocks[index] != null) { + if (blocks[index].length == 4096) { + return blocks[index]; + } else { + char[] result = new char[4096]; + Arrays.fill(result, blocks[index][0]); + return result; + } + } + if (sectionLongs[index] == null) { + return null; + } + populateBlocks(layer); + if (blocks[index] != null) { + if (blocks[index].length == 4096) { + return blocks[index]; + } else { + char[] result = new char[4096]; + Arrays.fill(result, blocks[index][0]); + return result; + } + } + return null; + } + + @Override + public Map getTiles() { + checkLoaded(); + return tiles; + } + + @Override + public CompoundTag getTile(final int x, final int y, final int z) { + checkLoaded(); + return tiles.get(BlockVector3.at((x & 15) + (chunkX << 4), y, (z & 15) + (chunkZ << 4))); + } + + @Override + public Set getEntities() { + throw new UnsupportedOperationException("Not supported in Anvil queue mode."); + } + + @Override + public void removeSectionLighting(final int layer, final boolean sky) { + throw new UnsupportedOperationException("Not supported in Anvil queue mode."); + } + + @Override + public boolean trim(final boolean aggressive, final int layer) { + return true; + } + + @Override + public boolean setBiome(final int x, final int y, final int z, final BiomeType biome) { + checkLoaded(); + int layer = y >> 4; + if (emptyChunk | layer < minSectionPosition || layer > maxSectionPosition) { + return false; + } + if (!populateBiomes(layer)) { + return false; + } + if (emptyChunk) { + return false; + } + int index = layer - minSectionPosition; + int bx = (x & 15) >> 2; + int by = (y & 15) >> 2; + int bz = (z & 15) >> 2; + if (biomes[index].length == 1) { + if (biomes[index][0] == biome) { + return true; + } + biomes[index] = new BiomeType[64]; + Arrays.fill(biomes[index], biomes[index][0]); + } + biomes[index][by << 4 | bz << 2 | bx] = biome; + setModified(); + return true; + } + + @Override + public > boolean setBlock(final int x, final int y, final int z, final T holder) { + checkLoaded(); + int layer = y >> 4; + if 
(emptyChunk || layer < minSectionPosition || layer > maxSectionPosition) { + return false; + } + if (!populateBlocks(layer)) { + return false; + } + if (emptyChunk) { + return false; + } + int index = layer - minSectionPosition; + char toSet = holder.getOrdinalChar(); + if (blocks[index].length == 1) { + if (blocks[index][0] == toSet) { + return true; + } + blocks[index] = new char[4096]; + Arrays.fill(blocks[index], blocks[index][0]); + } + blocks[index][(y & 15) << 8 | (z & 15) << 4 | (x & 15)] = toSet; + setModified(); + return true; + } + + @Override + public void setBlocks(final int layer, final char[] data) { + checkLoaded(); + if (emptyChunk || layer < minSectionPosition || layer > maxSectionPosition) { + return; + } + int index = layer - minSectionPosition; + blocks[index] = data; + setModified(); + } + + @Override + public boolean isEmpty() { + checkLoaded(); + if (emptyChunk) { + return true; + } + for (int layer = minSectionPosition; layer <= maxSectionPosition; layer++) { + if (hasSection(layer)) { + return true; + } + } + return false; + } + + @Override + public boolean setTile(final int x, final int y, final int z, final CompoundTag tile) { + checkLoaded(); + if (emptyChunk) { + return false; + } + tiles.put(BlockVector3.at((x & 15) + (chunkX << 4), y, (z & 15) + (chunkZ << 4)), tile); + setModified(); + return true; + } + + @Override + public void setBlockLight(final int x, final int y, final int z, final int value) { + throw new UnsupportedOperationException("Not supported by Anvil queue mode."); + } + + @Override + public void setSkyLight(final int x, final int y, final int z, final int value) { + throw new UnsupportedOperationException("Not supported by Anvil queue mode."); + } + + @Override + public void setHeightMap(final HeightMapType type, final int[] heightMap) { + checkLoaded(); + heightMaps[type.ordinal()] = heightMap; + } + + @Override + public void setLightLayer(final int layer, final char[] toSet) { + throw new UnsupportedOperationException("Not supported by Anvil queue mode."); + } + + @Override + public void setSkyLightLayer(final int layer, final char[] toSet) { + throw new UnsupportedOperationException("Not supported by Anvil queue mode."); + } + + @Override + public void setFullBright(final int layer) { + throw new UnsupportedOperationException("Not supported by Anvil queue mode."); + } + + @Override + public void setEntity(final CompoundTag tag) { + throw new UnsupportedOperationException("Not supported by Anvil queue mode."); + } + + @Override + public void removeEntity(final UUID uuid) { + throw new UnsupportedOperationException("Not supported by Anvil queue mode."); + } + + @Override + public Set getEntityRemoves() { + return null; + } + + @Override + public BiomeType[][] getBiomes() { + checkLoaded(); + if (emptyChunk) { + return null; + } + for (int index = 0; index < biomes.length; index++) { + BiomeType[] sectionBiomes = biomes[index]; + if (sectionBiomes.length == 1) { + biomes[index] = new BiomeType[64]; + Arrays.fill(biomes[index], biomes[index][0]); + } + } + return biomes; + } + + @Override + public char[][] getLight() { + throw new UnsupportedOperationException("Not supported by Anvil queue mode."); + } + + @Override + public char[][] getSkyLight() { + throw new UnsupportedOperationException("Not supported by Anvil queue mode."); + } + + @Override + public boolean hasBiomes(final int layer) { + checkLoaded(); + if (emptyChunk || layer < minSectionPosition || layer > maxSectionPosition) { + return false; + } + return biomes.length != 0 && 
biomes[layer - minSectionPosition] != null; + } + + @Override + public int getSectionCount() { + checkLoaded(); + return sectionCount; + } + + @Override + public int getMaxSectionPosition() { + checkLoaded(); + return maxSectionPosition; + } + + @Override + public int getMinSectionPosition() { + checkLoaded(); + return minSectionPosition; + } + + @Override + public BaseBlock getFullBlock(final int x, final int y, final int z) { + checkLoaded(); + if (emptyChunk) { + return __RESERVED__BASE; + } + BlockState state = getBlock(x, y, z); + if (state == null) { + return __RESERVED__BASE; + } + CompoundTag tag = getTile(x, y, z); + return tag == null ? state.toBaseBlock() : state.toBaseBlock(tag); + } + + @Override + public BiomeType getBiomeType(final int x, final int y, final int z) { + checkLoaded(); + int layer = y >> 4; + if (emptyChunk || layer < minSectionPosition || layer > maxSectionPosition) { + return null; + } + if (populateBiomes(layer)) { + if (emptyChunk) { + return null; + } + int index = layer - minSectionPosition; + int bx = (x & 15) >> 2; + int by = (y & 15) >> 2; + int bz = (z & 15) >> 2; + return biomes[index][by << 4 | bz << 2 | bx]; + } + return null; + } + + @Override + public BlockState getBlock(final int x, final int y, final int z) { + checkLoaded(); + int layer = y >> 4; + if (emptyChunk || layer < minSectionPosition || layer > maxSectionPosition) { + return __RESERVED__STATE; + } + if (populateBlocks(layer)) { + if (emptyChunk) { + return __RESERVED__STATE; + } + return BlockState.getFromOrdinal(getOrdinal(x, y, z)); + } + return __RESERVED__STATE; + } + + @Override + public int getSkyLight(final int x, final int y, final int z) { + throw new UnsupportedOperationException("Not supported in Anvil queue mode."); + } + + @Override + public int getEmittedLight(final int x, final int y, final int z) { + throw new UnsupportedOperationException("Not supported in Anvil queue mode."); + } + + @Override + public int[] getHeightMap(final HeightMapType type) { + checkLoaded(); + return heightMaps[type.ordinal()]; + } + + @Override + public > V call(final IChunkSet set, final Runnable finalize) { +// if (status != Status.FULL) { +// return (V) Futures.immediateFuture(null); +// } + tiles.entrySet().removeIf(e -> { + BlockVector3 pos = e.getKey(); + return set + .getBlock(pos.getBlockX(), pos.getBlockY(), pos.getBlockZ()) + .getOrdinalChar() != BlockTypesCache.ReservedIDs.__RESERVED__; + }); + for (int layer = set.getMinSectionPosition(); layer <= set.getMaxSectionPosition(); layer++) { + char[] toSet = set.loadIfPresent(layer); + int index = layer - minSectionPosition; + int setIndex = layer - set.getMinSectionPosition(); + if (toSet != null) { + if (populateBlocks(layer)) { + if (blocks[index].length == 1) { + char c = blocks[index][0]; + blocks[index] = new char[4096]; + Arrays.fill(blocks[index], c); + } + for (int i = 0; i < 4096; i++) { + char c = toSet[i]; + if (c != BlockTypesCache.ReservedIDs.__RESERVED__) { + blocks[index][i] = c; + } + } + } + } + if (set.hasBiomes(layer)) { + if (populateBiomes(layer)) { + if (biomes[index].length == 1) { + BiomeType b = biomes[index][0]; + biomes[index] = new BiomeType[64]; // expand the single-value biome palette before merging, mirroring the block branch above + Arrays.fill(biomes[index], b); + } + for (int i = 0; i < 64; i++) { + BiomeType b = set.getBiomes()[setIndex][i]; + if (b != null) { + biomes[index][i] = b; + } + } + } + } + if (set.getTiles() != null) { + tiles.putAll(set.getTiles()); + } + } + mcaFile.setChunk(this); + + //noinspection unchecked - required at compile time + return (V) (Future)
Fawe.instance().getQueueHandler().sync(finalize); + } + + @Override + public CompoundTag getEntity(final UUID uuid) { + throw new UnsupportedOperationException("Not supported in Anvil queue mode."); + } + + @Override + public boolean isCreateCopy() { + return false; + } + + @Override + public void setCreateCopy(final boolean createCopy) { + if (createCopy) { + throw new UnsupportedOperationException("Not supported in Anvil queue mode."); + } + } + + @Override + public void setLightingToGet(final char[][] lighting, final int startSectionIndex, final int endSectionIndex) { + throw new UnsupportedOperationException("Not supported in Anvil queue mode."); + } + + @Override + public void setSkyLightingToGet(final char[][] lighting, final int startSectionIndex, final int endSectionIndex) { + throw new UnsupportedOperationException("Not supported in Anvil queue mode."); + } + + @Override + public void setHeightmapToGet(final HeightMapType type, final int[] data) { + checkLoaded(); + heightMaps[type.ordinal()] = data; + } + + @Override + public int getMinY() { + checkLoaded(); + return minSectionPosition >> 4; + } + + @Override + public int getMaxY() { + checkLoaded(); + return (maxSectionPosition >> 4) + 15; + } + + @Override + public boolean trim(final boolean aggressive) { + return false; + } + + public int getX() { + return chunkX; + } + + public int getZ() { + return chunkZ; + } + + @Override + public void filterBlocks( + final Filter filter, final ChunkFilterBlock block, @Nullable final Region region, final boolean full + ) { + + } + + public long getInhabitedTime() { + return inhabitedTime; + } + + public long getLastUpdate() { + return lastUpdate; + } + + public void setLastUpdate(final long lastUpdate) { + this.lastUpdate = lastUpdate; + } + + public enum Status { + EMPTY, + STRUCTURE_STARTS, + STRUCTURE_REFERENCES, + BIOMES, + NOISE, + SURFACE, + CARVERS, + LIQUID_CARVERS, + FEATURES, + INITIALIZE_LIGHT, + LIGHT, + SPAWN, + HEIGHTMAPS, + FULL + } + + @Override + public int hashCode() { + return MathMan.pairSearchCoords(chunkX, chunkZ); + } + +} diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/anvil/MCAFile.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/anvil/MCAFile.java new file mode 100644 index 000000000..9787b33c9 --- /dev/null +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/anvil/MCAFile.java @@ -0,0 +1,595 @@ +package com.fastasyncworldedit.core.anvil; + +import com.fastasyncworldedit.core.math.FastBitSet; +import com.fastasyncworldedit.core.util.task.RunnableVal4; +import com.plotsquared.core.util.task.RunnableVal; +import com.sk89q.jnbt.NBTInputStream; +import com.sk89q.worldedit.MissingWorldException; +import com.sk89q.worldedit.internal.util.LogManagerCompat; +import com.sk89q.worldedit.util.nbt.BinaryTagIO; +import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap; +import it.unimi.dsi.fastutil.ints.Int2ObjectMap; +import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; +import it.unimi.dsi.fastutil.io.FastByteArrayInputStream; +import org.apache.logging.log4j.Logger; + +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import java.io.BufferedInputStream; +import java.io.Closeable; +import java.io.Flushable; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Queue; +import java.util.concurrent.LinkedBlockingDeque; 
+import java.util.zip.GZIPInputStream; +import java.util.zip.InflaterInputStream; + +@SuppressWarnings({"removal"}) +public class MCAFile implements Closeable, Flushable { + + private static final Logger LOGGER = LogManagerCompat.getLogger(); + + private static final int CHUNK_HEADER_SIZE = 5; + private static final byte VERSION_GZIP = 1; + private static final byte VERSION_DEFLATE = 2; + private static final byte VERSION_UNCOMPRESSED = 3; + private static final int SECTOR_BYTES = 4096; + private static final int SECTOR_INTS = SECTOR_BYTES / 4; + + private final Int2IntOpenHashMap offsetMap; + private final Path file; + private RandomAccessFile raf; + private int[] offsets; + private boolean deleted; + private final int X, Z; + private final Int2ObjectOpenHashMap chunks = new Int2ObjectOpenHashMap<>(); + private FastBitSet sectorFree; + private boolean closed = false; + private volatile boolean init = false; + + public MCAFile(Path file) { + this.file = file; + if (!Files.exists(file)) { + throw new MissingWorldException(); + } + String[] split = file.getFileName().toString().split("\\."); + X = Integer.parseInt(split[1]); + Z = Integer.parseInt(split[2]); + offsetMap = new Int2IntOpenHashMap(); + offsetMap.defaultReturnValue(Integer.MAX_VALUE); + init(); + } + + public MCAFile(int mcrX, int mcrZ, Path file) { + this.file = file; + X = mcrX; + Z = mcrZ; + offsetMap = new Int2IntOpenHashMap(); + offsetMap.defaultReturnValue(Integer.MAX_VALUE); + init(); + } + + public void clear() { + if (raf != null) { + try { + raf.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + synchronized (chunks) { + chunks.clear(); + } + offsetMap.clear(); + offsets = null; + } + + /** + * Set if the file should be delete + */ + public void setDeleted(boolean deleted) { + if (!init) { + init(); + } + this.deleted = deleted; + } + + /** + * Get if the file has been set to be deleted + */ + public boolean isDeleted() { + return deleted; + } + + /** + * Initialises the RandomAccessFile and loads the location header from disk if not done yet + */ + public synchronized void init() { + try { + if (raf == null) { + this.offsets = new int[SECTOR_INTS]; + if (file != null) { + this.raf = new RandomAccessFile(file.toFile(), "rw"); + final int nSectors = (int) Math.round(Math.ceil((double) raf.length() / SECTOR_BYTES)); + sectorFree = new FastBitSet(nSectors); + sectorFree.setAll(); + sectorFree.set(0, false); + sectorFree.set(1, false); + if (raf.length() < 8192) { + raf.setLength(8192); + } else { + if ((raf.length() & 0xFFF) != 0) { + raf.setLength(((raf.length() + 0xFFF) >> 12) << 12); + } + raf.seek(0); + for (int i = 0; i < SECTOR_INTS; i++) { + final int offset = raf.readInt(); + offsets[i] = offset; + int sectorStart = offset >> 8; + int numSectors = offset & 0xFF; + if (offset != 0 && sectorStart + numSectors <= sectorFree.size()) { + offsetMap.put(offset, i); + for (int sectorNum = 0; sectorNum < (offset & 0xFF); sectorNum++) { + sectorFree.set((offset >> 8) + sectorNum, false); + } + } + } + } + } + init = true; + closed = false; + } + } catch (Throwable e) { + e.printStackTrace(); + } + } + + /** + * Get the region file X + */ + public int getX() { + return X; + } + + /** + * Get the region file Z + */ + public int getZ() { + return Z; + } + + /** + * Get the RandomAccessFile of the MCA region file + */ + public RandomAccessFile getRandomAccessFile() { + return raf; + } + + /** + * Get the MCA region file + */ + public Path getFile() { + return file; + } + + /** + * Gets a cached {@link 
MCAChunk} if present, else returns null. + */ + @Nullable + public MCAChunk getCachedChunk(int cx, int cz) { + if (!init) { + init(); + } + short pair = (short) ((cx & 31) + ((cz & 31) << 5)); + synchronized (chunks) { + return chunks.get(pair); + } + } + + /** + * Create a new empty {@link MCAChunk}. + */ + public MCAChunk newChunk(int cx, int cz) { + if (!init) { + init(); + } + short pair = (short) ((cx & 31) + ((cz & 31) << 5)); + MCAChunk chunk; + synchronized (chunks) { + chunks.put(pair, chunk = new MCAChunk(this, cx, cz)); + } + return chunk; + } + + /** + * Insert a {@link MCAChunk} into the cache. + */ + public void setChunk(MCAChunk chunk) { + if (!init) { + init(); + } + int cx = chunk.getX(); + int cz = chunk.getZ(); + short pair = (short) ((cx & 31) + ((cz & 31) << 5)); + synchronized (chunks) { + chunks.put(pair, chunk); + } + } + + /** + * Load data from the mca region into the given {@link MCAChunk}. + */ + public void loadIntoChunkFromFile(MCAChunk chunk) throws IOException { + if (!init) { + init(); + } + int cx = chunk.getX(); + int cz = chunk.getZ(); + int i = (cx & 31) + ((cz & 31) << 5); + int offset = offsets[i]; + synchronized (this) { + if (offset == 0) { + chunk.setEmpty(true); + return; + } + chunk.loadFromNIS(getChunkIS(offset >> 8), false); + if (offset == 0) { + return; + } + if (i < 2) { + int length; + byte version; + byte[] data; + synchronized (this) { + raf.seek((long) (offset >> 8) << 12); + length = raf.readInt(); + version = raf.readByte(); + data = new byte[length - 1]; + raf.read(data); + } + FastByteArrayInputStream bais = new FastByteArrayInputStream(data); + BufferedInputStream bis = switch (version) { + case VERSION_GZIP -> new BufferedInputStream(new GZIPInputStream(bais)); + case VERSION_DEFLATE -> new BufferedInputStream(new InflaterInputStream(bais)); + case VERSION_UNCOMPRESSED -> new BufferedInputStream(bais); + default -> throw new IllegalStateException("Unexpected compression version: " + version); + }; + } + } + } + + @Nonnull + public MCAChunk getChunk(int cx, int cz) throws IOException { + if (!init) { + init(); + } + MCAChunk cached = getCachedChunk(cx, cz); + if (cached != null) { + return cached; + } else { + return readChunk(cx, cz); + } + } + + public MCAChunk readChunk(int cx, int cz) throws IOException { + if (!init) { + init(); + } + int i = (cx & 31) + ((cz & 31) << 5); + int offset = offsets[i]; + if (offset == 0) { + return newChunk(cx, cz); + } + try { + MCAChunk chunk; + synchronized (this) { + chunk = getChunkIS(offset >> 8, cx, cz); + } + if (i < 2) { + int length; + byte version; + byte[] data; + synchronized (this) { + raf.seek((long) (offset >> 8) << 12); + length = raf.readInt(); + version = raf.readByte(); + data = new byte[length - 1]; + raf.read(data); + } + FastByteArrayInputStream bais = new FastByteArrayInputStream(data); + BufferedInputStream bis = switch (version) { + case VERSION_GZIP -> new BufferedInputStream(new GZIPInputStream(bais)); + case VERSION_DEFLATE -> new BufferedInputStream(new InflaterInputStream(bais)); + case VERSION_UNCOMPRESSED -> new BufferedInputStream(bais); + default -> throw new IllegalStateException("Unexpected compression version: " + version); + }; + } + short pair = (short) ((cx & 31) + ((cz & 31) << 5)); + synchronized (chunks) { + chunks.put(pair, chunk); + } + return chunk; + } catch (Exception e) { + throw new RuntimeException("Error attempting to read chunk locally located at `" + (cx & 31) + "," + (cz & 31) + "`" + " in file `" + file.getFileName() + "` at offset: `" +
(offset >> 8) + "`", e); + } + } + + /** + * @param onEach cx, cz, offset, size (in kB) + */ + public void forEachChunk(RunnableVal4 onEach) { + if (!init) { + init(); + } + int i = 0; + for (int z = 0; z < 32; z++) { + for (int x = 0; x < 32; x++, i += 4) { + int offset = offsets[x + (z << 5)]; + if (offset != 0) { + int size = offset & 0xFF; + onEach.run(x, z, offset >> 8, size); + } + } + } + } + + public void forEachChunk(RunnableVal onEach) { + if (!init) { + init(); + } + int rx = X << 5; + int rz = Z << 5; + for (int z = 0; z < 32; z++) { + for (int x = 0; x < 32; x++) { + int offset = offsets[x + (z << 5)]; + if (offset != 0) { + try { + onEach.run(getChunk(rx + x, rz + z)); + } catch (Throwable ignore) { + } + } + } + } + } + + private NBTInputStream getChunkIS(int offset) throws IOException { + int length = -1; + byte version = -1; + byte[] data; + try { + if (offset == 0) { + return null; + } + synchronized (this) { + raf.seek((long) offset << 12); + length = raf.readInt(); + version = raf.readByte(); + data = new byte[length - 1]; + raf.read(data); + } + FastByteArrayInputStream bais = new FastByteArrayInputStream(data); + BufferedInputStream bis = switch (version) { + case VERSION_GZIP -> new BufferedInputStream(new GZIPInputStream(bais)); + case VERSION_DEFLATE -> new BufferedInputStream(new InflaterInputStream(bais)); + case VERSION_UNCOMPRESSED -> new BufferedInputStream(bais); + default -> throw new IllegalStateException("Unexpected compression version: " + version); + }; + return new NBTInputStream(bis); + } catch (IOException e) { + throw new IOException("Length: " + length + ", version: " + version + ", offset: " + offset, e); + } + } + + private MCAChunk getChunkIS(int offset, int cx, int cz) throws IOException { + int length = -1; + byte version = -1; + byte[] data; + try { + if (offset == 0) { + return null; + } + synchronized (this) { + raf.seek((long) offset << 12); + length = raf.readInt(); + version = raf.readByte(); + data = new byte[length - 1]; + raf.read(data); + } + FastByteArrayInputStream bais = new FastByteArrayInputStream(data); + BufferedInputStream bis = switch (version) { + case VERSION_GZIP -> new BufferedInputStream(new GZIPInputStream(bais)); + case VERSION_DEFLATE -> new BufferedInputStream(new InflaterInputStream(bais)); + case VERSION_UNCOMPRESSED -> new BufferedInputStream(bais); + default -> throw new IllegalStateException("Unexpected compression version: " + version); + }; + return new MCAChunk(this, new NBTInputStream(bis), cx, cz, false); + } catch (Exception e) { + throw new IOException("Length: " + length + ", version: " + version + ", offset: " + offset, e); + } + } + + public List getCachedChunks() { + synchronized (chunks) { + return new ArrayList<>(chunks.values()); + } + } + + public void uncache(int cx, int cz) { + int pair = (cx & 31) + ((cz & 31) << 5); + synchronized (chunks) { + chunks.remove(pair); + } + } + + @Override + public synchronized void close() throws IOException { + if (raf == null || closed) { + return; + } + flush(); + try { + raf.close(); + } catch (IOException e) { + e.printStackTrace(); + } + raf = null; + offsets = null; + offsetMap.clear(); + closed = true; + init = false; + } + + public boolean isModified() { + if (isDeleted()) { + return true; + } + synchronized (chunks) { + for (Int2ObjectMap.Entry entry : chunks.int2ObjectEntrySet()) { + MCAChunk chunk = entry.getValue(); + if (chunk.isModified() || chunk.isDeleted()) { + return true; + } + } + } + return false; + } + + public synchronized void 
setOffset(final int x, final int z, final int offset) + throws IOException { + int i = (x & 31) + ((z & 31) << 5); + if (offset == 0) { + offsetMap.remove(offsets[i]); + } else { + offsetMap.put(offset, i); + } + offsets[x + (z << 5)] = offset; + raf.seek((long) i << 2); + raf.writeInt(offset); + } + + /** + * Write the chunk to the file + */ + @Override + public synchronized void flush() throws IOException { + boolean delete = true; + int currentSector = 2; + Queue offsets = + new LinkedBlockingDeque<>(Arrays.stream(this.offsets).boxed().sorted(Comparator.comparingInt(i -> (i >> 8))).toList()); + int offset; + int count = 0; + while (offsets.peek() != null) { + count++; + offset = offsets.poll(); + if (offset == 0) { + continue; + } + delete = false; + int pair = offsetMap.get(offset); + int sectorNumber = offset >> 8; + int sectorsAllocated = offset & 0xFF; + if (sectorNumber < 2) { + throw new IllegalStateException("Sector number cannot be < 2!"); + } + if (pair == Integer.MAX_VALUE) { + sectorFree.setRange(sectorNumber, sectorNumber + sectorsAllocated); + continue; + } + MCAChunk chunk = chunks.remove(pair); + + byte[] data; + int sectorsNeeded; + boolean writeChunkHeader; + + if (currentSector <= sectorNumber) { // Only set free if we definitely won't be overwriting + sectorFree.setRange(sectorNumber, sectorNumber + sectorsAllocated); + } else { + throw new IllegalStateException("Current sector number being written to cannot exceed sector number of chunk to" + + " be written!"); + } + if (chunk == null) { + if (currentSector != sectorNumber) { + writeChunkHeader = false; + data = new byte[sectorsAllocated << 12]; + sectorsNeeded = sectorsAllocated; + raf.seek((long) sectorNumber << 12); + raf.read(data); + } else { + sectorFree.clearRange(currentSector, currentSector + sectorsAllocated); + currentSector += sectorsAllocated; + continue; + } + } else if (chunk.isDeleted()) { + int x = pair & 31; + int z = (pair >> 5) & 31; + setOffset(x, z, 0); + continue; + } else { + data = chunk.toBytes(null); + writeChunkHeader = true; + sectorsNeeded = ((data.length + CHUNK_HEADER_SIZE) >> 12) + 1; + } + + boolean hasSpace = sectorsNeeded <= sectorsAllocated; + long position = (long) currentSector << 12; + if (!hasSpace) { + hasSpace = true; + for (int i = currentSector; i < currentSector + sectorsNeeded; i++) { + if (!sectorFree.get(i) || i > sectorFree.size()) { + hasSpace = false; + break; + } + } + if (!hasSpace) { + if (sectorNumber > 2500 || count > 30000) { + throw new RuntimeException(); + } + sectorNumber = sectorFree.size() + 1; + offset = (sectorNumber << 8) | (sectorsNeeded & 0xFF); + setOffset(pair & 31, (pair >> 5) & 31, offset); + position = (long) sectorNumber << 12; + raf.setLength((long) (sectorNumber + sectorsNeeded + 1) << 12); + sectorFree.expandTo(sectorNumber + sectorsNeeded, true); + sectorFree.clearRange(sectorNumber, sectorNumber + sectorsNeeded); + offsets.add(offset); // Come back later to clean up. 
+ } + } + raf.seek(position); + if (writeChunkHeader) { + raf.writeInt(data.length + 1); + raf.writeByte(VERSION_DEFLATE); + } + raf.write(data); + if (hasSpace) { + offset = (currentSector << 8) | (sectorsNeeded & 0xFF); + setOffset(pair & 31, (pair >> 5) & 31, offset); + sectorFree.clearRange(currentSector, currentSector + sectorsNeeded); + currentSector += sectorsNeeded; + } + } + int size = 0; + for (int i = sectorFree.size(); i > 0; i--) { + if (!sectorFree.get(i)) { + size = i + 1; + break; + } + } + raf.setLength((long) (size + 1) * SECTOR_BYTES); + if (delete || size < 3) { + clear(); + Files.delete(file); + } + synchronized (chunks) { + chunks.clear(); + } + } + +} diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/anvil/MCAWorld.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/anvil/MCAWorld.java new file mode 100644 index 000000000..f06b15182 --- /dev/null +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/anvil/MCAWorld.java @@ -0,0 +1,202 @@ +package com.fastasyncworldedit.core.anvil; + +import com.fastasyncworldedit.core.Fawe; +import com.fastasyncworldedit.core.queue.IChunkGet; +import com.fastasyncworldedit.core.queue.implementation.packet.ChunkPacket; +import com.fastasyncworldedit.core.util.MathMan; +import com.sk89q.jnbt.CompoundTag; +import com.sk89q.worldedit.EditSession; +import com.sk89q.worldedit.MaxChangedBlocksException; +import com.sk89q.worldedit.WorldEditException; +import com.sk89q.worldedit.blocks.BaseItemStack; +import com.sk89q.worldedit.entity.Player; +import com.sk89q.worldedit.internal.util.LogManagerCompat; +import com.sk89q.worldedit.math.BlockVector3; +import com.sk89q.worldedit.math.Vector3; +import com.sk89q.worldedit.util.SideEffect; +import com.sk89q.worldedit.util.SideEffectSet; +import com.sk89q.worldedit.util.TreeGenerator; +import com.sk89q.worldedit.world.AbstractWorld; +import com.sk89q.worldedit.world.block.BlockState; +import com.sk89q.worldedit.world.block.BlockStateHolder; +import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; +import org.apache.logging.log4j.Logger; +import org.jetbrains.annotations.Nullable; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Set; + +public class MCAWorld extends AbstractWorld { + + private static final Logger LOGGER = LogManagerCompat.getLogger(); + + private static final Int2ObjectOpenHashMap worldMap = new Int2ObjectOpenHashMap<>(); + private final String name; + private final Path folder; + private final Path regionFolder; + private final Int2ObjectOpenHashMap mcaFileCache = new Int2ObjectOpenHashMap<>(); + + private MCAWorld(String name, Path folder) { + this.name = name; + this.folder = folder; + this.regionFolder = folder.resolve("region"); + } + + /** + * New MCAWorld instance. + * + * @param name World name + */ + public static synchronized MCAWorld of(String name) { + return of(name, Fawe.platform().getWorldsFolder().resolve(name)); + } + + /** + * New MCAWorld instance. + * + * @param name World name + * @param folder World file/folder + */ + public static synchronized MCAWorld of(String name, Path folder) { + if (Fawe.platform().isWorldLoaded(name)) { + throw new IllegalStateException("World " + name + " is loaded. 
Anvil operations cannot be completed on a loaded world."); + } + // World could be the same name but in a different folder + int combinedHash = Objects.hash(name, folder); + return worldMap.computeIfAbsent(combinedHash, (i) -> new MCAWorld(name, folder)); + } + + public Collection getMCAs() { + getRegionFileFiles().forEach(file -> { + String[] split = file.getFileName().toString().split("\\."); + short regionX = Short.parseShort(split[1]); + short regionZ = Short.parseShort(split[2]); + int paired = MathMan.pair(regionX, regionZ); + mcaFileCache.computeIfAbsent( + paired, + (i) -> new MCAFile(regionX, regionZ, regionFolder.resolve("r." + regionX + "." + regionZ + ".mca")) + ); + }); + return mcaFileCache.values(); + } + + public List getRegionFileFiles() { + try { + return Files.list(regionFolder).filter(p -> p.toString().endsWith(".mca")).toList(); + } catch (IOException e) { + LOGGER.error("Error listing region files", e); + return Collections.emptyList(); + } + } + + @Override + public boolean setTile(final int x, final int y, final int z, final CompoundTag tile) throws WorldEditException { + return false; + } + + @Override + public String getName() { + return name; + } + + @Override + public String getNameUnsafe() { + return name; + } + + public Path getFolder() { + return folder; + } + + @Override + public > boolean setBlock( + final BlockVector3 position, + final B block, + final SideEffectSet sideEffects + ) throws WorldEditException { + return false; + } + + @Override + public Set applySideEffects( + final BlockVector3 position, + final BlockState previousType, + final SideEffectSet sideEffectSet + ) throws WorldEditException { + return null; + } + + @Override + public boolean clearContainerBlockContents(final BlockVector3 position) { + return false; + } + + @Override + public void dropItem(final Vector3 position, final BaseItemStack item) { + + } + + @Override + public void simulateBlockMine(final BlockVector3 position) { + + } + + @Override + public boolean generateTree( + final TreeGenerator.TreeType type, + final EditSession editSession, + final BlockVector3 position + ) throws MaxChangedBlocksException { + return false; + } + + @Override + public BlockVector3 getSpawnPosition() { + return null; + } + + @Override + public void refreshChunk(final int chunkX, final int chunkZ) { + throw new UnsupportedOperationException("Not supported in anvil operations."); + } + + @Override + public IChunkGet get(final int chunkX, final int chunkZ) { + short regionX = (short) (chunkX >> 5); + short regionZ = (short) (chunkZ >> 5); + int paired = MathMan.pair(regionX, regionZ); + MCAFile mca = mcaFileCache.computeIfAbsent( + paired, + (i) -> new MCAFile(regionX, regionZ, regionFolder.resolve("r." + regionX + "." + regionZ + ".mca")) + ); + try { + return mca.getChunk(chunkX, chunkZ); + } catch (IOException e) { + LOGGER.error("Error loading chunk. 
Creating empty chunk.", e); + return mca.newChunk(chunkX, chunkZ); + } + } + + @Override + public void sendFakeChunk(@Nullable final Player player, final ChunkPacket packet) { + throw new UnsupportedOperationException("Not supported in anvil operations."); + } + + @Override + public synchronized void flush() { + for (MCAFile mca : mcaFileCache.values()) { + try { + mca.close(); + } catch (IOException e) { + LOGGER.error("Could not flush MCAFile {}", mca.getFile().getFileName(), e); + } + } + } + +} diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/internal/io/BufferedRandomAccessFile.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/internal/io/BufferedRandomAccessFile.java new file mode 100644 index 000000000..b5b26f56b --- /dev/null +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/internal/io/BufferedRandomAccessFile.java @@ -0,0 +1,434 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.fastasyncworldedit.core.internal.io; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.util.Arrays; + + +/** + * A BufferedRandomAccessFile is like a + * RandomAccessFile, but it uses a private buffer so that most + * operations do not require a disk access. + *

+ * + * Note: The operations on this class are unmonitored. Also, the correct + * functioning of the RandomAccessFile methods that are not + * overridden here relies on the implementation of those methods in the + * superclass. + * Author : Avinash Lakshman ( alakshman@facebook.com) & Prashant Malik ( pmalik@facebook.com ) + */ + +public class BufferedRandomAccessFile extends RandomAccessFile { + static final int LogBuffSz_ = 16; // 64K buffer + public static final int BuffSz_ = (1 << LogBuffSz_); + static final long BuffMask_ = ~(((long) BuffSz_) - 1L); + + /* + * This implementation is based on the buffer implementation in Modula-3's + * "Rd", "Wr", "RdClass", and "WrClass" interfaces. + */ + private boolean dirty_; // true iff unflushed bytes exist + private boolean closed_; // true iff the file is closed + private long curr_; // current position in file + private long lo_, hi_; // bounds on characters in "buff" + private byte[] buff_; // local buffer + private long maxHi_; // this.lo + this.buff.length + private boolean hitEOF_; // buffer contains last file block? + private long diskPos_; // disk position + + /* + * To describe the above fields, we introduce the following abstractions for + * the file "f": + * + * len(f) the length of the file curr(f) the current position in the file + * c(f) the abstract contents of the file disk(f) the contents of f's + * backing disk file closed(f) true iff the file is closed + * + * "curr(f)" is an index in the closed interval [0, len(f)]. "c(f)" is a + * character sequence of length "len(f)". "c(f)" and "disk(f)" may differ if + * "c(f)" contains unflushed writes not reflected in "disk(f)". The flush + * operation has the effect of making "disk(f)" identical to "c(f)". + * + * A file is said to be *valid* if the following conditions hold: + * + * V1. The "closed" and "curr" fields are correct: + * + * f.closed == closed(f) f.curr == curr(f) + * + * V2. The current position is either contained in the buffer, or just past + * the buffer: + * + * f.lo <= f.curr <= f.hi + * + * V3. Any (possibly) unflushed characters are stored in "f.buff": + * + * (forall i in [f.lo, f.curr): c(f)[i] == f.buff[i - f.lo]) + * + * V4. For all characters not covered by V3, c(f) and disk(f) agree: + * + * (forall i in [f.lo, len(f)): i not in [f.lo, f.curr) => c(f)[i] == + * disk(f)[i]) + * + * V5. "f.dirty" is true iff the buffer contains bytes that should be + * flushed to the file; by V3 and V4, only part of the buffer can be dirty. + * + * f.dirty == (exists i in [f.lo, f.curr): c(f)[i] != f.buff[i - f.lo]) + * + * V6. this.maxHi == this.lo + this.buff.length + * + * Note that "f.buff" can be "null" in a valid file, since the range of + * characters in V3 is empty when "f.lo == f.curr". + * + * A file is said to be *ready* if the buffer contains the current position, + * i.e., when: + * + * R1. !f.closed && f.buff != null && f.lo <= f.curr && f.curr < f.hi + * + * When a file is ready, reading or writing a single byte can be performed + * by reading or writing the in-memory buffer without performing a disk + * operation. + */ + + /** + * Open a new BufferedRandomAccessFile on file + * in mode mode, which should be "r" for reading only, or + * "rw" for reading and writing. 
+ */ + public BufferedRandomAccessFile(File file, String mode) throws IOException { + super(file, mode); + this.init(0); + } + + public BufferedRandomAccessFile(File file, String mode, int size) throws IOException { + super(file, mode); + this.init(size); + } + + /** + * Open a new BufferedRandomAccessFile on the file named + * name in mode mode, which should be "r" for + * reading only, or "rw" for reading and writing. + */ + public BufferedRandomAccessFile(String name, String mode) throws IOException { + super(name, mode); + this.init(0); + } + + public BufferedRandomAccessFile(String name, String mode, int size) throws FileNotFoundException { + super(name, mode); + this.init(size); + } + + public BufferedRandomAccessFile(File file, String mode, byte[] buf) throws FileNotFoundException { + super(file, mode); + this.dirty_ = this.closed_ = false; + this.lo_ = this.curr_ = this.hi_ = 0; + this.buff_ = buf; + this.maxHi_ = (long) BuffSz_; + this.hitEOF_ = false; + this.diskPos_ = 0L; + } + + private void init(int size) { + this.dirty_ = this.closed_ = false; + this.lo_ = this.curr_ = this.hi_ = 0; + this.buff_ = (size > BuffSz_) ? new byte[size] : new byte[BuffSz_]; + this.maxHi_ = (long) BuffSz_; + this.hitEOF_ = false; + this.diskPos_ = 0L; + } + + @Override + public void close() throws IOException { + this.flush(); + this.closed_ = true; + super.close(); + } + + /** + * Flush any bytes in the file's buffer that have not yet been written to + * disk. If the file was created read-only, this method is a no-op. + */ + public void flush() throws IOException { + this.flushBuffer(); + } + + /* Flush any dirty bytes in the buffer to disk. */ + private void flushBuffer() throws IOException { + if (this.dirty_) { + if (this.diskPos_ != this.lo_) + super.seek(this.lo_); + int len = (int) (this.curr_ - this.lo_); + super.write(this.buff_, 0, len); + this.diskPos_ = this.curr_; + this.dirty_ = false; + } + } + + /* + * Read at most "this.buff.length" bytes into "this.buff", returning the + * number of bytes read. If the return result is less than + * "this.buff.length", then EOF was read. + */ + private int fillBuffer() throws IOException { + int cnt = 0; + int rem = this.buff_.length; + while (rem > 0) { + int n = super.read(this.buff_, cnt, rem); + if (n < 0) + break; + cnt += n; + rem -= n; + } + if ((cnt < 0) && (this.hitEOF_ = (cnt < this.buff_.length))) { + // make sure buffer that wasn't read is initialized with -1 + Arrays.fill(this.buff_, cnt, this.buff_.length, (byte) 0xff); + } + this.diskPos_ += cnt; + return cnt; + } + + /* + * This method positions this.curr at position pos. + * If pos does not fall in the current buffer, it flushes the + * current buffer and loads the correct one.
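 + * For example, with the default 64 KiB buffer (BuffSz_ = 1 << 16, so BuffMask_ keeps only whole-buffer multiples), a
 + * seek(70000) issued straight after the file is opened sets this.lo_ = 70000 & BuffMask_ = 65536, refills the buffer
 + * from that disk offset and sets this.hi_ = this.lo_ + the number of bytes actually read; subsequent reads and writes
 + * with this.curr_ inside [lo_, hi_) are then served from the in-memory buffer, dirty bytes reaching disk only when
 + * flushBuffer() runs.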

+ * + * On exit from this routine this.curr == this.hi iff pos + * is at or past the end-of-file, which can only happen if the file was + * opened in read-only mode. + */ + @Override + public void seek(long pos) throws IOException { + if (pos >= this.hi_ || pos < this.lo_) { + // seeking outside of current buffer -- flush and read + this.flushBuffer(); + this.lo_ = pos & BuffMask_; // start at BuffSz boundary + this.maxHi_ = this.lo_ + (long) this.buff_.length; + if (this.diskPos_ != this.lo_) { + super.seek(this.lo_); + this.diskPos_ = this.lo_; + } + int n = this.fillBuffer(); + this.hi_ = this.lo_ + (long) n; + } else { + // seeking inside current buffer -- no read required + if (pos < this.curr_) { + // if seeking backwards, we must flush to maintain V4 + this.flushBuffer(); + } + } + this.curr_ = pos; + } + + /* + * Does not maintain V4 (i.e. buffer differs from disk contents if previously written to) + * - Assumes no writes were made + * @param pos + * @throws IOException + */ + public void seekUnsafe(long pos) throws IOException { + if (pos >= this.hi_ || pos < this.lo_) { + // seeking outside of current buffer -- flush and read + this.flushBuffer(); + this.lo_ = pos & BuffMask_; // start at BuffSz boundary + this.maxHi_ = this.lo_ + (long) this.buff_.length; + if (this.diskPos_ != this.lo_) { + super.seek(this.lo_); + this.diskPos_ = this.lo_; + } + int n = this.fillBuffer(); + this.hi_ = this.lo_ + (long) n; + } + this.curr_ = pos; + } + + @Override + public long getFilePointer() { + return this.curr_; + } + + @Override + public long length() throws IOException { + return Math.max(this.curr_, super.length()); + } + + @Override + public int read() throws IOException { + if (this.curr_ >= this.hi_) { + // test for EOF + // if (this.hi < this.maxHi) return -1; + if (this.hitEOF_) + return -1; + + // slow path -- read another buffer + this.seek(this.curr_); + if (this.curr_ == this.hi_) + return -1; + } + byte res = this.buff_[(int) (this.curr_ - this.lo_)]; + this.curr_++; + return ((int) res) & 0xFF; // convert byte -> int + } + + public byte read1() throws IOException { + if (this.curr_ >= this.hi_) { + // test for EOF + // if (this.hi < this.maxHi) return -1; + if (this.hitEOF_) + return -1; + + // slow path -- read another buffer + this.seek(this.curr_); + if (this.curr_ == this.hi_) + return -1; + } + byte res = this.buff_[(int) (this.curr_ - this.lo_)]; + this.curr_++; + return res; + } + + @Override + public int read(byte[] b) throws IOException { + return this.read(b, 0, b.length); + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + if (this.curr_ >= this.hi_) { + // test for EOF + // if (this.hi < this.maxHi) return -1; + if (this.hitEOF_) + return -1; + + // slow path -- read another buffer + this.seek(this.curr_); + if (this.curr_ == this.hi_) + return -1; + } + len = Math.min(len, (int) (this.hi_ - this.curr_)); + int buffOff = (int) (this.curr_ - this.lo_); + System.arraycopy(this.buff_, buffOff, b, off, len); + this.curr_ += len; + return len; + } + + public byte readCurrent() throws IOException { + if (this.curr_ >= this.hi_) { + // test for EOF + // if (this.hi < this.maxHi) return -1; + if (this.hitEOF_) + return -1; + + // slow path -- read another buffer + this.seek(this.curr_); + if (this.curr_ == this.hi_) + return -1; + } + byte res = this.buff_[(int) (this.curr_ - this.lo_)]; + return res; + } + + public void writeCurrent(byte b) throws IOException { + if (this.curr_ >= this.hi_) { + if (this.hitEOF_ && this.hi_ < this.maxHi_) 
{ + // at EOF -- bump "hi" + this.hi_++; + } else { + // slow path -- write current buffer; read next one + this.seek(this.curr_); + if (this.curr_ == this.hi_) { + // appending to EOF -- bump "hi" + this.hi_++; + } + } + } + this.buff_[(int) (this.curr_ - this.lo_)] = (byte) b; + this.dirty_ = true; + } + + public void writeUnsafe(int b) throws IOException { + this.buff_[(int) (this.curr_ - this.lo_)] = (byte) b; + this.curr_++; + this.dirty_ = true; + } + + @Override + public void write(int b) throws IOException { + if (this.curr_ >= this.hi_) { + if (this.hitEOF_ && this.hi_ < this.maxHi_) { + // at EOF -- bump "hi" + this.hi_++; + } else { + // slow path -- write current buffer; read next one + this.seek(this.curr_); + if (this.curr_ == this.hi_) { + // appending to EOF -- bump "hi" + this.hi_++; + } + } + } + this.buff_[(int) (this.curr_ - this.lo_)] = (byte) b; + this.curr_++; + this.dirty_ = true; + } + + @Override + public void write(byte[] b) throws IOException { + this.write(b, 0, b.length); + } + + @Override + public void write(byte[] b, int off, int len) throws IOException { + while (len > 0) { + int n = this.writeAtMost(b, off, len); + off += n; + len -= n; + this.dirty_ = true; + } + } + + /* + * Write at most "len" bytes to "b" starting at position "off", and return + * the number of bytes written. + */ + private int writeAtMost(byte[] b, int off, int len) throws IOException { + if (this.curr_ >= this.hi_) { + if (this.hitEOF_ && this.hi_ < this.maxHi_) { + // at EOF -- bump "hi" + this.hi_ = this.maxHi_; + } else { + // slow path -- write current buffer; read next one + this.seek(this.curr_); + if (this.curr_ == this.hi_) { + // appending to EOF -- bump "hi" + this.hi_ = this.maxHi_; + } + } + } + len = Math.min(len, (int) (this.hi_ - this.curr_)); + int buffOff = (int) (this.curr_ - this.lo_); + System.arraycopy(b, off, this.buff_, buffOff, len); + this.curr_ += len; + return len; + } +} diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/jnbt/streamer/StreamDelegate.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/jnbt/streamer/StreamDelegate.java index 8b4e0dcda..8fdba9446 100644 --- a/worldedit-core/src/main/java/com/fastasyncworldedit/core/jnbt/streamer/StreamDelegate.java +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/jnbt/streamer/StreamDelegate.java @@ -2,13 +2,18 @@ package com.fastasyncworldedit.core.jnbt.streamer; import com.sk89q.jnbt.NBTConstants; import com.sk89q.jnbt.NBTInputStream; +import com.sk89q.jnbt.Tag; import com.sk89q.worldedit.internal.util.LogManagerCompat; import org.apache.logging.log4j.Logger; +import javax.annotation.Nullable; import java.io.DataInputStream; import java.io.IOException; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.Map; -@SuppressWarnings({"unchecked", "rawtypes"}) +@SuppressWarnings({"unchecked", "rawtypes", "removal"}) public class StreamDelegate { private static final Logger LOGGER = LogManagerCompat.getLogger(); @@ -16,6 +21,8 @@ public class StreamDelegate { private static final byte[][] ZERO_KEYS = new byte[0][]; private static final StreamDelegate[] ZERO_VALUES = new StreamDelegate[0]; + private Map retained = null; + private byte[] buffer; private byte[][] keys; private StreamDelegate[] values; @@ -25,25 +32,36 @@ public class StreamDelegate { private InfoReader infoReader; private ValueReader valueReader; + private String retainedName = null; + private String currentName = null; + + /** + * Used to read a streamed {@link NBTInputStream} + */ 
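+ // A minimal usage sketch of the retain support added in this class (variable and tag names below are illustrative
+ // only, not taken from the patch):
+ //   StreamDelegate root = new StreamDelegate();
+ //   StreamDelegate body = root.add(null).retainOthers();   // match the unnamed root compound, keep unhandled keys
+ //   body.add("LastUpdate").withLong((i, v) -> { /* explicitly handled keys are consumed as before */ });
+ //   nbtIn.readNamedTagLazy(root);
+ //   Map<String, Tag> unhandled = body.getRetained();       // unmatched entries, preserved as full Tags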
public StreamDelegate() { keys = ZERO_KEYS; values = ZERO_VALUES; } + /** + * Set that keys not added to this StreamDelegate instance should still be retained alongside their value retained. They can + * be accessed via {@link StreamDelegate#getRetained} + */ + public StreamDelegate retainOthers() { + retained = new LinkedHashMap<>(); + return this; + } + public StreamDelegate addAndGetParent(String name) { add(name); return this; } - public StreamDelegate add() { - return add(""); - } - - public StreamDelegate add(String name) { + public StreamDelegate add(@Nullable String name) { return add(name, new StreamDelegate()); } - private StreamDelegate add(String name, StreamDelegate scope) { + private StreamDelegate add(@Nullable String name, StreamDelegate scope) { if (valueReader != null) { LOGGER.warn( "Scope {} | {} may not run, as the stream is only read once, and a value reader is already set", @@ -51,7 +69,7 @@ public class StreamDelegate { scope ); } - byte[] bytes = name.getBytes(NBTConstants.CHARSET); + byte[] bytes = name == null ? new byte[0] : name.getBytes(NBTConstants.CHARSET); int maxSize = bytes.length; byte[][] tmpKeys = new byte[keys.length + 1][]; @@ -96,10 +114,12 @@ public class StreamDelegate { public StreamDelegate get(DataInputStream is) throws IOException { int nameLength = is.readShort() & 0xFFFF; if (nameLength == 0 && keys.length > 0 && keys[0].length == 0) { + currentName = ""; + retainedName = null; return values[0]; } if (nameLength > buffer.length) { - is.skipBytes(nameLength); + setRetained(is, nameLength); return null; } int index = 0; @@ -139,34 +159,59 @@ public class StreamDelegate { continue middle; } } + currentName = new String(key); + retainedName = null; return values[i]; } + currentName = null; + retainedName = new String(Arrays.copyOf(buffer, nameLength), NBTConstants.CHARSET); return null; } } + // fall through } case 1: { byte[] key = keys[index]; if (key.length == nameLength) { int i = 0; + boolean retain = false; for (; nameLength > 0; nameLength--, i++) { byte b = is.readByte(); buffer[i] = b; - if (b != key[i]) { - nameLength--; - break outer; + if (!retain && b != key[i]) { + if (retained == null) { + nameLength--; + break outer; + } + retain = true; } - } - return values[index]; + if (!retain) { + currentName = new String(key); + retainedName = null; + return values[index]; + } + retainedName = new String(Arrays.copyOf(buffer, i), NBTConstants.CHARSET); + return null; } break; } } - is.skipBytes(nameLength); + setRetained(is, nameLength); return null; } + private void setRetained(DataInputStream is, int nameLength) throws IOException { + if (retained == null) { + is.skipBytes(nameLength); + } else { + byte[] nameBytes = new byte[nameLength]; + is.readFully(nameBytes); + retainedName = new String(nameBytes, NBTConstants.CHARSET); + } + currentName = null; + } + public StreamDelegate withLong(LongValueReader valueReader) { return withElem(valueReader); } @@ -220,6 +265,19 @@ public class StreamDelegate { return elemReader; } + @Nullable + public Map getRetained() { + return retained; + } + + public void retain(Tag tag) { + if (retainedName == null) { + throw new IllegalStateException("Retained name null?!"); + } + retained.put(retainedName, tag); + retainedName = null; + } + public void acceptInfo(int length, int type) throws IOException { if (infoReader != null) { infoReader.apply(length, type); @@ -234,4 +292,12 @@ public class StreamDelegate { return false; } + public String getCurrentName() { + return currentName; + } + + public String 
getRetainedName() { + return retainedName; + } + } diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/math/FastBitSet.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/math/FastBitSet.java index fa7199d83..e163330e2 100644 --- a/worldedit-core/src/main/java/com/fastasyncworldedit/core/math/FastBitSet.java +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/math/FastBitSet.java @@ -1,11 +1,12 @@ package com.fastasyncworldedit.core.math; +import java.math.BigInteger; import java.util.Arrays; public class FastBitSet { - private final int size; - private final long[] bits; + private int size; + private long[] bits; public FastBitSet(int size) { this.size = size; @@ -93,6 +94,28 @@ public class FastBitSet { } } + public void expandTo(int newSize, boolean value) { + //System.out.println(newSize); + int newLength = (newSize + 64) >> 6; + if (newLength <= this.bits.length) { + if (this.size > newSize) { + this.size = newSize; + } + return; + } + long[] tmp = new long[newLength]; + if (value) { + Arrays.fill(tmp, -1L); + } + System.arraycopy(bits, 0, tmp, 0, bits.length); + this.bits = tmp; + this.size = newSize; + } + + public void setAll() { + setAll(bits); + } + public boolean get(final int i) { return (bits[i >> 6] & (1L << (i & 0x3F))) != 0; } @@ -197,4 +220,16 @@ public class FastBitSet { } + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + for (int i = 0; i < this.bits.length; i++) { + String bits = new StringBuilder(String.format("%064d", new BigInteger(Long.toBinaryString(this.bits[i])))) + .reverse() + .toString(); + builder.append(i * 64).append(":").append(bits).append(" "); + } + return builder.toString(); + } + } diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/IChunk.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/IChunk.java index 126a5cd13..84f98e6d7 100644 --- a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/IChunk.java +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/IChunk.java @@ -25,7 +25,6 @@ public interface IChunk extends Trimable, IChunkGet, IChunkSet { * * @return the x coordinate of the chunk */ - @Range(from = 0, to = 15) int getX(); /** @@ -33,7 +32,6 @@ public interface IChunk extends Trimable, IChunkGet, IChunkSet { * * @return the z coordinate of the chunk */ - @Range(from = 0, to = 15) int getZ(); /** diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/IChunkGet.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/IChunkGet.java index 8004f7098..53fbe5113 100644 --- a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/IChunkGet.java +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/IChunkGet.java @@ -7,6 +7,7 @@ import com.sk89q.worldedit.math.BlockVector3; import com.sk89q.worldedit.world.biome.BiomeType; import com.sk89q.worldedit.world.block.BaseBlock; import com.sk89q.worldedit.world.block.BlockState; +import com.sk89q.worldedit.world.storage.InvalidFormatException; import javax.annotation.Nullable; import java.util.UUID; diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/IQueueExtent.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/IQueueExtent.java index a5a29f170..d93821c45 100644 --- a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/IQueueExtent.java +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/IQueueExtent.java @@ -162,4 +162,6 @@ public 
interface IQueueExtent extends Flushable, Trimable, ICh return filter; } + void addFlushTask(Runnable task); + } diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/ParallelAnvilQueueExtent.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/ParallelAnvilQueueExtent.java new file mode 100644 index 000000000..de7a75888 --- /dev/null +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/ParallelAnvilQueueExtent.java @@ -0,0 +1,279 @@ +package com.fastasyncworldedit.core.queue.implementation; + +import com.fastasyncworldedit.core.Fawe; +import com.fastasyncworldedit.core.FaweCache; +import com.fastasyncworldedit.core.anvil.MCAWorld; +import com.fastasyncworldedit.core.configuration.Settings; +import com.fastasyncworldedit.core.extent.NullExtent; +import com.fastasyncworldedit.core.extent.PassthroughExtent; +import com.fastasyncworldedit.core.extent.clipboard.WorldCopyClipboard; +import com.fastasyncworldedit.core.extent.filter.CountFilter; +import com.fastasyncworldedit.core.extent.filter.DistrFilter; +import com.fastasyncworldedit.core.extent.filter.LinkedFilter; +import com.fastasyncworldedit.core.extent.filter.block.ChunkFilterBlock; +import com.fastasyncworldedit.core.extent.processor.BatchProcessorHolder; +import com.fastasyncworldedit.core.extent.processor.MultiBatchProcessor; +import com.fastasyncworldedit.core.function.mask.BlockMaskBuilder; +import com.fastasyncworldedit.core.internal.exception.FaweException; +import com.fastasyncworldedit.core.queue.Filter; +import com.fastasyncworldedit.core.queue.IQueueChunk; +import com.fastasyncworldedit.core.queue.IQueueExtent; +import com.sk89q.worldedit.MaxChangedBlocksException; +import com.sk89q.worldedit.extent.clipboard.Clipboard; +import com.sk89q.worldedit.function.mask.BlockMask; +import com.sk89q.worldedit.function.mask.ExistingBlockMask; +import com.sk89q.worldedit.function.mask.Mask; +import com.sk89q.worldedit.function.pattern.BlockPattern; +import com.sk89q.worldedit.function.pattern.Pattern; +import com.sk89q.worldedit.internal.util.LogManagerCompat; +import com.sk89q.worldedit.math.BlockVector2; +import com.sk89q.worldedit.math.BlockVector3; +import com.sk89q.worldedit.regions.Region; +import com.sk89q.worldedit.util.Countable; +import com.sk89q.worldedit.world.block.BaseBlock; +import com.sk89q.worldedit.world.block.BlockState; +import com.sk89q.worldedit.world.block.BlockStateHolder; +import com.sk89q.worldedit.world.block.BlockType; +import org.apache.logging.log4j.Logger; + +import java.util.Iterator; +import java.util.List; +import java.util.Set; +import java.util.concurrent.ForkJoinTask; +import java.util.stream.IntStream; + +public class ParallelAnvilQueueExtent extends PassthroughExtent { + + private static final Logger LOGGER = LogManagerCompat.getLogger(); + + private final MCAWorld world; + private final QueueHandler handler; + private final BatchProcessorHolder processor; + private final BatchProcessorHolder postProcessor; + // Array for lazy avoidance of concurrent modification exceptions and needless overcomplication of code (synchronisation is + // not very important) + private final boolean[] faweExceptionReasonsUsed = new boolean[FaweException.Type.values().length]; + private final boolean fastmode; + private int changes; + private int lastException = Integer.MIN_VALUE; + private int exceptionCount = 0; + + public ParallelAnvilQueueExtent(QueueHandler handler, MCAWorld world, boolean fastmode) { + 
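// How this extent is driven: apply(region, filter, full) below forks the supplied Filter and fans the region's chunks
// out over up to Settings.settings().QUEUE.PARALLEL_THREADS ForkJoinTasks, each writing through its own
// SingleThreadQueueExtent obtained from the handler and flushing it once its share of chunks is processed.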
super(handler.getQueue(world, new BatchProcessorHolder(), new BatchProcessorHolder())); + this.world = world; + this.handler = handler; + this.processor = (BatchProcessorHolder) getExtent().getProcessor(); + if (this.processor.getProcessor() instanceof MultiBatchProcessor) { + ((MultiBatchProcessor) this.processor.getProcessor()).setFaweExceptionArray(faweExceptionReasonsUsed); + } + this.postProcessor = (BatchProcessorHolder) getExtent().getPostProcessor(); + if (this.postProcessor.getProcessor() instanceof MultiBatchProcessor) { + ((MultiBatchProcessor) this.postProcessor.getProcessor()).setFaweExceptionArray(faweExceptionReasonsUsed); + } + this.fastmode = fastmode; + } + + @Override + @SuppressWarnings({"unchecked", "rawtypes"}) + public IQueueExtent getExtent() { + return (IQueueExtent) super.getExtent(); + } + + @Override + public boolean cancel() { + if (super.cancel()) { + processor.setProcessor(new NullExtent(this, FaweCache.MANUAL)); + postProcessor.setPostProcessor(new NullExtent(this, FaweCache.MANUAL)); + return true; + } + return false; + } + + @SuppressWarnings("rawtypes") + private IQueueExtent getNewQueue() { + return handler.getMCAQueue(world, this.processor, this.postProcessor); + } + + @Override + @SuppressWarnings("rawtypes") + public T apply(Region region, T filter, boolean full) { + // The chunks positions to iterate over + final Set chunks = region.getChunks(); + final Iterator chunksIter = chunks.iterator(); + + // Get a pool, to operate on the chunks in parallel + final int size = Math.min(chunks.size(), Settings.settings().QUEUE.PARALLEL_THREADS); + if (size <= 1 && chunksIter.hasNext()) { + BlockVector2 pos = chunksIter.next(); + getExtent().apply(null, filter, region, pos.getX(), pos.getZ(), full); + } else { + final ForkJoinTask[] tasks = IntStream.range(0, size).mapToObj(i -> handler.submit(() -> { + try { + final Filter newFilter = filter.fork(); + // Create a chunk that we will reuse/reset for each operation + final SingleThreadQueueExtent queue = (SingleThreadQueueExtent) getNewQueue(); + queue.setFastMode(fastmode); + queue.setFaweExceptionArray(faweExceptionReasonsUsed); + synchronized (queue) { + try { + ChunkFilterBlock block = null; + + while (true) { + // Get the next chunk posWeakChunk + final int chunkX; + final int chunkZ; + synchronized (chunksIter) { + if (!chunksIter.hasNext()) { + break; + } + final BlockVector2 pos = chunksIter.next(); + chunkX = pos.getX(); + chunkZ = pos.getZ(); + } + block = queue.apply(block, newFilter, region, chunkX, chunkZ, full); + } + queue.flush(); + } catch (Throwable t) { + if (t instanceof FaweException) { + Fawe.handleFaweException(faweExceptionReasonsUsed, (FaweException) t, LOGGER); + } else if (t.getCause() instanceof FaweException) { + Fawe.handleFaweException(faweExceptionReasonsUsed, (FaweException) t.getCause(), LOGGER); + } else { + throw t; + } + } + } + } catch (Throwable e) { + String message = e.getMessage(); + int hash = message != null ? 
message.hashCode() : 0; + if (lastException != hash) { + lastException = hash; + exceptionCount = 0; + LOGGER.catching(e); + } else if (exceptionCount < Settings.settings().QUEUE.PARALLEL_THREADS) { + exceptionCount++; + LOGGER.warn(message); + } + } + })).toArray(ForkJoinTask[]::new); + // Join filters + for (ForkJoinTask task : tasks) { + if (task != null) { + task.quietlyJoin(); + } + } + filter.join(); + } + return filter; + } + + @Override + public int countBlocks(Region region, Mask searchMask) { + return + // Apply a filter over a region + apply(region, searchMask + .toFilter(new CountFilter()), searchMask.replacesAir()) // Adapt the mask to a filter which counts + .getParent() // Get the counter of this mask + .getTotal(); // Get the total from the counter + } + + @Override + public > int setBlocks(Region region, B block) throws MaxChangedBlocksException { + Mask mask = new BlockMaskBuilder().add(block).build(this).inverse(); + return this.changes = apply(region, mask.toFilter(block), mask.replacesAir()) + .getBlocksApplied(); + } + + @Override + public int setBlocks(Region region, Pattern pattern) throws MaxChangedBlocksException { + return this.changes = apply(region, new LinkedFilter<>(pattern, new CountFilter()), true).getChild().getTotal(); + } + + @Override + public int setBlocks(Set vset, Pattern pattern) { + if (vset instanceof Region) { + this.changes = setBlocks((Region) vset, pattern); + return this.changes; + } + // TODO optimize parallel + for (BlockVector3 blockVector3 : vset) { + if (pattern.apply(this, blockVector3, blockVector3)) { + this.changes++; + } + } + return this.changes; + } + + @Override + public int replaceBlocks(Region region, Mask mask, Pattern pattern) + throws MaxChangedBlocksException { + boolean full = mask.replacesAir(); + return this.changes = apply(region, mask.toFilter(pattern), full).getBlocksApplied(); + } + + @Override + public List> getBlockDistributionWithData(Region region) { + return apply(region, new DistrFilter(), true).getDistribution(); + } + + @Override + public List> getBlockDistribution(Region region) { + return apply(region, new DistrFilter(), true).getTypeDistribution(); + } + + /** + * Lazily copy a region + */ + @Override + public Clipboard lazyCopy(Region region) { + Clipboard clipboard = new WorldCopyClipboard(() -> this, region); + clipboard.setOrigin(region.getMinimumPoint()); + return clipboard; + } + + /** + * Count the number of blocks of a list of types in a region. + * + * @param region the region + * @param searchBlocks the list of blocks to search + * @return the number of blocks that matched the block + */ + @Override + public int countBlocks(Region region, Set searchBlocks) { + Mask mask = new BlockMask(this, searchBlocks); + return countBlocks(region, mask); + } + + /** + * Replaces all the blocks matching a given filter, within a given region, to a block + * returned by a given pattern. 
+ * + * @param region the region to replace the blocks within + * @param filter a list of block types to match, or null to use {@link ExistingBlockMask} + * @param replacement the replacement block + * @return number of blocks affected + * @throws MaxChangedBlocksException thrown if too many blocks are changed + */ + @Override + public > int replaceBlocks(Region region, Set filter, B replacement) throws + MaxChangedBlocksException { + return replaceBlocks(region, filter, new BlockPattern(replacement)); + } + + /** + * Replaces all the blocks matching a given filter, within a given region, to a block + * returned by a given pattern. + * + * @param region the region to replace the blocks within + * @param filter a list of block types to match, or null to use {@link ExistingBlockMask} + * @param pattern the pattern that provides the new blocks + * @return number of blocks affected + * @throws MaxChangedBlocksException thrown if too many blocks are changed + */ + @Override + public int replaceBlocks(Region region, Set filter, Pattern pattern) throws MaxChangedBlocksException { + Mask mask = filter == null ? new ExistingBlockMask(this) : new BlockMask(this, filter); + return replaceBlocks(region, mask, pattern); + } + +} diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/QueueHandler.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/QueueHandler.java index 014b94fce..aeb84f206 100644 --- a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/QueueHandler.java +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/QueueHandler.java @@ -10,6 +10,7 @@ import com.fastasyncworldedit.core.queue.IChunkSet; import com.fastasyncworldedit.core.queue.IQueueChunk; import com.fastasyncworldedit.core.queue.IQueueExtent; import com.fastasyncworldedit.core.queue.Trimable; +import com.fastasyncworldedit.core.anvil.MCAWorld; import com.fastasyncworldedit.core.queue.implementation.chunk.ChunkCache; import com.fastasyncworldedit.core.util.MemUtil; import com.fastasyncworldedit.core.util.TaskManager; @@ -399,6 +400,26 @@ public abstract class QueueHandler implements Trimable, Runnable { } } + /** + * Get or create the MCA WorldChunkCache for a world + */ + public IChunkCache getOrCreateMCAWorldCache(World world) { + world = WorldWrapper.unwrap(world); + + synchronized (chunkGetCache) { + final WeakReference> ref = chunkGetCache.get(world); + if (ref != null) { + final IChunkCache cached = ref.get(); + if (cached != null) { + return cached; + } + } + final IChunkCache created = new ChunkCache<>(world); + chunkGetCache.put(world, new WeakReference<>(created)); + return created; + } + } + public IQueueExtent create() { return new SingleThreadQueueExtent(); } @@ -455,6 +476,24 @@ public abstract class QueueHandler implements Trimable, Runnable { */ public abstract void endUnsafe(boolean parallel); + public IQueueExtent getMCAQueue(MCAWorld world) { + return getMCAQueue(world, null, null); + } + + public IQueueExtent getMCAQueue(MCAWorld world, IBatchProcessor processor, IBatchProcessor postProcessor) { + final IQueueExtent queue = pool(); + IChunkCache cache = getOrCreateMCAWorldCache(world); + queue.init(world, cache, null); + if (processor != null) { + queue.setProcessor(processor); + } + if (postProcessor != null) { + queue.setPostProcessor(postProcessor); + } + queue.addFlushTask(world::flush); + return queue; + } + /** * Create a new queue for a given world. 
*/ diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/SingleThreadQueueExtent.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/SingleThreadQueueExtent.java index 198782ee3..29d8e9143 100644 --- a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/SingleThreadQueueExtent.java +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/SingleThreadQueueExtent.java @@ -32,6 +32,8 @@ import com.sk89q.worldedit.world.World; import it.unimi.dsi.fastutil.longs.Long2ObjectLinkedOpenHashMap; import org.apache.logging.log4j.Logger; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; @@ -71,6 +73,7 @@ public class SingleThreadQueueExtent extends ExtentBatchProcessorHolder implemen private boolean[] faweExceptionReasonsUsed = new boolean[FaweException.Type.values().length]; private int lastException = Integer.MIN_VALUE; private int exceptionCount = 0; + private List flushTasks = null; public SingleThreadQueueExtent() { } @@ -202,6 +205,14 @@ public class SingleThreadQueueExtent extends ExtentBatchProcessorHolder implemen return chunks.isEmpty() && submissions.isEmpty(); } + @Override + public void addFlushTask(final Runnable task) { + if (flushTasks == null) { + flushTasks = new ArrayList<>(); + } + flushTasks.add(task); + } + @Override public > V submit(IQueueChunk chunk) { if (lastChunk == chunk) { @@ -473,6 +484,11 @@ public class SingleThreadQueueExtent extends ExtentBatchProcessorHolder implemen getChunkLock.unlock(); } pollSubmissions(0, true); + if (flushTasks != null) { + for (Runnable r : flushTasks) { + r.run(); + } + } } @Override diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/blocks/ThreadUnsafeCharBlocks.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/blocks/ThreadUnsafeCharBlocks.java index ca0dd3442..21fb96614 100644 --- a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/blocks/ThreadUnsafeCharBlocks.java +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/blocks/ThreadUnsafeCharBlocks.java @@ -439,7 +439,7 @@ public class ThreadUnsafeCharBlocks implements IChunkSet, IBlocks { @Override public boolean hasBiomes(int layer) { layer -= minSectionPosition; - return layer >= 0 && layer < biomes.length && biomes[layer] != null && biomes[layer].length > 0; + return layer >= 0 && biomes != null && layer < biomes.length && biomes[layer] != null && biomes[layer].length > 0; } @Override diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/chunk/ChunkHolder.java b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/chunk/ChunkHolder.java index ec556d845..13291ee8a 100644 --- a/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/chunk/ChunkHolder.java +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/queue/implementation/chunk/ChunkHolder.java @@ -1059,7 +1059,10 @@ public class ChunkHolder> implements IQueueChunk { // Do nothing }); } catch (Throwable t) { - calledLock.unlock(); + try { + calledLock.unlock(); + } catch (IllegalMonitorStateException ignored) { + } throw t; } } diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/regions/WorldRegionsRegion.java 
b/worldedit-core/src/main/java/com/fastasyncworldedit/core/regions/WorldRegionsRegion.java new file mode 100644 index 000000000..144df3271 --- /dev/null +++ b/worldedit-core/src/main/java/com/fastasyncworldedit/core/regions/WorldRegionsRegion.java @@ -0,0 +1,281 @@ +package com.fastasyncworldedit.core.regions; + +import com.fastasyncworldedit.core.anvil.MCAFile; +import com.fastasyncworldedit.core.anvil.MCAWorld; +import com.fastasyncworldedit.core.math.MutableBlockVector3; +import com.fastasyncworldedit.core.util.task.RunnableVal4; +import com.sk89q.worldedit.WorldEdit; +import com.sk89q.worldedit.extension.platform.Capability; +import com.sk89q.worldedit.math.BlockVector2; +import com.sk89q.worldedit.math.BlockVector3; +import com.sk89q.worldedit.regions.Region; +import com.sk89q.worldedit.regions.RegionOperationException; +import com.sk89q.worldedit.world.World; +import org.jetbrains.annotations.Nullable; + +import javax.annotation.Nonnull; +import java.nio.file.Path; +import java.util.AbstractSet; +import java.util.ArrayDeque; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.Objects; +import java.util.Queue; +import java.util.Set; + +public class WorldRegionsRegion implements Region { + + private final MCAWorld world; + private final BlockVector3 min; + private final BlockVector3 max; + private Set chunks = null; + + public WorldRegionsRegion(@Nonnull final MCAWorld world) { + this.world = Objects.requireNonNull(world); + List regions = world.getRegionFileFiles(); + int minX = Integer.MAX_VALUE; + int minZ = Integer.MAX_VALUE; + int maxX = Integer.MIN_VALUE; + int maxZ = Integer.MIN_VALUE; + for (Path p : regions) { + String[] split = p.getFileName().toString().split("\\."); + int x = Integer.parseInt(split[1]); + int z = Integer.parseInt(split[2]); + minX = Math.min(x, minX); + minZ = Math.min(z, minZ); + maxX = Math.max(x, maxX); + maxZ = Math.max(z, maxZ); + } + this.min = BlockVector3.at( + minX, + WorldEdit.getInstance().getPlatformManager().queryCapability(Capability.WORLD_EDITING).versionMinY(), + minZ + ); + this.max = BlockVector3.at( + maxX, + WorldEdit.getInstance().getPlatformManager().queryCapability(Capability.WORLD_EDITING).versionMaxY(), + maxZ + ); + } + + @Override + public Iterator iterator() { + Queue queue = new ArrayDeque<>(getChunks()); + return new Iterator<>() { + private final int by = min.getY(); + private final int ty = max.getY(); + private final MutableBlockVector3 mutable = new MutableBlockVector3(); + + private BlockVector2 chunk = queue.poll(); + private int cx = chunk.getX() << 4; + private int cz = chunk.getZ() << 4; + private int x; + private int y; + private int z; + + @Override + public boolean hasNext() { + return x < 15 || y < ty || z < 15 || queue.peek() != null; + } + + @Override + public MutableBlockVector3 next() { + int curX = x; + int curY = y; + int curZ = z; + if (++x > 15) { + if (++z > 15) { + if (++y > ty) { + if (!hasNext()) { + throw new NoSuchElementException("End of iterator") { + @Override + public Throwable fillInStackTrace() { + return this; + } + }; + } + chunk = queue.poll(); + x = 0; + y = by; + z = 0; + cx = chunk.getX() << 4; + cz = chunk.getZ() << 4; + return mutable.setComponents(cx + x, y, cz + z); + } else { + x = 0; + z = 0; + } + } else { + x = 0; + } + } + return mutable.setComponents(cx + curX, curY, cz + curZ); + } + }; + } + + @Override + public void setWorld(final World world) { + throw new 
UnsupportedOperationException("Cannot modify WorldRegionsRegion - Immutable"); + } + + @Override + public Region clone() { + return new WorldRegionsRegion(world); + } + + @Override + public List polygonize(final int maxPoints) { + return null; + } + + @Override + public void shift(final BlockVector3 change) throws RegionOperationException { + throw new UnsupportedOperationException("Cannot modify WorldRegionsRegion - Immutable"); + } + + @Override + public BlockVector3 getMinimumPoint() { + return min; + } + + @Override + public BlockVector3 getMaximumPoint() { + return max; + } + + /** + * Get X-size. + * + * @return width + */ + @Override + public int getWidth() { + BlockVector3 min = getMinimumPoint(); + BlockVector3 max = getMaximumPoint(); + + return max.getX() - min.getX() + 1; + } + + /** + * Get Y-size. + * + * @return height + */ + @Override + public int getHeight() { + BlockVector3 min = getMinimumPoint(); + BlockVector3 max = getMaximumPoint(); + + return max.getY() - min.getY() + 1; + } + + /** + * Get Z-size. + * + * @return length + */ + @Override + public int getLength() { + BlockVector3 min = getMinimumPoint(); + BlockVector3 max = getMaximumPoint(); + + return max.getZ() - min.getZ() + 1; + } + + @Override + public void expand(final BlockVector3... changes) throws RegionOperationException { + throw new UnsupportedOperationException("Cannot modify WorldRegionsRegion - Immutable"); + } + + @Override + public void contract(final BlockVector3... changes) throws RegionOperationException { + throw new UnsupportedOperationException("Cannot modify WorldRegionsRegion - Immutable"); + } + + @Override + public boolean contains(final BlockVector3 position) { + return false; + } + + @Override + public Set getChunks() { + if (chunks == null) { + synchronized (this) { + if (chunks != null) { + return chunks; + } + Set tmp = new HashSet<>(); + for (MCAFile mca : world.getMCAs()) { + mca.forEachChunk(new RunnableVal4<>() { + @Override + public void run(Integer x, Integer z, Integer offset, Integer size) { + if (offset != 0 && size > 0) { + tmp.add(BlockVector2.at(x, z)); + } + } + }); + } + chunks = tmp; + } + } + return chunks; + } + + @Override + public Set getChunkCubes() { + return new AbstractSet<>() { + @Override + public Iterator iterator() { + Queue chunks = new ArrayDeque<>(getChunks()); + + return new Iterator<>() { + private final MutableBlockVector3 mutable = new MutableBlockVector3(); + private final int by = min.getY() >> 4; + private final int ty = max.getY() >> 4; + + private BlockVector2 chunk = chunks.poll(); + private int y; + + @Override + public boolean hasNext() { + return y < ty || chunks.peek() != null; + } + + @Override + public BlockVector3 next() { + int curY = y; + if (++y > ty) { + if (!hasNext()) { + throw new NoSuchElementException("End of iterator") { + @Override + public Throwable fillInStackTrace() { + return this; + } + }; + } + y = by; + chunk = chunks.poll(); + return mutable.setComponents(chunk.getX(), y, chunk.getZ()); + } + return mutable.setComponents(chunk.getX(), curY, chunk.getZ()); + } + }; + } + + @Override + public int size() { + return getChunks().size() * ((max.getY() >> 4) - (min.getY() >> 4)); + } + }; + } + + @Nullable + @Override + public World getWorld() { + return null; + } + +} diff --git a/worldedit-core/src/main/java/com/sk89q/jnbt/NBTInputStream.java b/worldedit-core/src/main/java/com/sk89q/jnbt/NBTInputStream.java index fca025212..0eb97da8d 100644 --- a/worldedit-core/src/main/java/com/sk89q/jnbt/NBTInputStream.java +++ 
b/worldedit-core/src/main/java/com/sk89q/jnbt/NBTInputStream.java @@ -46,6 +46,7 @@ import java.util.Map; * * @deprecated JNBT is being removed for adventure-nbt in WorldEdit 8. */ +@SuppressWarnings("removal") @Deprecated(forRemoval = true) public final class NBTInputStream implements Closeable { @@ -61,6 +62,7 @@ public final class NBTInputStream implements Closeable { this.is = new DataInputStream(is); } + //FAWE start public NBTInputStream(DataInputStream dis) { this.is = dis; } @@ -73,6 +75,7 @@ public final class NBTInputStream implements Closeable { is.reset(); } + //FAWE end /** * Reads an NBT tag from the stream. * @@ -80,6 +83,7 @@ public final class NBTInputStream implements Closeable { * @throws IOException if an I/O error occurs. */ public NamedTag readNamedTag() throws IOException { + //FAWE start return readNamedTag(0); } @@ -100,7 +104,7 @@ public final class NBTInputStream implements Closeable { return readTagPayload(type, 0); } - public void readNamedTagLazy(StreamDelegate scope) throws IOException { + public void readNamedTagLazy(StreamDelegate scope) { try { int type = is.readByte(); if (type == NBTConstants.TYPE_END) { @@ -111,19 +115,30 @@ public final class NBTInputStream implements Closeable { if (child != null) { child.acceptRoot(this, type, 0); } else { - readTagPayloadLazy(type, 0); + readTagPayloadLazy(type, 0, scope, scope.getRetained() != null); } } catch (Throwable e) { e.printStackTrace(); } } + public void readNamedTagLazyExceptionally(StreamDelegate scope) throws IOException { + int type = is.readByte(); + if (type == NBTConstants.TYPE_END) { + return; + } + + StreamDelegate child = scope.get(is); + if (child != null) { + child.acceptRoot(this, type, 0); + } else { + readTagPayloadLazy(type, 0, scope, scope.getRetained() != null); + } + } + public String readNamedTagName(int type) throws IOException { if (type != NBTConstants.TYPE_END) { - int nameLength = is.readShort() & 0xFFFF; - byte[] nameBytes = new byte[nameLength]; - is.readFully(nameBytes); - return new String(nameBytes, NBTConstants.CHARSET); + return is.readUTF(); } else { return ""; } @@ -131,73 +146,104 @@ public final class NBTInputStream implements Closeable { private byte[] buf; - public void readTagPayloadLazy(int type, int depth) throws IOException { + public void readTagPayloadLazy(int type, int depth, StreamDelegate scope, boolean retain) throws IOException { + int length; switch (type) { - case NBTConstants.TYPE_END: - return; - case NBTConstants.TYPE_BYTE: - is.skipBytes(1); - return; - case NBTConstants.TYPE_SHORT: - is.skipBytes(2); - return; - case NBTConstants.TYPE_INT: - is.skipBytes(4); - return; - case NBTConstants.TYPE_LONG: - is.skipBytes(8); - return; - case NBTConstants.TYPE_FLOAT: - is.skipBytes(4); - return; - case NBTConstants.TYPE_DOUBLE: - is.skipBytes(8); - return; - case NBTConstants.TYPE_STRING: - int length = is.readShort() & 0xFFFF; - is.skipBytes(length); - return; - case NBTConstants.TYPE_BYTE_ARRAY: - is.skipBytes(is.readInt()); - return; - case NBTConstants.TYPE_LIST: { - int childType = is.readByte(); - length = is.readInt(); - for (int i = 0; i < length; ++i) { - readTagPayloadLazy(childType, depth + 1); - } - return; + case NBTConstants.TYPE_END -> { } - case NBTConstants.TYPE_COMPOUND: { - // readDataPayload - depth++; - while (true) { + case NBTConstants.TYPE_BYTE -> { + if (!retain) { + is.skipBytes(1); + } else { + scope.retain(readTagPayload(type, depth)); + } + } + case NBTConstants.TYPE_SHORT -> { + if (!retain) { + is.skipBytes(2); + } else { + 
scope.retain(readTagPayload(type, depth)); + } + } + case NBTConstants.TYPE_INT, NBTConstants.TYPE_FLOAT -> { + if (!retain) { + is.skipBytes(4); + } else { + scope.retain(readTagPayload(type, depth)); + } + } + case NBTConstants.TYPE_LONG, NBTConstants.TYPE_DOUBLE -> { + if (!retain) { + is.skipBytes(8); + } else { + scope.retain(readTagPayload(type, depth)); + } + } + case NBTConstants.TYPE_STRING -> { + length = is.readShort() & 0xFFFF; + if (!retain) { + is.skipBytes(length); + } else { + scope.retain(readTagPayload(type, depth)); + } + } + case NBTConstants.TYPE_BYTE_ARRAY -> { + if (!retain) { + is.skipBytes(is.readInt()); + } else { + scope.retain(readTagPayload(type, depth)); + } + } + case NBTConstants.TYPE_LIST -> { + if (!retain) { int childType = is.readByte(); - if (childType == NBTConstants.TYPE_END) { - return; + length = is.readInt(); + for (int i = 0; i < length; ++i) { + readTagPayloadLazy(childType, depth + 1, scope, retain); } - is.skipBytes(is.readShort() & 0xFFFF); - readTagPayloadLazy(childType, depth + 1); + } else { + scope.retain(readTagPayload(type, depth)); } } - case NBTConstants.TYPE_INT_ARRAY: { - is.skipBytes(is.readInt() << 2); - return; + case NBTConstants.TYPE_COMPOUND -> { + if (!retain) { + // readDataPayload + depth++; + while (true) { + int childType = is.readByte(); + if (childType == NBTConstants.TYPE_END) { + return; + } + is.skipBytes(is.readShort() & 0xFFFF); + readTagPayloadLazy(childType, depth + 1, scope, retain); + } + } else { + scope.retain(readTagPayload(type, depth)); + } } - case NBTConstants.TYPE_LONG_ARRAY: { - is.skipBytes(is.readInt() << 3); - return; + case NBTConstants.TYPE_INT_ARRAY -> { + if (!retain) { + is.skipBytes(is.readInt() << 2); + } else { + scope.retain(readTagPayload(type, depth)); + } } - default: - throw new IOException("Invalid tag type: " + type + "."); + case NBTConstants.TYPE_LONG_ARRAY -> { + if (!retain) { + is.skipBytes(is.readInt() << 3); + } else { + scope.retain(readTagPayload(type, depth)); + } + } + default -> throw new IOException("Invalid tag type: " + type + "."); } } public void readTagPayloadLazy(int type, int depth, StreamDelegate scope) throws IOException { switch (type) { - case NBTConstants.TYPE_END: - return; - case NBTConstants.TYPE_BYTE: { + case NBTConstants.TYPE_END -> { + } + case NBTConstants.TYPE_BYTE -> { ValueReader value = scope.getValueReader(); if (value == null) { value = scope.getElemReader(); @@ -207,9 +253,8 @@ public final class NBTInputStream implements Closeable { } else { is.skipBytes(1); } - return; } - case NBTConstants.TYPE_SHORT: { + case NBTConstants.TYPE_SHORT -> { ValueReader value = scope.getValueReader(); if (value == null) { value = scope.getElemReader(); @@ -219,9 +264,8 @@ public final class NBTInputStream implements Closeable { } else { is.skipBytes(2); } - return; } - case NBTConstants.TYPE_INT: { + case NBTConstants.TYPE_INT -> { ValueReader value = scope.getValueReader(); if (value == null) { value = scope.getElemReader(); @@ -231,9 +275,8 @@ public final class NBTInputStream implements Closeable { } else { is.skipBytes(4); } - return; } - case NBTConstants.TYPE_LONG: { + case NBTConstants.TYPE_LONG -> { ValueReader value = scope.getValueReader(); if (value == null) { value = scope.getElemReader(); @@ -243,9 +286,8 @@ public final class NBTInputStream implements Closeable { } else { is.skipBytes(8); } - return; } - case NBTConstants.TYPE_FLOAT: { + case NBTConstants.TYPE_FLOAT -> { ValueReader value = scope.getValueReader(); if (value == null) { value = 
scope.getElemReader(); @@ -255,9 +297,8 @@ public final class NBTInputStream implements Closeable { } else { is.skipBytes(4); } - return; } - case NBTConstants.TYPE_DOUBLE: { + case NBTConstants.TYPE_DOUBLE -> { ValueReader value = scope.getValueReader(); if (value == null) { value = scope.getElemReader(); @@ -267,24 +308,20 @@ public final class NBTInputStream implements Closeable { } else { is.skipBytes(8); } - return; } - case NBTConstants.TYPE_STRING: { + case NBTConstants.TYPE_STRING -> { ValueReader value = scope.getValueReader(); if (value == null) { value = scope.getElemReader(); } - int length = is.readShort() & 0xFFFF; if (value != null) { - byte[] bytes = new byte[length]; - is.readFully(bytes); - value.apply(0, new String(bytes, NBTConstants.CHARSET)); + value.apply(0, is.readUTF()); } else { + int length = is.readShort() & 0xFFFF; is.skipBytes(length); } - return; } - case NBTConstants.TYPE_LIST: { + case NBTConstants.TYPE_LIST -> { int childType = is.readByte(); int length = is.readInt(); StreamDelegate child; @@ -305,16 +342,15 @@ public final class NBTInputStream implements Closeable { child = scope.get0(); if (child == null) { for (int i = 0; i < length; ++i) { - readTagPayloadLazy(childType, depth + 1); + readTagPayloadLazy(childType, depth + 1, scope, scope.getRetained() != null); } } else { for (int i = 0; i < length; ++i) { readTagPayloadLazy(childType, depth + 1, child); } } - return; } - case NBTConstants.TYPE_COMPOUND: { + case NBTConstants.TYPE_COMPOUND -> { // readDataPayload scope.acceptInfo(-1, NBTConstants.TYPE_BYTE); ValueReader valueReader = scope.getValueReader(); @@ -341,14 +377,23 @@ public final class NBTInputStream implements Closeable { return; } StreamDelegate child = scope.get(is); - if (child == null) { - readTagPayloadLazy(childType, depth + 1); - } else { - readTagPayloadLazy(childType, depth + 1, child); + try { + if (child == null) { + readTagPayloadLazy(childType, depth + 1, scope, scope.getRetained() != null); + } else { + readTagPayloadLazy(childType, depth + 1, child); + } + } catch (IOException e) { + String cur = scope.getCurrentName() == null ? 
scope.getRetainedName() : scope.getCurrentName(); + if (cur != null) { + throw new IOException("Error reading child scope: `" + scope.getCurrentName() + "`", e); + } else { + throw e; + } } } } - case NBTConstants.TYPE_BYTE_ARRAY: { + case NBTConstants.TYPE_BYTE_ARRAY -> { int length = is.readInt(); scope.acceptInfo(length, NBTConstants.TYPE_BYTE); if (scope.acceptLazy(length, this)) { @@ -383,9 +428,8 @@ public final class NBTInputStream implements Closeable { return; } is.skipBytes(length); - return; } - case NBTConstants.TYPE_INT_ARRAY: { + case NBTConstants.TYPE_INT_ARRAY -> { int length = is.readInt(); scope.acceptInfo(length, NBTConstants.TYPE_INT); if (scope.acceptLazy(length, this)) { @@ -404,9 +448,8 @@ public final class NBTInputStream implements Closeable { return; } is.skipBytes(length << 2); - return; } - case NBTConstants.TYPE_LONG_ARRAY: { + case NBTConstants.TYPE_LONG_ARRAY -> { int length = is.readInt(); scope.acceptInfo(length, NBTConstants.TYPE_LONG); if (scope.acceptLazy(length, this)) { @@ -425,11 +468,8 @@ public final class NBTInputStream implements Closeable { return; } is.skipBytes(length << 3); - return; } - - default: - throw new IOException("Invalid tag type: " + type + "."); + default -> throw new IOException("Invalid tag type: " + type + "."); } } @@ -446,65 +486,60 @@ public final class NBTInputStream implements Closeable { } public static int getSize(int type) { - switch (type) { - default: - case NBTConstants.TYPE_END: - case NBTConstants.TYPE_BYTE: - return 1; - case NBTConstants.TYPE_BYTE_ARRAY: - case NBTConstants.TYPE_STRING: - case NBTConstants.TYPE_LIST: - case NBTConstants.TYPE_COMPOUND: - case NBTConstants.TYPE_INT_ARRAY: - case NBTConstants.TYPE_LONG_ARRAY: - case NBTConstants.TYPE_SHORT: - return 2; - case NBTConstants.TYPE_FLOAT: - case NBTConstants.TYPE_INT: - return 4; - case NBTConstants.TYPE_DOUBLE: - case NBTConstants.TYPE_LONG: - return 8; - } + return switch (type) { + default -> 1; + case NBTConstants.TYPE_BYTE_ARRAY, NBTConstants.TYPE_STRING, NBTConstants.TYPE_LIST, NBTConstants.TYPE_COMPOUND, NBTConstants.TYPE_INT_ARRAY, NBTConstants.TYPE_LONG_ARRAY, NBTConstants.TYPE_SHORT -> + 2; + case NBTConstants.TYPE_FLOAT, NBTConstants.TYPE_INT -> 4; + case NBTConstants.TYPE_DOUBLE, NBTConstants.TYPE_LONG -> 8; + }; } public Object readTagPayloadRaw(int type, int depth) throws IOException { + int length; + byte[] bytes; switch (type) { - case NBTConstants.TYPE_END: + case NBTConstants.TYPE_END -> { if (depth == 0) { throw new IOException( "TAG_End found without a TAG_Compound/TAG_List tag preceding it."); } else { return null; } - case NBTConstants.TYPE_BYTE: + } + case NBTConstants.TYPE_BYTE -> { return (is.readByte()); - case NBTConstants.TYPE_SHORT: + } + case NBTConstants.TYPE_SHORT -> { return (is.readShort()); - case NBTConstants.TYPE_INT: + } + case NBTConstants.TYPE_INT -> { return (is.readInt()); - case NBTConstants.TYPE_LONG: + } + case NBTConstants.TYPE_LONG -> { return (is.readLong()); - case NBTConstants.TYPE_FLOAT: + } + case NBTConstants.TYPE_FLOAT -> { return (is.readFloat()); - case NBTConstants.TYPE_DOUBLE: + } + case NBTConstants.TYPE_DOUBLE -> { return (is.readDouble()); - case NBTConstants.TYPE_BYTE_ARRAY: - int length = is.readInt(); - byte[] bytes = new byte[length]; - is.readFully(bytes); - return (bytes); - case NBTConstants.TYPE_STRING: - length = is.readShort() & 0xFFFF; + } + case NBTConstants.TYPE_BYTE_ARRAY -> { + length = is.readInt(); bytes = new byte[length]; is.readFully(bytes); - return (new String(bytes, 
NBTConstants.CHARSET)); - case NBTConstants.TYPE_LIST: { + return (bytes); + } + case NBTConstants.TYPE_STRING -> { + return is.readUTF(); + } + case NBTConstants.TYPE_LIST -> { int childType = is.readByte(); length = is.readInt(); return readListRaw(depth, childType, length); } - case NBTConstants.TYPE_COMPOUND: { + case NBTConstants.TYPE_COMPOUND -> { Map tagMap = new HashMap<>(); while (true) { int childType = is.readByte(); @@ -516,16 +551,15 @@ public final class NBTInputStream implements Closeable { tagMap.put(name, value); } } - case NBTConstants.TYPE_INT_ARRAY: { + case NBTConstants.TYPE_INT_ARRAY -> { length = is.readInt(); return readIntArrayRaw(length); } - case NBTConstants.TYPE_LONG_ARRAY: { + case NBTConstants.TYPE_LONG_ARRAY -> { length = is.readInt(); return readLongArrayRaw(length); } - default: - throw new IOException("Invalid tag type: " + type + "."); + default -> throw new IOException("Invalid tag type: " + type + "."); } } @@ -541,7 +575,7 @@ public final class NBTInputStream implements Closeable { for (int i = 0; i < toRead; i += 4, index++) { data[index] = ((buf[i] & 0xFF) << 24) + ((buf[i + 1] & 0xFF) << 16) + ((buf[i + 2] & 0xFF) << 8) + (buf[i + 3] & 0xFF); } - length -= toRead; + length -= (toRead >> 2); } return data; } @@ -558,7 +592,7 @@ public final class NBTInputStream implements Closeable { for (int i = 0; i < toRead; i += 8, index++) { data[index] = (((long) buf[i] << 56) | ((long) (buf[i + 1] & 255) << 48) | ((long) (buf[i + 2] & 255) << 40) | ((long) (buf[i + 3] & 255) << 32) | ((long) (buf[i + 4] & 255) << 24) | ((buf[i + 5] & 255) << 16) | ((buf[i + 6] & 255) << 8) | (buf[i + 7] & 255)); } - length -= toRead; + length -= (toRead >> 3); } return (data); } @@ -571,41 +605,48 @@ public final class NBTInputStream implements Closeable { * @return the tag * @throws IOException if an I/O error occurs. 
     */
-    private Tag readTagPayload(int type, int depth) throws IOException {
+    public Tag readTagPayload(int type, int depth) throws IOException {
+        int length;
+        byte[] bytes;
         switch (type) {
-            case NBTConstants.TYPE_END:
+            case NBTConstants.TYPE_END -> {
                 if (depth == 0) {
                     throw new IOException(
                             "TAG_End found without a TAG_Compound/TAG_List tag preceding it.");
                 } else {
                     return new EndTag();
                 }
-            case NBTConstants.TYPE_BYTE:
+            }
+            case NBTConstants.TYPE_BYTE -> {
                 return new ByteTag(is.readByte());
-            case NBTConstants.TYPE_SHORT:
+            }
+            case NBTConstants.TYPE_SHORT -> {
                 return new ShortTag(is.readShort());
-            case NBTConstants.TYPE_INT:
+            }
+            case NBTConstants.TYPE_INT -> {
                 return new IntTag(is.readInt());
-            case NBTConstants.TYPE_LONG:
+            }
+            case NBTConstants.TYPE_LONG -> {
                 return new LongTag(is.readLong());
-            case NBTConstants.TYPE_FLOAT:
+            }
+            case NBTConstants.TYPE_FLOAT -> {
                 return new FloatTag(is.readFloat());
-            case NBTConstants.TYPE_DOUBLE:
+            }
+            case NBTConstants.TYPE_DOUBLE -> {
                 return new DoubleTag(is.readDouble());
-            case NBTConstants.TYPE_BYTE_ARRAY:
-                int length = is.readInt();
-                byte[] bytes = new byte[length];
-                is.readFully(bytes);
-                return new ByteArrayTag(bytes);
-            case NBTConstants.TYPE_STRING:
-                length = is.readShort() & 0xFFFF;
+            }
+            case NBTConstants.TYPE_BYTE_ARRAY -> {
+                length = is.readInt();
                 bytes = new byte[length];
                 is.readFully(bytes);
-                return new StringTag(new String(bytes, NBTConstants.CHARSET));
-            case NBTConstants.TYPE_LIST:
+                return new ByteArrayTag(bytes);
+            }
+            case NBTConstants.TYPE_STRING -> {
+                return new StringTag(is.readUTF());
+            }
+            case NBTConstants.TYPE_LIST -> {
                 int childType = is.readByte();
                 length = is.readInt();
-
                 List<Tag> tagList = new ArrayList<>();
                 for (int i = 0; i < length; ++i) {
                     Tag tag = readTagPayload(childType, depth + 1);
@@ -614,9 +655,9 @@ public final class NBTInputStream implements Closeable {
                     }
                     tagList.add(tag);
                 }
-
                 return new ListTag(NBTUtils.getTypeClass(childType), tagList);
-            case NBTConstants.TYPE_COMPOUND:
+            }
+            case NBTConstants.TYPE_COMPOUND -> {
                 Map<String, Tag> tagMap = new HashMap<>();
                 while (true) {
                     NamedTag namedTag = readNamedTag(depth + 1);
@@ -627,24 +668,25 @@ public final class NBTInputStream implements Closeable {
                         tagMap.put(namedTag.getName(), tag);
                     }
                 }
-
                 return new CompoundTag(tagMap);
-            case NBTConstants.TYPE_INT_ARRAY:
+            }
+            case NBTConstants.TYPE_INT_ARRAY -> {
                 length = is.readInt();
                 int[] data = new int[length];
                 for (int i = 0; i < length; i++) {
                     data[i] = is.readInt();
                 }
                 return new IntArrayTag(data);
-            case NBTConstants.TYPE_LONG_ARRAY:
+            }
+            case NBTConstants.TYPE_LONG_ARRAY -> {
                 length = is.readInt();
                 long[] longData = new long[length];
                 for (int i = 0; i < length; i++) {
                     longData[i] = is.readLong();
                 }
                 return new LongArrayTag(longData);
-            default:
-                throw new IOException("Invalid tag type: " + type + ".");
+            }
+            default -> throw new IOException("Invalid tag type: " + type + ".");
         }
     }

diff --git a/worldedit-core/src/main/java/com/sk89q/jnbt/NBTOutputStream.java b/worldedit-core/src/main/java/com/sk89q/jnbt/NBTOutputStream.java
index 6342455eb..1d5fa34b5 100644
--- a/worldedit-core/src/main/java/com/sk89q/jnbt/NBTOutputStream.java
+++ b/worldedit-core/src/main/java/com/sk89q/jnbt/NBTOutputStream.java
@@ -110,9 +110,7 @@ public final class NBTOutputStream extends OutputStream implements Closeable, Da
         checkNotNull(value);
         int type = NBTConstants.TYPE_STRING;
         writeNamedTagName(name, type);
-        byte[] bytes = value.getBytes(NBTConstants.CHARSET);
-        os.writeShort(bytes.length);
-        os.write(bytes);
+        os.writeUTF(value);
     }

     public
void writeNamedTag(String name, int value) throws IOException { @@ -161,6 +159,16 @@ public final class NBTOutputStream extends OutputStream implements Closeable, Da } } + public void writeNamedTag(String name, long[] data) throws IOException { + checkNotNull(name); + int type = NBTConstants.TYPE_LONG_ARRAY; + writeNamedTagName(name, type); + os.writeInt(data.length); + for (long aData : data) { + os.writeLong(aData); + } + } + public void writeNamedEmptyList(String name) throws IOException { writeNamedEmptyList(name, NBTConstants.TYPE_COMPOUND); } @@ -181,10 +189,13 @@ public final class NBTOutputStream extends OutputStream implements Closeable, Da } public void writeLazyCompoundTag(String name, LazyWrite next) throws IOException { - byte[] nameBytes = name.getBytes(NBTConstants.CHARSET); os.writeByte(NBTConstants.TYPE_COMPOUND); - os.writeShort(nameBytes.length); - os.write(nameBytes); + os.writeUTF(name); + next.write(this); + os.writeByte(NBTConstants.TYPE_END); + } + + public void writeLazyListedCompoundTag(LazyWrite next) throws IOException { next.write(this); os.writeByte(NBTConstants.TYPE_END); } @@ -325,9 +336,7 @@ public final class NBTOutputStream extends OutputStream implements Closeable, Da * @throws IOException if an I/O error occurs. */ private void writeStringTagPayload(StringTag tag) throws IOException { - byte[] bytes = tag.getValue().getBytes(NBTConstants.CHARSET); - os.writeShort(bytes.length); - os.write(bytes); + os.writeUTF(tag.getValue()); } /** diff --git a/worldedit-core/src/main/java/com/fastasyncworldedit/core/command/AnvilCommands.java b/worldedit-core/src/main/java/com/sk89q/worldedit/command/AnvilCommands.java similarity index 92% rename from worldedit-core/src/main/java/com/fastasyncworldedit/core/command/AnvilCommands.java rename to worldedit-core/src/main/java/com/sk89q/worldedit/command/AnvilCommands.java index 5c5ad228c..8ce6d9696 100644 --- a/worldedit-core/src/main/java/com/fastasyncworldedit/core/command/AnvilCommands.java +++ b/worldedit-core/src/main/java/com/sk89q/worldedit/command/AnvilCommands.java @@ -1,12 +1,24 @@ -package com.fastasyncworldedit.core.command; +package com.sk89q.worldedit.command; +import com.fastasyncworldedit.core.Fawe; +import com.fastasyncworldedit.core.anvil.MCAChunk; +import com.fastasyncworldedit.core.anvil.MCAWorld; +import com.fastasyncworldedit.core.configuration.Caption; +import com.fastasyncworldedit.core.queue.IQueueChunk; +import com.fastasyncworldedit.core.queue.IQueueExtent; +import com.fastasyncworldedit.core.regions.WorldRegionsRegion; +import com.fastasyncworldedit.core.util.MaskTraverser; import com.sk89q.worldedit.EditSession; +import com.sk89q.worldedit.EditSessionBuilder; import com.sk89q.worldedit.LocalSession; import com.sk89q.worldedit.WorldEdit; import com.sk89q.worldedit.WorldEditException; import com.sk89q.worldedit.command.util.CommandPermissions; import com.sk89q.worldedit.command.util.CommandPermissionsConditionGenerator; import com.sk89q.worldedit.entity.Player; +import com.sk89q.worldedit.extension.platform.Actor; +import com.sk89q.worldedit.function.mask.AbstractExtentMask; +import com.sk89q.worldedit.function.mask.Mask; import com.sk89q.worldedit.function.pattern.Pattern; import com.sk89q.worldedit.internal.annotation.Selection; import com.sk89q.worldedit.regions.Region; @@ -20,13 +32,6 @@ import java.io.IOException; import static com.google.common.base.Preconditions.checkNotNull; -/** - * @deprecated Anvil classes were used on versions prior to 1.13 to trim chunks. 
- * The way how it's been done was unsafe and led to issues back the years, hence it
- * hasn't been implemented in any modern version. Therefore the current
- * implementation is deprecated for removal without replacement.
- */
-@Deprecated
 @CommandContainer(superTypes = CommandPermissionsConditionGenerator.Registration.class)
 public class AnvilCommands {
@@ -116,57 +121,22 @@ public class AnvilCommands {
     //    }

     @Command(
-            name = "replaceall",
-            aliases = {"rea", "repall"},
-            desc = "Replace all blocks in the selection with another"
-    )
+            name = "replaceall", aliases = {"rea", "repall"}, desc = "Replace all blocks matching a mask in an unloaded world with a pattern")
     @CommandPermissions("worldedit.anvil.replaceall")
     public void replaceAll(
-            Player player, String folder,
-            @Arg(name = "from", desc = "String", def = "")
-                    String fromPattern,
-            String toPatternStr,
-            @Switch(name = 'd', desc = "Disable wildcard data matching")
-                    boolean useData
+            Actor actor,
+            @Arg(name = "world", desc = "Unloaded world") MCAWorld world,
+            @Arg(name = "from", desc = "Mask") Mask from,
+            @Arg(name = "to", desc = "Pattern") Pattern to
     ) throws WorldEditException {
-        // final FaweBlockMatcher matchFrom; TODO NOT IMPLEMENTED
-        // if (from == null) {
-        //     matchFrom = FaweBlockMatcher.NOT_AIR;
-        // } else {
-        //     if (from.contains(":")) {
-        //         useData = true; //override d flag, if they specified data they want it
-        //     }
-        //     matchFrom = FaweBlockMatcher.fromBlocks(worldEdit.getBlocks(player, from, true), useData);
-        // }
-        // final FaweBlockMatcher matchTo = FaweBlockMatcher.setBlocks(worldEdit.getBlocks(player, to, true));
-        // ReplaceSimpleFilter filter = new ReplaceSimpleFilter(matchFrom, matchTo);
-        // ReplaceSimpleFilter result = runWithWorld(player, folder, filter, true);
-        // if (result != null) player.print(Caption.of("fawe.worldedit.visitor.visitor.block", (result.getTotal())));
+        IQueueExtent<IQueueChunk> queueExtent = Fawe.instance().getQueueHandler().getMCAQueue(world);
+        new MaskTraverser(from).setNewExtent(queueExtent);
+        queueExtent.replaceBlocks(new WorldRegionsRegion(world), from, to);
+        queueExtent.flush();
+        actor.print(Caption.of("fawe.worldedit.anvil.replaceall.complete"));
     }

-    @Command(
-            name = "remapall",
-            descFooter = "Remap the world between MCPE/PC values",
-            desc = "Remap the world between MCPE/PC values"
-    )
-    @CommandPermissions("worldedit.anvil.remapall")
-    public void remapall(Player player, String folder) throws WorldEditException {
-        // ClipboardRemapper.RemapPlatform from; TODO NOT IMPLEMENTED
-        // ClipboardRemapper.RemapPlatform to;
-        // from = ClipboardRemapper.RemapPlatform.PE;
-        // to = ClipboardRemapper.RemapPlatform.PC;
-        // RemapFilter filter = new RemapFilter(from, to);
-        // RemapFilter result = runWithWorld(player, folder, filter, true);
-        // if (result != null) {
-        //     player.print(Caption.of("fawe.worldedit.visitor.visitor.block", (result.getTotal())));
-        // }
-    }
-
-
-    @Command(
-            name = "deleteallunvisited",
-            aliases = {"delunvisited"},
-            desc = "Delete all chunks which haven't been occupied",
+    @Command(name = "deleteallunvisited", aliases = {"delunvisited"}, desc = "Delete all chunks which haven't been occupied",
             descFooter = "occupied for `age-ticks` (20t = 1s) and \n"
                     + "Have not been accessed since `file-duration` (ms) after creation and\n"
                     + "Have not been used in the past `chunk-inactivity` (ms)"
diff --git a/worldedit-core/src/main/java/com/sk89q/worldedit/command/argument/MCAWorldConverter.java b/worldedit-core/src/main/java/com/sk89q/worldedit/command/argument/MCAWorldConverter.java
new file mode 100644
index 000000000..517645a07
--- /dev/null
+++ b/worldedit-core/src/main/java/com/sk89q/worldedit/command/argument/MCAWorldConverter.java
@@ -0,0 +1,96 @@
+/*
+ * WorldEdit, a Minecraft world manipulation toolkit
+ * Copyright (C) sk89q <http://www.sk89q.com>
+ * Copyright (C) WorldEdit team and contributors
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package com.sk89q.worldedit.command.argument;
+
+import com.fastasyncworldedit.core.Fawe;
+import com.fastasyncworldedit.core.anvil.MCAWorld;
+import com.sk89q.worldedit.WorldEdit;
+import com.sk89q.worldedit.extension.platform.Capability;
+import com.sk89q.worldedit.internal.util.LogManagerCompat;
+import com.sk89q.worldedit.util.formatting.text.Component;
+import com.sk89q.worldedit.util.formatting.text.TextComponent;
+import com.sk89q.worldedit.world.World;
+import org.apache.logging.log4j.Logger;
+import org.enginehub.piston.CommandManager;
+import org.enginehub.piston.converter.ArgumentConverter;
+import org.enginehub.piston.converter.ConversionResult;
+import org.enginehub.piston.converter.FailedConversion;
+import org.enginehub.piston.converter.SuccessfulConversion;
+import org.enginehub.piston.inject.InjectedValueAccess;
+import org.enginehub.piston.inject.Key;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+public class MCAWorldConverter implements ArgumentConverter<MCAWorld> {
+
+    private static final Logger LOGGER = LogManagerCompat.getLogger();
+
+    public static void register(CommandManager commandManager) {
+        commandManager.registerConverter(Key.of(MCAWorld.class), WORLD_CONVERTER);
+    }
+
+    //FAWE start - Accessed by LocationConverter
+    public static final MCAWorldConverter WORLD_CONVERTER = new MCAWorldConverter();
+    //FAWE end
+
+    private final TextComponent choices;
+
+    private MCAWorldConverter() {
+        this.choices = TextComponent.of("any world");
+    }
+
+    @Override
+    public Component describeAcceptableArguments() {
+        return this.choices;
+    }
+
+    private Stream<String> getWorlds() {
+        try {
+            return Files.list(Fawe.platform().getWorldsFolder()).filter(p -> Files.isDirectory(p) && Files.exists(p.resolve(
+                    "level.dat"))).map(p -> p.getFileName().toString());
+        } catch (IOException e) {
+            LOGGER.error("Error accessing worlds", e);
+            return Stream.empty();
+        }
+    }
+
+    @Override
+    public List<String> getSuggestions(String input, InjectedValueAccess context) {
+        return getWorlds()
+                .filter(world -> world.startsWith(input))
+                .collect(Collectors.toList());
+    }
+
+    @Override
+    public ConversionResult<MCAWorld> convert(String s, InjectedValueAccess injectedValueAccess) {
+        String result = getWorlds()
+                .filter(world -> world.equals(s))
+                .findAny().orElse(null);
+        return result == null
+                ?
FailedConversion.from(new IllegalArgumentException( + "Not a valid world: " + s)) + : SuccessfulConversion.fromSingle(MCAWorld.of(result)); + } + +} diff --git a/worldedit-core/src/main/java/com/sk89q/worldedit/extension/platform/Platform.java b/worldedit-core/src/main/java/com/sk89q/worldedit/extension/platform/Platform.java index f20f3ce9b..72997b85b 100644 --- a/worldedit-core/src/main/java/com/sk89q/worldedit/extension/platform/Platform.java +++ b/worldedit-core/src/main/java/com/sk89q/worldedit/extension/platform/Platform.java @@ -276,5 +276,21 @@ public interface Platform extends Keyed { default IBatchProcessor getPlatformPostProcessor(boolean fastMode) { return null; } + + /** + * Get a char array of minecraft internal IDs against FAWE char IDs + */ + @Nullable + default char[] getIbdToStateOrdinal() { + return null; + } + + /** + * Get an int array of FAWE char IDs against minecraft internal IDs + */ + @Nullable + default int[] getOrdinalToIbdID() { + return null; + } //FAWE end } diff --git a/worldedit-core/src/main/java/com/sk89q/worldedit/extension/platform/PlatformCommandManager.java b/worldedit-core/src/main/java/com/sk89q/worldedit/extension/platform/PlatformCommandManager.java index 7b493cf98..3166666ab 100644 --- a/worldedit-core/src/main/java/com/sk89q/worldedit/extension/platform/PlatformCommandManager.java +++ b/worldedit-core/src/main/java/com/sk89q/worldedit/extension/platform/PlatformCommandManager.java @@ -41,6 +41,8 @@ import com.sk89q.worldedit.LocalConfiguration; import com.sk89q.worldedit.LocalSession; import com.sk89q.worldedit.MissingWorldException; import com.sk89q.worldedit.WorldEdit; +import com.sk89q.worldedit.command.AnvilCommands; +import com.sk89q.worldedit.command.AnvilCommandsRegistration; import com.sk89q.worldedit.command.ApplyBrushCommands; import com.sk89q.worldedit.command.BiomeCommands; import com.sk89q.worldedit.command.BiomeCommandsRegistration; @@ -95,6 +97,7 @@ import com.sk89q.worldedit.command.argument.ExpressionConverter; import com.sk89q.worldedit.command.argument.FactoryConverter; import com.sk89q.worldedit.command.argument.HeightConverter; import com.sk89q.worldedit.command.argument.LocationConverter; +import com.sk89q.worldedit.command.argument.MCAWorldConverter; import com.sk89q.worldedit.command.argument.OffsetConverter; import com.sk89q.worldedit.command.argument.RegionFactoryConverter; import com.sk89q.worldedit.command.argument.RegistryConverter; @@ -273,6 +276,7 @@ public final class PlatformCommandManager { HeightConverter.register(commandManager); OffsetConverter.register(worldEdit, commandManager); //FAWE start + MCAWorldConverter.register(commandManager); commandManager.registerConverter( Key.of(com.sk89q.worldedit.function.pattern.Pattern.class, Annotations.patternList()), CommaSeparatedValuesConverter.wrap(commandManager.getConverter(Key.of( @@ -548,6 +552,13 @@ public final class PlatformCommandManager { HistorySubCommandsRegistration.builder(), new HistorySubCommands(history) ); + registerSubCommands( + "/anvil", + ImmutableList.of(), + "Anvil commands", + AnvilCommandsRegistration.builder(), + new AnvilCommands(worldEdit) + ); //FAWE end this.registration.register( commandManager, diff --git a/worldedit-core/src/main/java/com/sk89q/worldedit/regions/AbstractRegion.java b/worldedit-core/src/main/java/com/sk89q/worldedit/regions/AbstractRegion.java index 85979b217..00ab5af3a 100644 --- a/worldedit-core/src/main/java/com/sk89q/worldedit/regions/AbstractRegion.java +++ 
b/worldedit-core/src/main/java/com/sk89q/worldedit/regions/AbstractRegion.java
@@ -20,8 +20,6 @@
 package com.sk89q.worldedit.regions;

 import com.fastasyncworldedit.core.math.BlockVectorSet;
-import com.sk89q.worldedit.WorldEdit;
-import com.sk89q.worldedit.extension.platform.Capability;
 import com.sk89q.worldedit.math.BlockVector2;
 import com.sk89q.worldedit.math.BlockVector3;
 import com.sk89q.worldedit.math.Vector3;
diff --git a/worldedit-core/src/main/resources/lang/strings.json b/worldedit-core/src/main/resources/lang/strings.json
index 5e922cdae..7e303dfe5 100644
--- a/worldedit-core/src/main/resources/lang/strings.json
+++ b/worldedit-core/src/main/resources/lang/strings.json
@@ -32,6 +32,7 @@
   "fawe.worldedit.history.command.undo.disabled": "Undo disabled, use: //fast",
   "fawe.worldedit.selection.selection.count": "Counted {0} blocks.",
   "fawe.worldedit.anvil.world.is.loaded": "The world shouldn't be in use when executing. Unload the world, or use -f to override (save first)",
+  "fawe.worldedit.anvil.replaceall.complete": "Anvil replaceall completed.",
   "fawe.worldedit.brush.brush.reset": "Reset your brush. (SHIFT + Click)",
   "fawe.worldedit.brush.brush.none": "You aren't holding a brush!",
   "fawe.worldedit.brush.brush.scroll.action.set": "Set scroll action to {0}",
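
A minimal usage sketch (not part of the patch) of the anvil queue path wired up above. It mirrors AnvilCommands#replaceAll: the world folder name and the caller-supplied Mask/Pattern are placeholders, and the generic parameter on IQueueExtent follows the IQueueChunk import used in AnvilCommands.

    // Sketch only: drives the new anvil-backed queue outside the command layer.
    // Assumes the world folder is not loaded by the server and that mask/pattern
    // were already parsed by the caller.
    void replaceAllInUnloadedWorld(Mask mask, Pattern pattern) throws WorldEditException {
        MCAWorld world = MCAWorld.of("world_the_end");                     // example folder name under the worlds directory
        IQueueExtent<IQueueChunk> queue = Fawe.instance().getQueueHandler().getMCAQueue(world);
        new MaskTraverser(mask).setNewExtent(queue);                       // bind mask lookups to the anvil-backed extent
        queue.replaceBlocks(new WorldRegionsRegion(world), mask, pattern); // every region file in the world
        queue.flush();                                                     // write modified .mca data back to disk
    }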