Mirror of
https://github.com/IntellectualSites/FastAsyncWorldEdit.git
Synchronized 2024-11-19 09:20:08 +01:00
Merge remote-tracking branch 'origin/main' into feat/spongeSchemV3
This commit is contained in:
Commit
84ea606092
@@ -824,7 +824,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         nmsChunk.mustNotSave = false;
         nmsChunk.setUnsaved(true);
         // send to player
-        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
+        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
             this.send();
         }
         if (finalizer != null) {
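The widened condition above relies on Java operator precedence: && binds tighter than ||, so the added clause forms a single unit meaning "no block sections were touched but biome data was written", and only then does the early send trigger despite delayed packet sending. A minimal, self-contained sketch of that grouping (the helper and its parameters are illustrative, not adapter code):

// Illustrative only: mirrors the grouping of the new send condition.
final class SendCheckSketch {

    // send immediately when lighting is off, packet sending is not delayed,
    // or the edit touched no block sections but did write biome data
    static boolean shouldSendNow(int lightingMode, boolean delayPacketSending, int finalMask, Object biomes) {
        boolean onlyBiomesChanged = finalMask == 0 && biomes != null;
        return lightingMode == 0 || !delayPacketSending || onlyBiomesChanged;
    }
}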
@@ -1109,25 +1109,14 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
             return null;
         }
-        PalettedContainer<Holder<Biome>> biomeData;
-        if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomeData = palettedContainer.copy();
-        } else {
-            LOGGER.warn(
-                    "Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
-                            "type {} but got {}",
-                    PalettedContainer.class.getSimpleName(),
-                    data.getClass().getSimpleName()
-            );
-            biomeData = data.recreate();
-        }
+        PalettedContainer<Holder<Biome>> biomeData = data.recreate();
         for (int y = 0, index = 0; y < 4; y++) {
             for (int z = 0; z < 4; z++) {
                 for (int x = 0; x < 4; x++, index++) {
                     BiomeType biomeType = sectionBiomes[index];
                     if (biomeType == null) {
-                        biomeData.set(x, y, z, data.get(x, y, z));
-                    } else {
+                        continue;
+                    }
                     biomeData.set(
                             x,
                             y,
@@ -1137,6 +1126,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
                     }
                 }
             }
         }
         return biomeData;
     }
@@ -45,7 +45,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     private final int maxHeight;
     final ServerLevel serverLevel;
     final LevelChunk levelChunk;
-    private PalettedContainer<Holder<Biome>>[] biomes = null;
+    private Holder<Biome>[][] biomes = null;
 
     protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
         this.levelChunk = levelChunk;
@@ -144,7 +144,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
 
     @Override
     public BiomeType getBiomeType(int x, int y, int z) {
-        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
+        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
         return PaperweightPlatformAdapter.adapt(biome, serverLevel);
     }
 
@@ -173,10 +173,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
 
     protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
         if (biomes == null) {
-            biomes = new PalettedContainer[getSectionCount()];
+            biomes = new Holder[getSectionCount()][];
         }
+        if (biomes[layer] == null) {
+            biomes[layer] = new Holder[64];
+        }
         if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomes[layer] = palettedContainer.copy();
+            for (int i = 0; i < 64; i++) {
+                biomes[layer][i] = palettedContainer.get(i);
+            }
         } else {
             LOGGER.error(
                     "Cannot correctly save biomes to history. Expected class type {} but got {}",
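The PaperweightGetBlocks_Copy hunks above replace the per-section PalettedContainer history copy with a plain Holder<Biome>[64] array, and getBiomeType swaps a container lookup for a bit-twiddled array index. The expression (y & 12) << 2 | (z & 12) | (x & 12) >> 2 is the linear index of the 4x4x4 biome cell within a section, matching the ordering a paletted container uses; a small standalone check of that equivalence (inferred from the expressions shown, not repository code):

// Demonstrates that (y & 12) << 2 | (z & 12) | (x & 12) >> 2 equals the
// linear 4x4x4 cell index cellY * 16 + cellZ * 4 + cellX, i.e. the same cell
// that a get(x >> 2, (y & 15) >> 2, z >> 2) container lookup would address.
public final class BiomeIndexCheck {

    public static void main(String[] args) {
        for (int y = 0; y < 16; y++) {
            for (int z = 0; z < 16; z++) {
                for (int x = 0; x < 16; x++) {
                    int packed = (y & 12) << 2 | (z & 12) | (x & 12) >> 2;
                    int linear = ((y & 15) >> 2) * 16 + ((z & 15) >> 2) * 4 + ((x & 15) >> 2);
                    if (packed != linear) {
                        throw new AssertionError(x + "," + y + "," + z);
                    }
                }
            }
        }
        System.out.println("index expressions agree for all in-section coordinates");
    }
}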
@@ -822,7 +822,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         nmsChunk.mustNotSave = false;
         nmsChunk.setUnsaved(true);
         // send to player
-        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
+        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
             this.send();
         }
         if (finalizer != null) {
@@ -1106,25 +1106,14 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
             return null;
         }
-        PalettedContainer<Holder<Biome>> biomeData;
-        if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomeData = palettedContainer.copy();
-        } else {
-            LOGGER.warn(
-                    "Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
-                            "type {} but got {}",
-                    PalettedContainer.class.getSimpleName(),
-                    data.getClass().getSimpleName()
-            );
-            biomeData = data.recreate();
-        }
+        PalettedContainer<Holder<Biome>> biomeData = data.recreate();
         for (int y = 0, index = 0; y < 4; y++) {
             for (int z = 0; z < 4; z++) {
                 for (int x = 0; x < 4; x++, index++) {
                     BiomeType biomeType = sectionBiomes[index];
                     if (biomeType == null) {
-                        biomeData.set(x, y, z, data.get(x, y, z));
-                    } else {
+                        continue;
+                    }
                     biomeData.set(
                             x,
                             y,
@@ -1134,6 +1123,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
                     }
                 }
             }
         }
         return biomeData;
     }
@@ -45,7 +45,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     private final int maxHeight;
     final ServerLevel serverLevel;
     final LevelChunk levelChunk;
-    private PalettedContainer<Holder<Biome>>[] biomes = null;
+    private Holder<Biome>[][] biomes = null;
 
     protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
         this.levelChunk = levelChunk;
@@ -144,7 +144,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
 
     @Override
    public BiomeType getBiomeType(int x, int y, int z) {
-        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
+        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
         return PaperweightPlatformAdapter.adapt(biome, serverLevel);
     }
 
@@ -173,10 +173,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
 
     protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
         if (biomes == null) {
-            biomes = new PalettedContainer[getSectionCount()];
+            biomes = new Holder[getSectionCount()][];
         }
+        if (biomes[layer] == null) {
+            biomes[layer] = new Holder[64];
+        }
         if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomes[layer] = palettedContainer.copy();
+            for (int i = 0; i < 64; i++) {
+                biomes[layer][i] = palettedContainer.get(i);
+            }
         } else {
             LOGGER.error(
                     "Cannot correctly save biomes to history. Expected class type {} but got {}",
@@ -830,7 +830,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         nmsChunk.mustNotSave = false;
         nmsChunk.setUnsaved(true);
         // send to player
-        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
+        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
             this.send();
         }
         if (finalizer != null) {
@@ -1114,25 +1114,14 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
             return null;
         }
-        PalettedContainer<Holder<Biome>> biomeData;
-        if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomeData = palettedContainer.copy();
-        } else {
-            LOGGER.warn(
-                    "Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
-                            "type {} but got {}",
-                    PalettedContainer.class.getSimpleName(),
-                    data.getClass().getSimpleName()
-            );
-            biomeData = data.recreate();
-        }
+        PalettedContainer<Holder<Biome>> biomeData = data.recreate();
         for (int y = 0, index = 0; y < 4; y++) {
             for (int z = 0; z < 4; z++) {
                 for (int x = 0; x < 4; x++, index++) {
                     BiomeType biomeType = sectionBiomes[index];
                     if (biomeType == null) {
-                        biomeData.set(x, y, z, data.get(x, y, z));
-                    } else {
+                        continue;
+                    }
                     biomeData.set(
                             x,
                             y,
@@ -1142,6 +1131,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
                     }
                 }
             }
         }
         return biomeData;
     }
@@ -45,7 +45,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     private final int maxHeight;
     final ServerLevel serverLevel;
     final LevelChunk levelChunk;
-    private PalettedContainer<Holder<Biome>>[] biomes = null;
+    private Holder<Biome>[][] biomes = null;
 
     protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
         this.levelChunk = levelChunk;
@@ -144,7 +144,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
 
     @Override
     public BiomeType getBiomeType(int x, int y, int z) {
-        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
+        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
         return PaperweightPlatformAdapter.adapt(biome, serverLevel);
     }
 
@@ -173,10 +173,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
 
     protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
         if (biomes == null) {
-            biomes = new PalettedContainer[getSectionCount()];
+            biomes = new Holder[getSectionCount()][];
         }
+        if (biomes[layer] == null) {
+            biomes[layer] = new Holder[64];
+        }
         if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomes[layer] = palettedContainer.copy();
+            for (int i = 0; i < 64; i++) {
+                biomes[layer][i] = palettedContainer.get(i);
+            }
         } else {
             LOGGER.error(
                     "Cannot correctly save biomes to history. Expected class type {} but got {}",
@@ -829,7 +829,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         nmsChunk.mustNotSave = false;
         nmsChunk.setUnsaved(true);
         // send to player
-        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
+        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
             this.send();
         }
         if (finalizer != null) {
@@ -1111,25 +1111,14 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
             return null;
         }
-        PalettedContainer<Holder<Biome>> biomeData;
-        if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomeData = palettedContainer.copy();
-        } else {
-            LOGGER.warn(
-                    "Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
-                            "type {} but got {}",
-                    PalettedContainer.class.getSimpleName(),
-                    data.getClass().getSimpleName()
-            );
-            biomeData = data.recreate();
-        }
+        PalettedContainer<Holder<Biome>> biomeData = data.recreate();
         for (int y = 0, index = 0; y < 4; y++) {
             for (int z = 0; z < 4; z++) {
                 for (int x = 0; x < 4; x++, index++) {
                     BiomeType biomeType = sectionBiomes[index];
                     if (biomeType == null) {
-                        biomeData.set(x, y, z, data.get(x, y, z));
-                    } else {
+                        continue;
+                    }
                     biomeData.set(
                             x,
                             y,
@@ -1139,6 +1128,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
                     }
                 }
             }
         }
         return biomeData;
     }
@@ -45,7 +45,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     private final int maxHeight;
     final ServerLevel serverLevel;
     final LevelChunk levelChunk;
-    private PalettedContainer<Holder<Biome>>[] biomes = null;
+    private Holder<Biome>[][] biomes = null;
 
     protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
         this.levelChunk = levelChunk;
@@ -144,7 +144,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
 
     @Override
     public BiomeType getBiomeType(int x, int y, int z) {
-        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
+        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
         return PaperweightPlatformAdapter.adapt(biome, serverLevel);
     }
 
@@ -173,10 +173,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
 
     protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
         if (biomes == null) {
-            biomes = new PalettedContainer[getSectionCount()];
+            biomes = new Holder[getSectionCount()][];
         }
+        if (biomes[layer] == null) {
+            biomes[layer] = new Holder[64];
+        }
         if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomes[layer] = palettedContainer.copy();
+            for (int i = 0; i < 64; i++) {
+                biomes[layer][i] = palettedContainer.get(i);
+            }
         } else {
             LOGGER.error(
                     "Cannot correctly save biomes to history. Expected class type {} but got {}",
@@ -830,7 +830,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         nmsChunk.mustNotSave = false;
         nmsChunk.setUnsaved(true);
         // send to player
-        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
+        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
             this.send();
         }
         if (finalizer != null) {
@@ -1114,25 +1114,14 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
             return null;
         }
-        PalettedContainer<Holder<Biome>> biomeData;
-        if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomeData = palettedContainer.copy();
-        } else {
-            LOGGER.warn(
-                    "Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
-                            "type {} but got {}",
-                    PalettedContainer.class.getSimpleName(),
-                    data.getClass().getSimpleName()
-            );
-            biomeData = data.recreate();
-        }
+        PalettedContainer<Holder<Biome>> biomeData = data.recreate();
         for (int y = 0, index = 0; y < 4; y++) {
             for (int z = 0; z < 4; z++) {
                 for (int x = 0; x < 4; x++, index++) {
                     BiomeType biomeType = sectionBiomes[index];
                     if (biomeType == null) {
-                        biomeData.set(x, y, z, data.get(x, y, z));
-                    } else {
+                        continue;
+                    }
                     biomeData.set(
                             x,
                             y,
@@ -1142,6 +1131,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
                     }
                 }
             }
         }
         return biomeData;
     }
@@ -46,7 +46,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     private final int maxHeight;
     final ServerLevel serverLevel;
     final LevelChunk levelChunk;
-    private PalettedContainer<Holder<Biome>>[] biomes = null;
+    private Holder<Biome>[][] biomes = null;
 
     protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
         this.levelChunk = levelChunk;
@@ -145,7 +145,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
 
     @Override
     public BiomeType getBiomeType(int x, int y, int z) {
-        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
+        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
         return PaperweightPlatformAdapter.adapt(biome, serverLevel);
     }
 
@@ -174,10 +174,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
 
     protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
         if (biomes == null) {
-            biomes = new PalettedContainer[getSectionCount()];
+            biomes = new Holder[getSectionCount()][];
         }
+        if (biomes[layer] == null) {
+            biomes[layer] = new Holder[64];
+        }
         if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomes[layer] = palettedContainer.copy();
+            for (int i = 0; i < 64; i++) {
+                biomes[layer][i] = palettedContainer.get(i);
+            }
         } else {
             LOGGER.error(
                     "Cannot correctly save biomes to history. Expected class type {} but got {}",
@@ -2,6 +2,7 @@ package com.fastasyncworldedit.core;
 
 import com.fastasyncworldedit.core.configuration.Settings;
 import com.fastasyncworldedit.core.internal.exception.FaweException;
+import com.fastasyncworldedit.core.limit.FaweLimit;
 import com.fastasyncworldedit.core.queue.implementation.QueueHandler;
 import com.fastasyncworldedit.core.util.CachedTextureUtil;
 import com.fastasyncworldedit.core.util.CleanTextureUtil;
@@ -105,6 +106,8 @@ public class Fawe {
          * Implementation dependent stuff
          */
         this.setupConfigs();
+        FaweLimit.MAX.CONFIRM_LARGE =
+                Settings.settings().LIMITS.get("default").CONFIRM_LARGE || Settings.settings().GENERAL.LIMIT_UNLIMITED_CONFIRMS;
         TaskManager.IMP = this.implementation.getTaskManager();
 
         TaskManager.taskManager().async(() -> {
@@ -791,6 +791,11 @@ public class Settings extends Config {
         })
         public boolean UNSTUCK_ON_GENERATE = true;
 
+        @Comment({
+                "If unlimited limits should still require /confirm on large. Defaults to limits.default.confirm-large otherwise."
+        })
+        public boolean LIMIT_UNLIMITED_CONFIRMS = true;
+
     }
 
 }
@@ -1,6 +1,7 @@
 package com.fastasyncworldedit.core.limit;
 
 import com.fastasyncworldedit.core.FaweCache;
+import com.fastasyncworldedit.core.configuration.Settings;
 
 import java.util.Collections;
 import java.util.Set;
@@ -121,7 +122,8 @@ public class FaweLimit {
         MAX.SCHEM_FILE_SIZE_LIMIT = Integer.MAX_VALUE;
         MAX.MAX_EXPRESSION_MS = 50;
         MAX.FAST_PLACEMENT = true;
-        MAX.CONFIRM_LARGE = true;
+        MAX.CONFIRM_LARGE =
+                Settings.settings().LIMITS.get("default").CONFIRM_LARGE || Settings.settings().GENERAL.LIMIT_UNLIMITED_CONFIRMS;
         MAX.RESTRICT_HISTORY_TO_REGIONS = false;
         MAX.STRIP_NBT = Collections.emptySet();
         MAX.UNIVERSAL_DISALLOWED_BLOCKS = false;
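The Settings, Fawe, FaweLimit and WorldEditCommands hunks in this commit all wire the same rule: the unlimited limit (FaweLimit.MAX) only skips /confirm for large edits when both the default limit's confirm-large flag and the new general toggle allow it. A tiny illustrative helper capturing that rule (field names follow the diff; the helper itself is not repository code):

// Illustrative only: the boolean rule assigned to FaweLimit.MAX.CONFIRM_LARGE above.
final class ConfirmLargeRuleSketch {

    // true  -> large edits still require /confirm even with unlimited limits
    // false -> large edits run without confirmation
    static boolean confirmLargeForUnlimited(boolean defaultConfirmLarge, boolean limitUnlimitedConfirms) {
        return defaultConfirmLarge || limitUnlimitedConfirms;
    }
}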
@@ -115,7 +115,7 @@ public interface IQueueExtent<T extends IChunk> extends Flushable, Trimable, ICh
      * A filter block is used to iterate over blocks / positions. Essentially combines BlockVector3,
      * Extent and BlockState functions in a way that avoids lookups.
      */
-    ChunkFilterBlock initFilterBlock();
+    ChunkFilterBlock createFilterBlock();
 
     /**
      * Returns the number of chunks in this queue.
@@ -129,7 +129,14 @@ public interface IQueueExtent<T extends IChunk> extends Flushable, Trimable, ICh
      */
     boolean isEmpty();
 
-    default ChunkFilterBlock apply(ChunkFilterBlock block, Filter filter, Region region, int chunkX, int chunkZ, boolean full) {
+    default ChunkFilterBlock apply(
+            @Nullable ChunkFilterBlock block,
+            Filter filter,
+            Region region,
+            int chunkX,
+            int chunkZ,
+            boolean full
+    ) {
         if (!filter.appliesChunk(chunkX, chunkZ)) {
             return block;
         }
@@ -139,8 +146,9 @@ public interface IQueueExtent<T extends IChunk> extends Flushable, Trimable, ICh
         if (newChunk != null) {
             chunk = newChunk;
             if (block == null) {
-                block = this.initFilterBlock();
+                block = this.createFilterBlock();
             }
+            block.initChunk(chunkX, chunkZ);
             chunk.filterBlocks(filter, block, region, full);
         }
         this.submit(chunk);
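With the renamed createFilterBlock() and the reshaped apply(...) above, the queue extent now creates the ChunkFilterBlock lazily and re-initialises it for each chunk itself, so callers simply thread the returned block through successive calls, starting from null. A usage sketch under those assumptions; the package names follow FAWE's usual layout and the caller class is hypothetical, not repository code:

import com.fastasyncworldedit.core.extent.filter.block.ChunkFilterBlock;
import com.fastasyncworldedit.core.queue.Filter;
import com.fastasyncworldedit.core.queue.IQueueExtent;
import com.sk89q.worldedit.math.BlockVector2;
import com.sk89q.worldedit.regions.Region;

// Hypothetical caller of the reworked apply(...) contract.
final class ApplyLoopSketch {

    static void run(IQueueExtent<?> queue, Filter filter, Region region, Iterable<BlockVector2> chunks, boolean full) {
        // Pass the block returned by the previous call back in: the queue allocates
        // it once via createFilterBlock() and calls initChunk(chunkX, chunkZ) per
        // chunk, so callers and region implementations no longer do that themselves.
        ChunkFilterBlock block = null;
        for (BlockVector2 pos : chunks) {
            block = queue.apply(block, filter, region, pos.x(), pos.z(), full);
        }
    }
}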
@@ -28,6 +28,7 @@ import com.sk89q.worldedit.function.pattern.Pattern;
 import com.sk89q.worldedit.internal.util.LogManagerCompat;
 import com.sk89q.worldedit.math.BlockVector2;
 import com.sk89q.worldedit.math.BlockVector3;
+import com.sk89q.worldedit.regions.CuboidRegion;
 import com.sk89q.worldedit.regions.Region;
 import com.sk89q.worldedit.util.Countable;
 import com.sk89q.worldedit.world.World;
@@ -133,14 +134,16 @@ public class ParallelQueueExtent extends PassthroughExtent {
         final int size = Math.min(chunks.size(), Settings.settings().QUEUE.PARALLEL_THREADS);
         if (size <= 1) {
             // if PQE is ever used with PARALLEL_THREADS = 1, or only one chunk is edited, just run sequentially
+            ChunkFilterBlock block = null;
             while (chunksIter.hasNext()) {
                 BlockVector2 pos = chunksIter.next();
-                getExtent().apply(null, filter, region, pos.x(), pos.z(), full);
+                block = getExtent().apply(block, filter, region, pos.x(), pos.z(), full);
             }
         } else {
             final ForkJoinTask[] tasks = IntStream.range(0, size).mapToObj(i -> handler.submit(() -> {
                 try {
                     final Filter newFilter = filter.fork();
+                    final Region newRegion = region.clone();
                     // Create a chunk that we will reuse/reset for each operation
                     final SingleThreadQueueExtent queue = (SingleThreadQueueExtent) getNewQueue();
                     queue.setFastMode(fastmode);
@@ -162,7 +165,7 @@ public class ParallelQueueExtent extends PassthroughExtent {
                             chunkX = pos.x();
                             chunkZ = pos.z();
                         }
-                        block = queue.apply(block, newFilter, region, chunkX, chunkZ, full);
+                        block = queue.apply(block, newFilter, newRegion, chunkX, chunkZ, full);
                     }
                     queue.flush();
                 } catch (Throwable t) {
@@ -473,7 +473,7 @@ public class SingleThreadQueueExtent extends ExtentBatchProcessorHolder implemen
     }
 
     @Override
-    public ChunkFilterBlock initFilterBlock() {
+    public ChunkFilterBlock createFilterBlock() {
         return new CharFilterBlock(this);
     }
 
@@ -71,7 +71,7 @@ public class PolyhedralRegion extends AbstractRegion {
     public PolyhedralRegion(PolyhedralRegion region) {
         this(region.world);
         vertices.addAll(region.vertices);
-        triangles.addAll(region.triangles);
+        region.triangles.forEach(triangle -> triangles.add(triangle.clone()));
         vertexBacklog.addAll(region.vertexBacklog);
 
         minimumPoint = region.minimumPoint;
@@ -7,10 +7,14 @@ import com.sk89q.worldedit.math.BlockVector3;
 import com.sk89q.worldedit.math.Vector3;
 import com.sk89q.worldedit.regions.polyhedron.Edge;
 
-public class Triangle {
+public class Triangle implements Cloneable {
 
     public static double RADIUS = 0.5;
 
+    private final BlockVector3 pos1;
+    private final BlockVector3 pos2;
+    private final BlockVector3 pos3;
+
     private final double[][] verts = new double[3][3];
     private final double[] center = new double[3];
     private final double[] radius = new double[3];
@@ -28,6 +32,9 @@ public class Triangle {
     private final double b;
 
     public Triangle(BlockVector3 pos1, BlockVector3 pos2, BlockVector3 pos3) {
+        this.pos1 = pos1;
+        this.pos2 = pos2;
+        this.pos3 = pos3;
         verts[0] = new double[]{pos1.x(), pos1.y(), pos1.z()};
         verts[1] = new double[]{pos2.x(), pos2.y(), pos2.z()};
         verts[2] = new double[]{pos3.x(), pos3.y(), pos3.z()};
@@ -290,4 +297,9 @@ public class Triangle {
         return dot(normal, vmax) >= 0.0f;
     }
 
+    @Override
+    public Triangle clone() {
+        return new Triangle(pos1, pos2, pos3);
+    }
+
 }
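Triangle now implements Cloneable and keeps its three corner positions so clone() can rebuild an equivalent instance, and the PolyhedralRegion copy constructor (earlier hunk) clones each triangle instead of sharing the originals, presumably so the copied region stays independent of any later mutation of the source triangles. A minimal standalone illustration of that shallow-versus-deep difference (toy types, not the repository classes):

import java.util.ArrayList;
import java.util.List;

// Illustrative only: why a copy constructor clones elements instead of sharing them.
final class DeepCopySketch {

    static final class Tri implements Cloneable {
        double[] center = new double[3];

        @Override
        public Tri clone() {
            Tri copy = new Tri();
            copy.center = center.clone();
            return copy;
        }
    }

    public static void main(String[] args) {
        List<Tri> original = new ArrayList<>(List.of(new Tri()));

        List<Tri> shallow = new ArrayList<>(original);       // shares the same Tri instances
        List<Tri> deep = new ArrayList<>();
        original.forEach(t -> deep.add(t.clone()));           // independent copies

        original.get(0).center[0] = 42;
        System.out.println(shallow.get(0).center[0]);         // 42.0 - affected by the mutation
        System.out.println(deep.get(0).center[0]);            // 0.0  - unaffected
    }
}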
@@ -22,6 +22,8 @@ package com.sk89q.worldedit.command;
 import com.fastasyncworldedit.core.Fawe;
 import com.fastasyncworldedit.core.FaweVersion;
 import com.fastasyncworldedit.core.configuration.Caption;
+import com.fastasyncworldedit.core.configuration.Settings;
+import com.fastasyncworldedit.core.limit.FaweLimit;
 import com.fastasyncworldedit.core.util.UpdateNotification;
 import com.intellectualsites.paster.IncendoPaster;
 import com.sk89q.worldedit.LocalSession;
@@ -97,6 +99,8 @@ public class WorldEditCommands {
                 .getConfiguration()));
         //FAWE start
         Fawe.instance().setupConfigs();
+        FaweLimit.MAX.CONFIRM_LARGE =
+                Settings.settings().LIMITS.get("default").CONFIRM_LARGE || Settings.settings().GENERAL.LIMIT_UNLIMITED_CONFIRMS;
         //FAWE end
         actor.print(Caption.of("worldedit.reload.config"));
     }
@@ -106,7 +106,8 @@ public class MaskingExtent extends AbstractDelegateExtent implements IBatchProce
     @Override
     public IChunkSet processSet(final IChunk chunk, final IChunkGet get, final IChunkSet set) {
         final ChunkFilterBlock filter = getOrCreateFilterBlock.apply(Thread.currentThread().getId());
-        return filter.filter(chunk, get, set, MaskingExtent.this);
+        filter.initChunk(chunk.getX(), chunk.getZ());
+        return filter.filter(chunk, get, set, this);
     }
 
     @Override
@@ -755,7 +755,6 @@ public class CuboidRegion extends AbstractRegion implements FlatRegion {
     ) {
         int chunkX = chunk.getX();
         int chunkZ = chunk.getZ();
-        block = block.initChunk(chunkX, chunkZ);
 
         //Chunk entry is an "interior chunk" in regards to the entire region, so filter the chunk whole instead of partially
         if ((minX + 15) >> 4 <= chunkX && (maxX - 15) >> 4 >= chunkX && (minZ + 15) >> 4 <= chunkZ && (maxZ - 15) >> 4 >= chunkZ) {
@@ -412,8 +412,6 @@ public class EllipsoidRegion extends AbstractRegion {
             return;
         }
 
-        block = block.initChunk(chunk.getX(), chunk.getZ());
-
         // Get the solid layers
         int cy = center.y();
         int diffYFull = MathMan.usqrt(diffY2);
@@ -268,7 +268,6 @@ public interface Region extends Iterable<BlockVector3>, Cloneable, IBatchProcess
     ) {
         int minSection = Math.max(get.getMinSectionPosition(), getMinimumY() >> 4);
         int maxSection = Math.min(get.getMaxSectionPosition(), getMaximumY() >> 4);
-        block = block.initChunk(chunk.getX(), chunk.getZ());
         for (int layer = minSection; layer <= maxSection; layer++) {
             if ((!full && !get.hasSection(layer)) || !filter.appliesLayer(chunk, layer)) {
                 return;