Mirror of https://github.com/IntellectualSites/FastAsyncWorldEdit.git, synced 2024-11-19 09:20:08 +01:00

Merge branch 'main' into feat/improved-entity-operations

This commit is contained in:
Jordan 2024-06-21 09:17:54 +02:00, committed by GitHub
Commit 8ec9de0054
No GPG key could be found for this signature
GPG key ID: B5690EEEBB952194
32 changed files with 195 additions and 144 deletions

View File

@@ -14,7 +14,7 @@ mapmanager = "1.8.0-SNAPSHOT"
griefprevention = "17.0.0"
griefdefender = "2.1.0-SNAPSHOT"
residence = "4.5._13.1"
towny = "0.100.2.14"
towny = "0.100.3.0"
plotsquared = "7.3.8"
# Third party

View File

@@ -807,7 +807,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
nmsChunk.mustNotSave = false;
nmsChunk.setUnsaved(true);
// send to player
if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
this.send();
}
if (finalizer != null) {
@@ -1092,31 +1092,21 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
return null;
}
PalettedContainer<Holder<Biome>> biomeData;
if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
biomeData = palettedContainer.copy();
} else {
LOGGER.warn(
"Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
"type {} but got {}",
PalettedContainer.class.getSimpleName(),
data.getClass().getSimpleName()
);
biomeData = data.recreate();
}
PalettedContainer<Holder<Biome>> biomeData = data.recreate();
for (int y = 0, index = 0; y < 4; y++) {
for (int z = 0; z < 4; z++) {
for (int x = 0; x < 4; x++, index++) {
BiomeType biomeType = sectionBiomes[index];
if (biomeType == null) {
continue;
biomeData.set(x, y, z, data.get(x, y, z));
} else {
biomeData.set(
x,
y,
z,
biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
);
}
biomeData.set(
x,
y,
z,
biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
);
}
}
}

View File

@@ -47,7 +47,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
private final int chunkZ;
final ServerLevel serverLevel;
final LevelChunk levelChunk;
private PalettedContainer<Holder<Biome>>[] biomes = null;
private Holder<Biome>[][] biomes = null;
protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
this.levelChunk = levelChunk;
@@ -163,7 +163,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
@Override
public BiomeType getBiomeType(int x, int y, int z) {
Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
return PaperweightPlatformAdapter.adapt(biome, serverLevel);
}
@@ -192,10 +192,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
if (biomes == null) {
biomes = new PalettedContainer[getSectionCount()];
biomes = new Holder[getSectionCount()][];
}
if (biomes[layer] == null) {
biomes[layer] = new Holder[64];
}
if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
biomes[layer] = palettedContainer.copy();
for (int i = 0; i < 64; i++) {
biomes[layer][i] = palettedContainer.get(i);
}
} else {
LOGGER.error(
"Cannot correctly save biomes to history. Expected class type {} but got {}",

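
For reference, the flat 0-63 index used in the new getBiomeType line above addresses the 4x4x4 biome cells of a chunk section in y-major, then z, then x order, which appears to match the fill order used in storeBiomes. A minimal equivalent sketch in Java (biomeIndex is a hypothetical helper, not taken from the commit):

// Each biome cell covers a 4x4x4 block volume, so the cell coordinate is the block
// coordinate shifted right by two. (y & 12) << 2, (z & 12) and (x & 12) >> 2 are a
// branch-free way of writing cellY * 16 + cellZ * 4 + cellX for section-local x/z.
static int biomeIndex(int x, int y, int z) {
    int cellX = (x >> 2) & 3;
    int cellY = (y >> 2) & 3;
    int cellZ = (z >> 2) & 3;
    return cellY * 16 + cellZ * 4 + cellX;
}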
View File

@@ -139,7 +139,14 @@ public final class PaperweightPlatformAdapter extends NMSAdapter {
fieldTickingFluidCount.setAccessible(true);
fieldTickingBlockCount = LevelChunkSection.class.getDeclaredField(Refraction.pickName("tickingBlockCount", "g"));
fieldTickingBlockCount.setAccessible(true);
fieldBiomes = LevelChunkSection.class.getDeclaredField(Refraction.pickName("biomes", "j"));
Field tmpFieldBiomes;
try {
// It seems to actually be biomes, but is apparently obfuscated to "j"
tmpFieldBiomes = LevelChunkSection.class.getDeclaredField("biomes");
} catch (NoSuchFieldException ignored) {
tmpFieldBiomes = LevelChunkSection.class.getDeclaredField("j");
}
fieldBiomes = tmpFieldBiomes;
fieldBiomes.setAccessible(true);
Method getVisibleChunkIfPresent = ChunkMap.class.getDeclaredMethod(Refraction.pickName(
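
The try/catch above replaces the single Refraction.pickName lookup: the Mojang-mapped field name is tried first and the obfuscated name is only used as a fallback. A minimal standalone sketch of the same lookup pattern (findField is a hypothetical helper, not taken from the commit):

import java.lang.reflect.Field;

// Try the Mojang-mapped name first; fall back to the obfuscated name if it is absent.
static Field findField(Class<?> owner, String mojangName, String obfuscatedName) throws NoSuchFieldException {
    Field field;
    try {
        field = owner.getDeclaredField(mojangName);
    } catch (NoSuchFieldException ignored) {
        field = owner.getDeclaredField(obfuscatedName);
    }
    field.setAccessible(true);
    return field;
}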

View File

@@ -805,7 +805,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
nmsChunk.mustNotSave = false;
nmsChunk.setUnsaved(true);
// send to player
if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
this.send();
}
if (finalizer != null) {
@@ -1089,31 +1089,21 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
return null;
}
PalettedContainer<Holder<Biome>> biomeData;
if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
biomeData = palettedContainer.copy();
} else {
LOGGER.warn(
"Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
"type {} but got {}",
PalettedContainer.class.getSimpleName(),
data.getClass().getSimpleName()
);
biomeData = data.recreate();
}
PalettedContainer<Holder<Biome>> biomeData = data.recreate();
for (int y = 0, index = 0; y < 4; y++) {
for (int z = 0; z < 4; z++) {
for (int x = 0; x < 4; x++, index++) {
BiomeType biomeType = sectionBiomes[index];
if (biomeType == null) {
continue;
biomeData.set(x, y, z, data.get(x, y, z));
} else {
biomeData.set(
x,
y,
z,
biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
);
}
biomeData.set(
x,
y,
z,
biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
);
}
}
}

View File

@@ -47,7 +47,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
private final int chunkZ;
final ServerLevel serverLevel;
final LevelChunk levelChunk;
private PalettedContainer<Holder<Biome>>[] biomes = null;
private Holder<Biome>[][] biomes = null;
protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
this.levelChunk = levelChunk;
@@ -163,7 +163,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
@Override
public BiomeType getBiomeType(int x, int y, int z) {
Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
return PaperweightPlatformAdapter.adapt(biome, serverLevel);
}
@@ -192,10 +192,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
if (biomes == null) {
biomes = new PalettedContainer[getSectionCount()];
biomes = new Holder[getSectionCount()][];
}
if (biomes[layer] == null) {
biomes[layer] = new Holder[64];
}
if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
biomes[layer] = palettedContainer.copy();
for (int i = 0; i < 64; i++) {
biomes[layer][i] = palettedContainer.get(i);
}
} else {
LOGGER.error(
"Cannot correctly save biomes to history. Expected class type {} but got {}",

View File

@@ -148,7 +148,14 @@ public final class PaperweightPlatformAdapter extends NMSAdapter {
fieldTickingFluidCount.setAccessible(true);
fieldTickingBlockCount = LevelChunkSection.class.getDeclaredField(Refraction.pickName("tickingBlockCount", "f"));
fieldTickingBlockCount.setAccessible(true);
fieldBiomes = LevelChunkSection.class.getDeclaredField(Refraction.pickName("biomes", "i"));
Field tmpFieldBiomes;
try {
// It seems to actually be biomes, but is apparently obfuscated to "i"
tmpFieldBiomes = LevelChunkSection.class.getDeclaredField("biomes");
} catch (NoSuchFieldException ignored) {
tmpFieldBiomes = LevelChunkSection.class.getDeclaredField("i");
}
fieldBiomes = tmpFieldBiomes;
fieldBiomes.setAccessible(true);
Method getVisibleChunkIfPresent = ChunkMap.class.getDeclaredMethod(Refraction.pickName(

View File

@@ -812,7 +812,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
nmsChunk.mustNotSave = false;
nmsChunk.setUnsaved(true);
// send to player
if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
this.send();
}
if (finalizer != null) {
@@ -1096,31 +1096,21 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
return null;
}
PalettedContainer<Holder<Biome>> biomeData;
if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
biomeData = palettedContainer.copy();
} else {
LOGGER.warn(
"Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
"type {} but got {}",
PalettedContainer.class.getSimpleName(),
data.getClass().getSimpleName()
);
biomeData = data.recreate();
}
PalettedContainer<Holder<Biome>> biomeData = data.recreate();
for (int y = 0, index = 0; y < 4; y++) {
for (int z = 0; z < 4; z++) {
for (int x = 0; x < 4; x++, index++) {
BiomeType biomeType = sectionBiomes[index];
if (biomeType == null) {
continue;
biomeData.set(x, y, z, data.get(x, y, z));
} else {
biomeData.set(
x,
y,
z,
biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
);
}
biomeData.set(
x,
y,
z,
biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
);
}
}
}

View File

@@ -47,7 +47,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
private final int chunkZ;
final ServerLevel serverLevel;
final LevelChunk levelChunk;
private PalettedContainer<Holder<Biome>>[] biomes = null;
private Holder<Biome>[][] biomes = null;
protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
this.levelChunk = levelChunk;
@@ -163,7 +163,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
@Override
public BiomeType getBiomeType(int x, int y, int z) {
Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
return PaperweightPlatformAdapter.adapt(biome, serverLevel);
}
@@ -192,10 +192,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
if (biomes == null) {
biomes = new PalettedContainer[getSectionCount()];
biomes = new Holder[getSectionCount()][];
}
if (biomes[layer] == null) {
biomes[layer] = new Holder[64];
}
if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
biomes[layer] = palettedContainer.copy();
for (int i = 0; i < 64; i++) {
biomes[layer][i] = palettedContainer.get(i);
}
} else {
LOGGER.error(
"Cannot correctly save biomes to history. Expected class type {} but got {}",

View File

@@ -142,7 +142,14 @@ public final class PaperweightPlatformAdapter extends NMSAdapter {
fieldTickingFluidCount.setAccessible(true);
fieldTickingBlockCount = LevelChunkSection.class.getDeclaredField(Refraction.pickName("tickingBlockCount", "f"));
fieldTickingBlockCount.setAccessible(true);
fieldBiomes = LevelChunkSection.class.getDeclaredField(Refraction.pickName("biomes", "i"));
Field tmpFieldBiomes;
try {
// It seems to actually be biomes, but is apparently obfuscated to "i"
tmpFieldBiomes = LevelChunkSection.class.getDeclaredField("biomes");
} catch (NoSuchFieldException ignored) {
tmpFieldBiomes = LevelChunkSection.class.getDeclaredField("i");
}
fieldBiomes = tmpFieldBiomes;
fieldBiomes.setAccessible(true);
Method getVisibleChunkIfPresent = ChunkMap.class.getDeclaredMethod(Refraction.pickName(

View File

@@ -811,7 +811,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
nmsChunk.mustNotSave = false;
nmsChunk.setUnsaved(true);
// send to player
if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
this.send();
}
if (finalizer != null) {
@@ -1093,31 +1093,21 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
return null;
}
PalettedContainer<Holder<Biome>> biomeData;
if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
biomeData = palettedContainer.copy();
} else {
LOGGER.warn(
"Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
"type {} but got {}",
PalettedContainer.class.getSimpleName(),
data.getClass().getSimpleName()
);
biomeData = data.recreate();
}
PalettedContainer<Holder<Biome>> biomeData = data.recreate();
for (int y = 0, index = 0; y < 4; y++) {
for (int z = 0; z < 4; z++) {
for (int x = 0; x < 4; x++, index++) {
BiomeType biomeType = sectionBiomes[index];
if (biomeType == null) {
continue;
biomeData.set(x, y, z, data.get(x, y, z));
} else {
biomeData.set(
x,
y,
z,
biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
);
}
biomeData.set(
x,
y,
z,
biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
);
}
}
}

View File

@@ -47,7 +47,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
private final int chunkZ;
final ServerLevel serverLevel;
final LevelChunk levelChunk;
private PalettedContainer<Holder<Biome>>[] biomes = null;
private Holder<Biome>[][] biomes = null;
protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
this.levelChunk = levelChunk;
@@ -163,7 +163,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
@Override
public BiomeType getBiomeType(int x, int y, int z) {
Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
return PaperweightPlatformAdapter.adapt(biome, serverLevel);
}
@@ -192,10 +192,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
if (biomes == null) {
biomes = new PalettedContainer[getSectionCount()];
biomes = new Holder[getSectionCount()][];
}
if (biomes[layer] == null) {
biomes[layer] = new Holder[64];
}
if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
biomes[layer] = palettedContainer.copy();
for (int i = 0; i < 64; i++) {
biomes[layer][i] = palettedContainer.get(i);
}
} else {
LOGGER.error(
"Cannot correctly save biomes to history. Expected class type {} but got {}",

View File

@@ -142,7 +142,14 @@ public final class PaperweightPlatformAdapter extends NMSAdapter {
fieldTickingFluidCount.setAccessible(true);
fieldTickingBlockCount = LevelChunkSection.class.getDeclaredField(Refraction.pickName("tickingBlockCount", "f"));
fieldTickingBlockCount.setAccessible(true);
fieldBiomes = LevelChunkSection.class.getDeclaredField(Refraction.pickName("biomes", "i"));
Field tmpFieldBiomes;
try {
// It seems to actually be biomes, but is apparently obfuscated to "i"
tmpFieldBiomes = LevelChunkSection.class.getDeclaredField("biomes");
} catch (NoSuchFieldException ignored) {
tmpFieldBiomes = LevelChunkSection.class.getDeclaredField("i");
}
fieldBiomes = tmpFieldBiomes;
fieldBiomes.setAccessible(true);
Method getVisibleChunkIfPresent = ChunkMap.class.getDeclaredMethod(Refraction.pickName(

View File

@@ -12,6 +12,6 @@ repositories {
dependencies {
// url=https://repo.papermc.io/service/rest/repository/browse/maven-public/io/papermc/paper/dev-bundle/1.20.6-R0.1-SNAPSHOT/
the<PaperweightUserDependenciesExtension>().paperDevBundle("1.20.6-R0.1-20240604.210637-112")
the<PaperweightUserDependenciesExtension>().paperDevBundle("1.20.6-R0.1-20240615.211816-120")
compileOnly(libs.paperlib)
}

View File

@@ -809,7 +809,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
nmsChunk.mustNotSave = false;
nmsChunk.setUnsaved(true);
// send to player
if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
this.send();
}
if (finalizer != null) {
@@ -1093,31 +1093,21 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
return null;
}
PalettedContainer<Holder<Biome>> biomeData;
if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
biomeData = palettedContainer.copy();
} else {
LOGGER.warn(
"Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
"type {} but got {}",
PalettedContainer.class.getSimpleName(),
data.getClass().getSimpleName()
);
biomeData = data.recreate();
}
PalettedContainer<Holder<Biome>> biomeData = data.recreate();
for (int y = 0, index = 0; y < 4; y++) {
for (int z = 0; z < 4; z++) {
for (int x = 0; x < 4; x++, index++) {
BiomeType biomeType = sectionBiomes[index];
if (biomeType == null) {
continue;
biomeData.set(x, y, z, data.get(x, y, z));
} else {
biomeData.set(
x,
y,
z,
biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
);
}
biomeData.set(
x,
y,
z,
biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
);
}
}
}

View File

@@ -48,7 +48,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
private final int chunkZ;
final ServerLevel serverLevel;
final LevelChunk levelChunk;
private PalettedContainer<Holder<Biome>>[] biomes = null;
private Holder<Biome>[][] biomes = null;
protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
this.levelChunk = levelChunk;
@@ -164,7 +164,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
@Override
public BiomeType getBiomeType(int x, int y, int z) {
Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
return PaperweightPlatformAdapter.adapt(biome, serverLevel);
}
@@ -193,10 +193,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
if (biomes == null) {
biomes = new PalettedContainer[getSectionCount()];
biomes = new Holder[getSectionCount()][];
}
if (biomes[layer] == null) {
biomes[layer] = new Holder[64];
}
if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
biomes[layer] = palettedContainer.copy();
for (int i = 0; i < 64; i++) {
biomes[layer][i] = palettedContainer.get(i);
}
} else {
LOGGER.error(
"Cannot correctly save biomes to history. Expected class type {} but got {}",

View File

@@ -1,5 +1,6 @@
package com.fastasyncworldedit.bukkit.regions;
import com.fastasyncworldedit.core.configuration.Settings;
import com.fastasyncworldedit.core.regions.FaweMask;
import com.fastasyncworldedit.core.regions.RegionWrapper;
import com.sk89q.worldedit.bukkit.BukkitAdapter;
@@ -158,6 +159,9 @@ public class WorldGuardFeature extends BukkitMaskManager implements Listener {
@Override
public FaweMask getMask(com.sk89q.worldedit.entity.Player wePlayer, MaskType type, boolean isWhitelist) {
if (isWhitelist && Settings.settings().REGION_RESTRICTIONS_OPTIONS.WORLDGUARD_REGION_BLACKLIST) {
return new FaweMask(RegionWrapper.GLOBAL());
}
final Player player = BukkitAdapter.adapt(wePlayer);
final LocalPlayer localplayer = this.worldguard.wrapPlayer(player);
final Location location = player.getLocation();

View File

@@ -2,6 +2,7 @@ package com.fastasyncworldedit.core;
import com.fastasyncworldedit.core.configuration.Settings;
import com.fastasyncworldedit.core.internal.exception.FaweException;
import com.fastasyncworldedit.core.limit.FaweLimit;
import com.fastasyncworldedit.core.queue.implementation.QueueHandler;
import com.fastasyncworldedit.core.util.CachedTextureUtil;
import com.fastasyncworldedit.core.util.CleanTextureUtil;
@@ -105,6 +106,8 @@ public class Fawe {
* Implementation dependent stuff
*/
this.setupConfigs();
FaweLimit.MAX.CONFIRM_LARGE =
Settings.settings().LIMITS.get("default").CONFIRM_LARGE || Settings.settings().GENERAL.LIMIT_UNLIMITED_CONFIRMS;
TaskManager.IMP = this.implementation.getTaskManager();
TaskManager.taskManager().async(() -> {

View File

@@ -798,6 +798,11 @@ public class Settings extends Config {
})
public boolean UNSTUCK_ON_GENERATE = true;
@Comment({
"If unlimited limits should still require /confirm on large. Defaults to limits.default.confirm-large otherwise."
})
public boolean LIMIT_UNLIMITED_CONFIRMS = true;
}
}

View File

@@ -1,6 +1,7 @@
package com.fastasyncworldedit.core.limit;
import com.fastasyncworldedit.core.FaweCache;
import com.fastasyncworldedit.core.configuration.Settings;
import java.util.Collections;
import java.util.Set;
@@ -121,7 +122,8 @@ public class FaweLimit {
MAX.SCHEM_FILE_SIZE_LIMIT = Integer.MAX_VALUE;
MAX.MAX_EXPRESSION_MS = 50;
MAX.FAST_PLACEMENT = true;
MAX.CONFIRM_LARGE = true;
MAX.CONFIRM_LARGE =
Settings.settings().LIMITS.get("default").CONFIRM_LARGE || Settings.settings().GENERAL.LIMIT_UNLIMITED_CONFIRMS;
MAX.RESTRICT_HISTORY_TO_REGIONS = false;
MAX.STRIP_NBT = Collections.emptySet();
MAX.UNIVERSAL_DISALLOWED_BLOCKS = false;

View File

@@ -115,7 +115,7 @@ public interface IQueueExtent<T extends IChunk> extends Flushable, Trimable, ICh
* A filter block is used to iterate over blocks / positions. Essentially combines BlockVector3,
* Extent and BlockState functions in a way that avoids lookups.
*/
ChunkFilterBlock initFilterBlock();
ChunkFilterBlock createFilterBlock();
/**
* Returns the number of chunks in this queue.
@@ -129,7 +129,14 @@ public interface IQueueExtent<T extends IChunk> extends Flushable, Trimable, ICh
*/
boolean isEmpty();
default ChunkFilterBlock apply(ChunkFilterBlock block, Filter filter, Region region, int chunkX, int chunkZ, boolean full) {
default ChunkFilterBlock apply(
@Nullable ChunkFilterBlock block,
Filter filter,
Region region,
int chunkX,
int chunkZ,
boolean full
) {
if (!filter.appliesChunk(chunkX, chunkZ)) {
return block;
}
@@ -139,8 +146,9 @@ public interface IQueueExtent<T extends IChunk> extends Flushable, Trimable, ICh
if (newChunk != null) {
chunk = newChunk;
if (block == null) {
block = this.initFilterBlock();
block = this.createFilterBlock();
}
block.initChunk(chunkX, chunkZ);
chunk.filterBlocks(filter, block, region, full);
}
this.submit(chunk);
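
With the rename from initFilterBlock to createFilterBlock and the now-nullable block parameter, a caller is expected to lazily create a single ChunkFilterBlock and reuse it across chunks; apply() re-initialises it per chunk via block.initChunk before filtering, as the ParallelQueueExtent change below does. A minimal caller sketch (variable names are hypothetical, not taken from the commit):

// Reuse one filter block across the whole iteration; apply() creates it on first use
// and re-initialises it for each chunk before filtering.
ChunkFilterBlock block = null;
for (BlockVector2 pos : chunks) {
    block = queue.apply(block, filter, region, pos.x(), pos.z(), full);
}
// flush the queue afterwards, as the parallel path below does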

View File

@@ -28,6 +28,7 @@ import com.sk89q.worldedit.function.pattern.Pattern;
import com.sk89q.worldedit.internal.util.LogManagerCompat;
import com.sk89q.worldedit.math.BlockVector2;
import com.sk89q.worldedit.math.BlockVector3;
import com.sk89q.worldedit.regions.CuboidRegion;
import com.sk89q.worldedit.regions.Region;
import com.sk89q.worldedit.util.Countable;
import com.sk89q.worldedit.world.World;
@@ -133,14 +134,16 @@ public class ParallelQueueExtent extends PassthroughExtent {
final int size = Math.min(chunks.size(), Settings.settings().QUEUE.PARALLEL_THREADS);
if (size <= 1) {
// if PQE is ever used with PARALLEL_THREADS = 1, or only one chunk is edited, just run sequentially
ChunkFilterBlock block = null;
while (chunksIter.hasNext()) {
BlockVector2 pos = chunksIter.next();
getExtent().apply(null, filter, region, pos.x(), pos.z(), full);
block = getExtent().apply(block, filter, region, pos.x(), pos.z(), full);
}
} else {
final ForkJoinTask[] tasks = IntStream.range(0, size).mapToObj(i -> handler.submit(() -> {
try {
final Filter newFilter = filter.fork();
final Region newRegion = region.clone();
// Create a chunk that we will reuse/reset for each operation
final SingleThreadQueueExtent queue = (SingleThreadQueueExtent) getNewQueue();
queue.setFastMode(fastmode);
@@ -162,7 +165,7 @@ public class ParallelQueueExtent extends PassthroughExtent {
chunkX = pos.x();
chunkZ = pos.z();
}
block = queue.apply(block, newFilter, region, chunkX, chunkZ, full);
block = queue.apply(block, newFilter, newRegion, chunkX, chunkZ, full);
}
queue.flush();
} catch (Throwable t) {

View File

@@ -473,7 +473,7 @@ public class SingleThreadQueueExtent extends ExtentBatchProcessorHolder implemen
}
@Override
public ChunkFilterBlock initFilterBlock() {
public ChunkFilterBlock createFilterBlock() {
return new CharFilterBlock(this);
}

View File

@@ -17,13 +17,23 @@ public abstract class CharBlocks implements IBlocks {
protected static final Section FULL = new Section() {
@Override
public char[] get(CharBlocks blocks, int layer) {
return blocks.blocks[layer];
char[] arr = blocks.blocks[layer];
if (arr == null) {
// Chunk probably trimmed mid-operations, but do nothing about it to avoid other issues
return EMPTY.get(blocks, layer, false);
}
return arr;
}
// Ignore aggressive switch here.
@Override
public char[] get(CharBlocks blocks, int layer, boolean aggressive) {
return blocks.blocks[layer];
char[] arr = blocks.blocks[layer];
if (arr == null) {
// Chunk probably trimmed mid-operations, but do nothing about it to avoid other issues
return EMPTY.get(blocks, layer, false);
}
return arr;
}
@Override

View File

@@ -71,7 +71,7 @@ public class PolyhedralRegion extends AbstractRegion {
public PolyhedralRegion(PolyhedralRegion region) {
this(region.world);
vertices.addAll(region.vertices);
triangles.addAll(region.triangles);
region.triangles.forEach(triangle -> triangles.add(triangle.clone()));
vertexBacklog.addAll(region.vertexBacklog);
minimumPoint = region.minimumPoint;

View File

@@ -7,10 +7,14 @@ import com.sk89q.worldedit.math.BlockVector3;
import com.sk89q.worldedit.math.Vector3;
import com.sk89q.worldedit.regions.polyhedron.Edge;
public class Triangle {
public class Triangle implements Cloneable {
public static double RADIUS = 0.5;
private final BlockVector3 pos1;
private final BlockVector3 pos2;
private final BlockVector3 pos3;
private final double[][] verts = new double[3][3];
private final double[] center = new double[3];
private final double[] radius = new double[3];
@@ -28,6 +32,9 @@
private final double b;
public Triangle(BlockVector3 pos1, BlockVector3 pos2, BlockVector3 pos3) {
this.pos1 = pos1;
this.pos2 = pos2;
this.pos3 = pos3;
verts[0] = new double[]{pos1.x(), pos1.y(), pos1.z()};
verts[1] = new double[]{pos2.x(), pos2.y(), pos2.z()};
verts[2] = new double[]{pos3.x(), pos3.y(), pos3.z()};
@@ -290,4 +297,9 @@
return dot(normal, vmax) >= 0.0f;
}
@Override
public Triangle clone() {
return new Triangle(pos1, pos2, pos3);
}
}

View File

@@ -22,6 +22,8 @@ package com.sk89q.worldedit.command;
import com.fastasyncworldedit.core.Fawe;
import com.fastasyncworldedit.core.FaweVersion;
import com.fastasyncworldedit.core.configuration.Caption;
import com.fastasyncworldedit.core.configuration.Settings;
import com.fastasyncworldedit.core.limit.FaweLimit;
import com.fastasyncworldedit.core.util.UpdateNotification;
import com.intellectualsites.paster.IncendoPaster;
import com.sk89q.worldedit.LocalSession;
@@ -97,6 +99,8 @@ public class WorldEditCommands {
.getConfiguration()));
//FAWE start
Fawe.instance().setupConfigs();
FaweLimit.MAX.CONFIRM_LARGE =
Settings.settings().LIMITS.get("default").CONFIRM_LARGE || Settings.settings().GENERAL.LIMIT_UNLIMITED_CONFIRMS;
//FAWE end
actor.print(Caption.of("worldedit.reload.config"));
}

View File

@@ -106,7 +106,8 @@ public class MaskingExtent extends AbstractDelegateExtent implements IBatchProce
@Override
public IChunkSet processSet(final IChunk chunk, final IChunkGet get, final IChunkSet set) {
final ChunkFilterBlock filter = getOrCreateFilterBlock.apply(Thread.currentThread().getId());
return filter.filter(chunk, get, set, MaskingExtent.this);
filter.initChunk(chunk.getX(), chunk.getZ());
return filter.filter(chunk, get, set, this);
}
@Override

View File

@@ -775,7 +775,6 @@ public class CuboidRegion extends AbstractRegion implements FlatRegion {
) {
int chunkX = chunk.getX();
int chunkZ = chunk.getZ();
block = block.initChunk(chunkX, chunkZ);
//Chunk entry is an "interior chunk" in regards to the entire region, so filter the chunk whole instead of partially
if ((minX + 15) >> 4 <= chunkX && (maxX - 15) >> 4 >= chunkX && (minZ + 15) >> 4 <= chunkZ && (maxZ - 15) >> 4 >= chunkZ) {

View File

@@ -412,8 +412,6 @@ public class EllipsoidRegion extends AbstractRegion {
return;
}
block = block.initChunk(chunk.getX(), chunk.getZ());
// Get the solid layers
int cy = center.y();
int diffYFull = MathMan.usqrt(diffY2);

View File

@@ -268,7 +268,6 @@ public interface Region extends Iterable<BlockVector3>, Cloneable, IBatchProcess
) {
int minSection = Math.max(get.getMinSectionPosition(), getMinimumY() >> 4);
int maxSection = Math.min(get.getMaxSectionPosition(), getMaximumY() >> 4);
block = block.initChunk(chunk.getX(), chunk.getZ());
for (int layer = minSection; layer <= maxSection; layer++) {
if ((!full && !get.hasSection(layer)) || !filter.appliesLayer(chunk, layer)) {
return;
@@ -483,7 +482,7 @@
}
return set;
} else {
return null;
return set;
}
}

View File

@@ -205,7 +205,7 @@ public class RegionIntersection extends AbstractRegion {
BlockVector3 regMin = region.getMinimumPoint();
BlockVector3 regMax = region.getMaximumPoint();
if (tx >= regMin.x() && bx <= regMax.x() && tz >= regMin.z() && bz <= regMax.z()) {
return region.processSet(chunk, get, set, true);
set = region.processSet(chunk, get, set, true);
}
}
return set; // default return set as no "blacklist" regions contained the chunk