Mirror of https://github.com/IntellectualSites/FastAsyncWorldEdit.git, synchronized 2024-11-19 09:20:08 +01:00

Merge remote-tracking branch 'origin/main' into feat/spongeSchemV3

This commit is contained in:
Pierre Maurice Schwang 2024-06-21 00:46:24 +02:00
Commit 84ea606092
No GPG key could be found for this signature
GPG key ID: 37E613079F3E5BB9
23 changed files with 143 additions and 134 deletions

View file

@@ -824,7 +824,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         nmsChunk.mustNotSave = false;
         nmsChunk.setUnsaved(true);
         // send to player
-        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
+        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
             this.send();
         }
         if (finalizer != null) {
@@ -1109,31 +1109,21 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
             return null;
         }
-        PalettedContainer<Holder<Biome>> biomeData;
-        if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomeData = palettedContainer.copy();
-        } else {
-            LOGGER.warn(
-                    "Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
-                            "type {} but got {}",
-                    PalettedContainer.class.getSimpleName(),
-                    data.getClass().getSimpleName()
-            );
-            biomeData = data.recreate();
-        }
+        PalettedContainer<Holder<Biome>> biomeData = data.recreate();
         for (int y = 0, index = 0; y < 4; y++) {
             for (int z = 0; z < 4; z++) {
                 for (int x = 0; x < 4; x++, index++) {
                     BiomeType biomeType = sectionBiomes[index];
                     if (biomeType == null) {
-                        continue;
+                        biomeData.set(x, y, z, data.get(x, y, z));
+                    } else {
+                        biomeData.set(
+                                x,
+                                y,
+                                z,
+                                biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
+                        );
                     }
-                    biomeData.set(
-                            x,
-                            y,
-                            z,
-                            biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
-                    );
                 }
             }
         }
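A note on the second hunk: previously a null entry in sectionBiomes was simply skipped, so positions without an edited biome could end up with whatever the freshly recreate()d container defaulted to. After the change those positions are explicitly filled from the existing chunk data. A minimal, self-contained sketch of that fallback, using plain String arrays as a stand-in for the NMS PalettedContainer; all names below are illustrative only:

import java.util.Arrays;

public class BiomeMergeSketch {

    static String[] merge(String[] existing, String[] edited) {
        String[] result = new String[64]; // 4 * 4 * 4 biome cells per chunk section
        for (int y = 0, index = 0; y < 4; y++) {
            for (int z = 0; z < 4; z++) {
                for (int x = 0; x < 4; x++, index++) {
                    // mirrors biomeData.set(x, y, z, data.get(x, y, z)) for null entries
                    result[index] = edited[index] != null ? edited[index] : existing[index];
                }
            }
        }
        return result;
    }

    public static void main(String[] args) {
        String[] existing = new String[64];
        Arrays.fill(existing, "minecraft:plains");
        String[] edited = new String[64];
        edited[0] = "minecraft:desert";   // only one cell was edited
        String[] merged = merge(existing, edited);
        System.out.println(merged[0]);    // minecraft:desert
        System.out.println(merged[1]);    // minecraft:plains (no longer lost)
    }
}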

View file

@@ -45,7 +45,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     private final int maxHeight;
     final ServerLevel serverLevel;
     final LevelChunk levelChunk;
-    private PalettedContainer<Holder<Biome>>[] biomes = null;
+    private Holder<Biome>[][] biomes = null;
 
     protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
         this.levelChunk = levelChunk;
@@ -144,7 +144,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     @Override
     public BiomeType getBiomeType(int x, int y, int z) {
-        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
+        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
         return PaperweightPlatformAdapter.adapt(biome, serverLevel);
     }
@@ -173,10 +173,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
         if (biomes == null) {
-            biomes = new PalettedContainer[getSectionCount()];
+            biomes = new Holder[getSectionCount()][];
+        }
+        if (biomes[layer] == null) {
+            biomes[layer] = new Holder[64];
         }
         if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomes[layer] = palettedContainer.copy();
+            for (int i = 0; i < 64; i++) {
+                biomes[layer][i] = palettedContainer.get(i);
+            }
         } else {
             LOGGER.error(
                     "Cannot correctly save biomes to history. Expected class type {} but got {}",

View file

@@ -822,7 +822,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         nmsChunk.mustNotSave = false;
         nmsChunk.setUnsaved(true);
         // send to player
-        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
+        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
             this.send();
         }
         if (finalizer != null) {
@@ -1106,31 +1106,21 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
             return null;
         }
-        PalettedContainer<Holder<Biome>> biomeData;
-        if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomeData = palettedContainer.copy();
-        } else {
-            LOGGER.warn(
-                    "Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
-                            "type {} but got {}",
-                    PalettedContainer.class.getSimpleName(),
-                    data.getClass().getSimpleName()
-            );
-            biomeData = data.recreate();
-        }
+        PalettedContainer<Holder<Biome>> biomeData = data.recreate();
         for (int y = 0, index = 0; y < 4; y++) {
             for (int z = 0; z < 4; z++) {
                 for (int x = 0; x < 4; x++, index++) {
                     BiomeType biomeType = sectionBiomes[index];
                     if (biomeType == null) {
-                        continue;
+                        biomeData.set(x, y, z, data.get(x, y, z));
+                    } else {
+                        biomeData.set(
+                                x,
+                                y,
+                                z,
+                                biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
+                        );
                     }
-                    biomeData.set(
-                            x,
-                            y,
-                            z,
-                            biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
-                    );
                 }
             }
         }

View file

@@ -45,7 +45,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     private final int maxHeight;
     final ServerLevel serverLevel;
     final LevelChunk levelChunk;
-    private PalettedContainer<Holder<Biome>>[] biomes = null;
+    private Holder<Biome>[][] biomes = null;
 
     protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
         this.levelChunk = levelChunk;
@@ -144,7 +144,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     @Override
     public BiomeType getBiomeType(int x, int y, int z) {
-        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
+        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
         return PaperweightPlatformAdapter.adapt(biome, serverLevel);
     }
@@ -173,10 +173,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
         if (biomes == null) {
-            biomes = new PalettedContainer[getSectionCount()];
+            biomes = new Holder[getSectionCount()][];
+        }
+        if (biomes[layer] == null) {
+            biomes[layer] = new Holder[64];
        }
         if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomes[layer] = palettedContainer.copy();
+            for (int i = 0; i < 64; i++) {
+                biomes[layer][i] = palettedContainer.get(i);
+            }
         } else {
             LOGGER.error(
                     "Cannot correctly save biomes to history. Expected class type {} but got {}",

View file

@@ -830,7 +830,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         nmsChunk.mustNotSave = false;
         nmsChunk.setUnsaved(true);
         // send to player
-        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
+        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
             this.send();
         }
         if (finalizer != null) {
@@ -1114,31 +1114,21 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
             return null;
         }
-        PalettedContainer<Holder<Biome>> biomeData;
-        if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomeData = palettedContainer.copy();
-        } else {
-            LOGGER.warn(
-                    "Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
-                            "type {} but got {}",
-                    PalettedContainer.class.getSimpleName(),
-                    data.getClass().getSimpleName()
-            );
-            biomeData = data.recreate();
-        }
+        PalettedContainer<Holder<Biome>> biomeData = data.recreate();
         for (int y = 0, index = 0; y < 4; y++) {
             for (int z = 0; z < 4; z++) {
                 for (int x = 0; x < 4; x++, index++) {
                     BiomeType biomeType = sectionBiomes[index];
                     if (biomeType == null) {
-                        continue;
+                        biomeData.set(x, y, z, data.get(x, y, z));
+                    } else {
+                        biomeData.set(
+                                x,
+                                y,
+                                z,
+                                biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
+                        );
                     }
-                    biomeData.set(
-                            x,
-                            y,
-                            z,
-                            biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
-                    );
                 }
             }
         }

View file

@@ -45,7 +45,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     private final int maxHeight;
     final ServerLevel serverLevel;
     final LevelChunk levelChunk;
-    private PalettedContainer<Holder<Biome>>[] biomes = null;
+    private Holder<Biome>[][] biomes = null;
 
     protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
         this.levelChunk = levelChunk;
@@ -144,7 +144,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     @Override
     public BiomeType getBiomeType(int x, int y, int z) {
-        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
+        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
         return PaperweightPlatformAdapter.adapt(biome, serverLevel);
     }
@@ -173,10 +173,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
         if (biomes == null) {
-            biomes = new PalettedContainer[getSectionCount()];
+            biomes = new Holder[getSectionCount()][];
+        }
+        if (biomes[layer] == null) {
+            biomes[layer] = new Holder[64];
         }
         if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomes[layer] = palettedContainer.copy();
+            for (int i = 0; i < 64; i++) {
+                biomes[layer][i] = palettedContainer.get(i);
+            }
         } else {
             LOGGER.error(
                     "Cannot correctly save biomes to history. Expected class type {} but got {}",

View file

@@ -829,7 +829,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         nmsChunk.mustNotSave = false;
         nmsChunk.setUnsaved(true);
         // send to player
-        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
+        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
             this.send();
         }
         if (finalizer != null) {
@@ -1111,31 +1111,21 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
             return null;
         }
-        PalettedContainer<Holder<Biome>> biomeData;
-        if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomeData = palettedContainer.copy();
-        } else {
-            LOGGER.warn(
-                    "Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
-                            "type {} but got {}",
-                    PalettedContainer.class.getSimpleName(),
-                    data.getClass().getSimpleName()
-            );
-            biomeData = data.recreate();
-        }
+        PalettedContainer<Holder<Biome>> biomeData = data.recreate();
         for (int y = 0, index = 0; y < 4; y++) {
             for (int z = 0; z < 4; z++) {
                 for (int x = 0; x < 4; x++, index++) {
                     BiomeType biomeType = sectionBiomes[index];
                     if (biomeType == null) {
-                        continue;
+                        biomeData.set(x, y, z, data.get(x, y, z));
+                    } else {
+                        biomeData.set(
+                                x,
+                                y,
+                                z,
+                                biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
+                        );
                     }
-                    biomeData.set(
-                            x,
-                            y,
-                            z,
-                            biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
-                    );
                 }
             }
         }

View file

@@ -45,7 +45,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     private final int maxHeight;
     final ServerLevel serverLevel;
     final LevelChunk levelChunk;
-    private PalettedContainer<Holder<Biome>>[] biomes = null;
+    private Holder<Biome>[][] biomes = null;
 
     protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
         this.levelChunk = levelChunk;
@@ -144,7 +144,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     @Override
     public BiomeType getBiomeType(int x, int y, int z) {
-        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
+        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
         return PaperweightPlatformAdapter.adapt(biome, serverLevel);
     }
@@ -173,10 +173,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
         if (biomes == null) {
-            biomes = new PalettedContainer[getSectionCount()];
+            biomes = new Holder[getSectionCount()][];
+        }
+        if (biomes[layer] == null) {
+            biomes[layer] = new Holder[64];
         }
         if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomes[layer] = palettedContainer.copy();
+            for (int i = 0; i < 64; i++) {
+                biomes[layer][i] = palettedContainer.get(i);
+            }
         } else {
             LOGGER.error(
                     "Cannot correctly save biomes to history. Expected class type {} but got {}",

View file

@@ -830,7 +830,7 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         nmsChunk.mustNotSave = false;
         nmsChunk.setUnsaved(true);
         // send to player
-        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING) {
+        if (Settings.settings().LIGHTING.MODE == 0 || !Settings.settings().LIGHTING.DELAY_PACKET_SENDING || finalMask == 0 && biomes != null) {
             this.send();
         }
         if (finalizer != null) {
@@ -1114,31 +1114,21 @@ public class PaperweightGetBlocks extends CharGetBlocks implements BukkitGetBloc
         if (biomes == null || (sectionBiomes = biomes[sectionIndex]) == null) {
             return null;
         }
-        PalettedContainer<Holder<Biome>> biomeData;
-        if (data instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomeData = palettedContainer.copy();
-        } else {
-            LOGGER.warn(
-                    "Cannot correctly set biomes to world, existing biomes may be lost. Expected class " +
-                            "type {} but got {}",
-                    PalettedContainer.class.getSimpleName(),
-                    data.getClass().getSimpleName()
-            );
-            biomeData = data.recreate();
-        }
+        PalettedContainer<Holder<Biome>> biomeData = data.recreate();
         for (int y = 0, index = 0; y < 4; y++) {
             for (int z = 0; z < 4; z++) {
                 for (int x = 0; x < 4; x++, index++) {
                     BiomeType biomeType = sectionBiomes[index];
                     if (biomeType == null) {
-                        continue;
+                        biomeData.set(x, y, z, data.get(x, y, z));
+                    } else {
+                        biomeData.set(
+                                x,
+                                y,
+                                z,
+                                biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
+                        );
                     }
-                    biomeData.set(
-                            x,
-                            y,
-                            z,
-                            biomeHolderIdMap.byIdOrThrow(adapter.getInternalBiomeId(biomeType))
-                    );
                 }
             }
         }

View file

@@ -46,7 +46,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     private final int maxHeight;
     final ServerLevel serverLevel;
     final LevelChunk levelChunk;
-    private PalettedContainer<Holder<Biome>>[] biomes = null;
+    private Holder<Biome>[][] biomes = null;
 
     protected PaperweightGetBlocks_Copy(LevelChunk levelChunk) {
         this.levelChunk = levelChunk;
@@ -145,7 +145,7 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     @Override
     public BiomeType getBiomeType(int x, int y, int z) {
-        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()].get(x >> 2, (y & 15) >> 2, z >> 2);
+        Holder<Biome> biome = biomes[(y >> 4) - getMinSectionPosition()][(y & 12) << 2 | (z & 12) | (x & 12) >> 2];
         return PaperweightPlatformAdapter.adapt(biome, serverLevel);
     }
@@ -174,10 +174,15 @@ public class PaperweightGetBlocks_Copy implements IChunkGet {
     protected void storeBiomes(int layer, PalettedContainerRO<Holder<Biome>> biomeData) {
         if (biomes == null) {
-            biomes = new PalettedContainer[getSectionCount()];
+            biomes = new Holder[getSectionCount()][];
+        }
+        if (biomes[layer] == null) {
+            biomes[layer] = new Holder[64];
         }
         if (biomeData instanceof PalettedContainer<Holder<Biome>> palettedContainer) {
-            biomes[layer] = palettedContainer.copy();
+            for (int i = 0; i < 64; i++) {
+                biomes[layer][i] = palettedContainer.get(i);
+            }
         } else {
             LOGGER.error(
                     "Cannot correctly save biomes to history. Expected class type {} but got {}",

View file

@@ -2,6 +2,7 @@ package com.fastasyncworldedit.core;
 
 import com.fastasyncworldedit.core.configuration.Settings;
 import com.fastasyncworldedit.core.internal.exception.FaweException;
+import com.fastasyncworldedit.core.limit.FaweLimit;
 import com.fastasyncworldedit.core.queue.implementation.QueueHandler;
 import com.fastasyncworldedit.core.util.CachedTextureUtil;
 import com.fastasyncworldedit.core.util.CleanTextureUtil;
@@ -105,6 +106,8 @@ public class Fawe {
          * Implementation dependent stuff
          */
        this.setupConfigs();
+        FaweLimit.MAX.CONFIRM_LARGE =
+                Settings.settings().LIMITS.get("default").CONFIRM_LARGE || Settings.settings().GENERAL.LIMIT_UNLIMITED_CONFIRMS;
         TaskManager.IMP = this.implementation.getTaskManager();
 
         TaskManager.taskManager().async(() -> {
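The intent of the two added lines, as I read it: the otherwise unlimited FaweLimit.MAX now still requires //confirm for large edits unless both the default limit's confirm-large and the new general toggle are disabled. The class and method below are not part of FAWE, they only restate the boolean logic that both this startup path and the /we reload hunk further down apply:

public class ConfirmLargeSketch {

    // mirrors: FaweLimit.MAX.CONFIRM_LARGE =
    //         Settings.settings().LIMITS.get("default").CONFIRM_LARGE
    //                 || Settings.settings().GENERAL.LIMIT_UNLIMITED_CONFIRMS;
    static boolean maxConfirmLarge(boolean defaultConfirmLarge, boolean limitUnlimitedConfirms) {
        return defaultConfirmLarge || limitUnlimitedConfirms;
    }

    public static void main(String[] args) {
        System.out.println(maxConfirmLarge(true, true));   // true: large edits still prompt for //confirm
        System.out.println(maxConfirmLarge(false, true));  // true: the new toggle keeps the prompt for unlimited players
        System.out.println(maxConfirmLarge(false, false)); // false: only now is confirmation skipped
    }
}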

View file

@@ -791,6 +791,11 @@ public class Settings extends Config {
         })
         public boolean UNSTUCK_ON_GENERATE = true;
 
+        @Comment({
+                "If unlimited limits should still require /confirm on large. Defaults to limits.default.confirm-large otherwise."
+        })
+        public boolean LIMIT_UNLIMITED_CONFIRMS = true;
+
     }
 
 }

View file

@@ -1,6 +1,7 @@
 package com.fastasyncworldedit.core.limit;
 
 import com.fastasyncworldedit.core.FaweCache;
+import com.fastasyncworldedit.core.configuration.Settings;
 
 import java.util.Collections;
 import java.util.Set;
@@ -121,7 +122,8 @@ public class FaweLimit {
         MAX.SCHEM_FILE_SIZE_LIMIT = Integer.MAX_VALUE;
         MAX.MAX_EXPRESSION_MS = 50;
         MAX.FAST_PLACEMENT = true;
-        MAX.CONFIRM_LARGE = true;
+        MAX.CONFIRM_LARGE =
+                Settings.settings().LIMITS.get("default").CONFIRM_LARGE || Settings.settings().GENERAL.LIMIT_UNLIMITED_CONFIRMS;
         MAX.RESTRICT_HISTORY_TO_REGIONS = false;
         MAX.STRIP_NBT = Collections.emptySet();
         MAX.UNIVERSAL_DISALLOWED_BLOCKS = false;

View file

@@ -115,7 +115,7 @@ public interface IQueueExtent<T extends IChunk> extends Flushable, Trimable, ICh
      * A filter block is used to iterate over blocks / positions. Essentially combines BlockVector3,
      * Extent and BlockState functions in a way that avoids lookups.
      */
-    ChunkFilterBlock initFilterBlock();
+    ChunkFilterBlock createFilterBlock();
 
     /**
     * Returns the number of chunks in this queue.
@@ -129,7 +129,14 @@ public interface IQueueExtent<T extends IChunk> extends Flushable, Trimable, ICh
      */
     boolean isEmpty();
 
-    default ChunkFilterBlock apply(ChunkFilterBlock block, Filter filter, Region region, int chunkX, int chunkZ, boolean full) {
+    default ChunkFilterBlock apply(
+            @Nullable ChunkFilterBlock block,
+            Filter filter,
+            Region region,
+            int chunkX,
+            int chunkZ,
+            boolean full
+    ) {
         if (!filter.appliesChunk(chunkX, chunkZ)) {
             return block;
         }
@@ -139,8 +146,9 @@ public interface IQueueExtent<T extends IChunk> extends Flushable, Trimable, ICh
         if (newChunk != null) {
             chunk = newChunk;
             if (block == null) {
-                block = this.initFilterBlock();
+                block = this.createFilterBlock();
             }
+            block.initChunk(chunkX, chunkZ);
             chunk.filterBlocks(filter, block, region, full);
         }
         this.submit(chunk);
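A hedged sketch of the calling pattern the reworked default apply(...) expects: the caller threads one reusable ChunkFilterBlock through successive chunks (created lazily via createFilterBlock() on first use), and apply(...) now performs the per-chunk initChunk(...) itself, which is why the later hunks drop those calls from CuboidRegion, EllipsoidRegion and Region. The import paths and the wildcard generic are my assumptions about the surrounding FAWE API:

import com.fastasyncworldedit.core.extent.filter.block.ChunkFilterBlock;
import com.fastasyncworldedit.core.queue.Filter;
import com.fastasyncworldedit.core.queue.IQueueExtent;
import com.sk89q.worldedit.math.BlockVector2;
import com.sk89q.worldedit.regions.Region;

class ApplyLoopSketch {

    // Usage sketch only: mirrors the sequential path in ParallelQueueExtent after this change.
    static void filterChunks(IQueueExtent<?> queue, Filter filter, Region region,
                             Iterable<BlockVector2> chunks, boolean full) {
        ChunkFilterBlock block = null; // created lazily by apply(...) via createFilterBlock()
        for (BlockVector2 pos : chunks) {
            // apply(...) now calls block.initChunk(chunkX, chunkZ) itself before filtering,
            // so neither callers nor Region implementations need to do it any more
            block = queue.apply(block, filter, region, pos.x(), pos.z(), full);
        }
    }
}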

View file

@@ -28,6 +28,7 @@ import com.sk89q.worldedit.function.pattern.Pattern;
 import com.sk89q.worldedit.internal.util.LogManagerCompat;
 import com.sk89q.worldedit.math.BlockVector2;
 import com.sk89q.worldedit.math.BlockVector3;
+import com.sk89q.worldedit.regions.CuboidRegion;
 import com.sk89q.worldedit.regions.Region;
 import com.sk89q.worldedit.util.Countable;
 import com.sk89q.worldedit.world.World;
@@ -133,14 +134,16 @@ public class ParallelQueueExtent extends PassthroughExtent {
         final int size = Math.min(chunks.size(), Settings.settings().QUEUE.PARALLEL_THREADS);
         if (size <= 1) {
             // if PQE is ever used with PARALLEL_THREADS = 1, or only one chunk is edited, just run sequentially
+            ChunkFilterBlock block = null;
             while (chunksIter.hasNext()) {
                 BlockVector2 pos = chunksIter.next();
-                getExtent().apply(null, filter, region, pos.x(), pos.z(), full);
+                block = getExtent().apply(block, filter, region, pos.x(), pos.z(), full);
             }
         } else {
             final ForkJoinTask[] tasks = IntStream.range(0, size).mapToObj(i -> handler.submit(() -> {
                 try {
                     final Filter newFilter = filter.fork();
+                    final Region newRegion = region.clone();
                     // Create a chunk that we will reuse/reset for each operation
                     final SingleThreadQueueExtent queue = (SingleThreadQueueExtent) getNewQueue();
                     queue.setFastMode(fastmode);
@@ -162,7 +165,7 @@ public class ParallelQueueExtent extends PassthroughExtent {
                         chunkX = pos.x();
                         chunkZ = pos.z();
                     }
-                    block = queue.apply(block, newFilter, region, chunkX, chunkZ, full);
+                    block = queue.apply(block, newFilter, newRegion, chunkX, chunkZ, full);
                 }
                 queue.flush();
             } catch (Throwable t) {

View file

@@ -473,7 +473,7 @@ public class SingleThreadQueueExtent extends ExtentBatchProcessorHolder implemen
     }
 
     @Override
-    public ChunkFilterBlock initFilterBlock() {
+    public ChunkFilterBlock createFilterBlock() {
         return new CharFilterBlock(this);
     }

View file

@@ -71,7 +71,7 @@ public class PolyhedralRegion extends AbstractRegion {
     public PolyhedralRegion(PolyhedralRegion region) {
         this(region.world);
         vertices.addAll(region.vertices);
-        triangles.addAll(region.triangles);
+        region.triangles.forEach(triangle -> triangles.add(triangle.clone()));
         vertexBacklog.addAll(region.vertexBacklog);
         minimumPoint = region.minimumPoint;

View file

@@ -7,10 +7,14 @@ import com.sk89q.worldedit.math.BlockVector3;
 import com.sk89q.worldedit.math.Vector3;
 import com.sk89q.worldedit.regions.polyhedron.Edge;
 
-public class Triangle {
+public class Triangle implements Cloneable {
 
     public static double RADIUS = 0.5;
 
+    private final BlockVector3 pos1;
+    private final BlockVector3 pos2;
+    private final BlockVector3 pos3;
+
     private final double[][] verts = new double[3][3];
     private final double[] center = new double[3];
     private final double[] radius = new double[3];
@@ -28,6 +32,9 @@ public class Triangle {
     private final double b;
 
     public Triangle(BlockVector3 pos1, BlockVector3 pos2, BlockVector3 pos3) {
+        this.pos1 = pos1;
+        this.pos2 = pos2;
+        this.pos3 = pos3;
         verts[0] = new double[]{pos1.x(), pos1.y(), pos1.z()};
         verts[1] = new double[]{pos2.x(), pos2.y(), pos2.z()};
         verts[2] = new double[]{pos3.x(), pos3.y(), pos3.z()};
@@ -290,4 +297,9 @@ public class Triangle {
         return dot(normal, vmax) >= 0.0f;
     }
 
+    @Override
+    public Triangle clone() {
+        return new Triangle(pos1, pos2, pos3);
+    }
+
 }
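Taken together with the PolyhedralRegion hunk above, these changes replace the shallow copy of the triangle list with a per-element clone(). A simplified, hypothetical illustration of why that matters; the Tri class below is not FAWE's Triangle, just a stand-in with one mutable field:

import java.util.ArrayList;
import java.util.List;

class Tri implements Cloneable {

    double radius = 0.5; // stand-in for per-instance state a caller might adjust

    @Override
    public Tri clone() {
        Tri copy = new Tri();
        copy.radius = this.radius;
        return copy;
    }
}

public class DeepCopySketch {

    public static void main(String[] args) {
        List<Tri> original = new ArrayList<>(List.of(new Tri()));

        // shallow copy (old behaviour): both lists share the same Tri instance
        List<Tri> shallow = new ArrayList<>(original);

        // deep copy (new behaviour): each list owns independent Tri instances
        List<Tri> deep = new ArrayList<>();
        original.forEach(tri -> deep.add(tri.clone()));

        original.get(0).radius = 2.0;
        System.out.println(shallow.get(0).radius); // 2.0 - the change leaks into the shallow copy
        System.out.println(deep.get(0).radius);    // 0.5 - the deep copy is unaffected
    }
}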

View file

@@ -22,6 +22,8 @@ package com.sk89q.worldedit.command;
 import com.fastasyncworldedit.core.Fawe;
 import com.fastasyncworldedit.core.FaweVersion;
 import com.fastasyncworldedit.core.configuration.Caption;
+import com.fastasyncworldedit.core.configuration.Settings;
+import com.fastasyncworldedit.core.limit.FaweLimit;
 import com.fastasyncworldedit.core.util.UpdateNotification;
 import com.intellectualsites.paster.IncendoPaster;
 import com.sk89q.worldedit.LocalSession;
@@ -97,6 +99,8 @@ public class WorldEditCommands {
                 .getConfiguration()));
         //FAWE start
         Fawe.instance().setupConfigs();
+        FaweLimit.MAX.CONFIRM_LARGE =
+                Settings.settings().LIMITS.get("default").CONFIRM_LARGE || Settings.settings().GENERAL.LIMIT_UNLIMITED_CONFIRMS;
         //FAWE end
         actor.print(Caption.of("worldedit.reload.config"));
     }

View file

@@ -106,7 +106,8 @@ public class MaskingExtent extends AbstractDelegateExtent implements IBatchProce
     @Override
     public IChunkSet processSet(final IChunk chunk, final IChunkGet get, final IChunkSet set) {
         final ChunkFilterBlock filter = getOrCreateFilterBlock.apply(Thread.currentThread().getId());
-        return filter.filter(chunk, get, set, MaskingExtent.this);
+        filter.initChunk(chunk.getX(), chunk.getZ());
+        return filter.filter(chunk, get, set, this);
     }
 
     @Override

View file

@@ -755,7 +755,6 @@ public class CuboidRegion extends AbstractRegion implements FlatRegion {
     ) {
         int chunkX = chunk.getX();
         int chunkZ = chunk.getZ();
-        block = block.initChunk(chunkX, chunkZ);
 
         //Chunk entry is an "interior chunk" in regards to the entire region, so filter the chunk whole instead of partially
         if ((minX + 15) >> 4 <= chunkX && (maxX - 15) >> 4 >= chunkX && (minZ + 15) >> 4 <= chunkZ && (maxZ - 15) >> 4 >= chunkZ) {

View file

@@ -412,8 +412,6 @@ public class EllipsoidRegion extends AbstractRegion {
             return;
         }
 
-        block = block.initChunk(chunk.getX(), chunk.getZ());
-
         // Get the solid layers
         int cy = center.y();
         int diffYFull = MathMan.usqrt(diffY2);

View file

@@ -268,7 +268,6 @@ public interface Region extends Iterable<BlockVector3>, Cloneable, IBatchProcess
     ) {
         int minSection = Math.max(get.getMinSectionPosition(), getMinimumY() >> 4);
         int maxSection = Math.min(get.getMaxSectionPosition(), getMaximumY() >> 4);
-        block = block.initChunk(chunk.getX(), chunk.getZ());
         for (int layer = minSection; layer <= maxSection; layer++) {
             if ((!full && !get.hasSection(layer)) || !filter.appliesLayer(chunk, layer)) {
                 return;