Mirror of https://github.com/GeyserMC/Geyser.git, synchronized 2024-11-08 17:20:20 +01:00
Fix biomes crashing and more (huge thanks to @dktapps)
This commit is contained in:
Parent c7d4130a44
Commit feb64e08df
@@ -130,7 +130,7 @@
         <dependency>
             <groupId>com.github.GeyserMC</groupId>
             <artifactId>MCProtocolLib</artifactId>
-            <version>6e318f5</version>
+            <version>e427237</version>
             <scope>compile</scope>
             <exclusions>
                 <exclusion>
@@ -41,7 +41,6 @@ public class UpstreamSession {
     private boolean initialized = false;

     public void sendPacket(@NonNull BedrockPacket packet) {
-        System.out.println(packet);
         if (!isClosed()) {
             session.sendPacket(packet);
         }
@@ -74,19 +74,19 @@ public class ChunkCache {
             return;
         }

-        if (y < minY || (y >> 4) > column.getChunks().length - 1) {
+        if (y < minY || ((y - minY) >> 4) > column.getChunks().length - 1) {
             // Y likely goes above or below the height limit of this world
             return;
         }

-        Chunk chunk = column.getChunks()[(y >> 4) - getChunkMinY()];
+        Chunk chunk = column.getChunks()[(y - minY) >> 4];
         if (chunk == null) {
             if (block != BlockStateValues.JAVA_AIR_ID) {
                 // A previously empty chunk, which is no longer empty as a block has been added to it
                 chunk = new Chunk();
                 // Fixes the chunk assuming that all blocks is the `block` variable we are updating. /shrug
                 chunk.getPalette().stateToId(BlockStateValues.JAVA_AIR_ID);
-                column.getChunks()[(y >> 4) - getChunkMinY()] = chunk;
+                column.getChunks()[(y - minY) >> 4] = chunk;
             } else {
                 // Nothing to update
                 return;
@@ -106,12 +106,12 @@ public class ChunkCache {
             return BlockStateValues.JAVA_AIR_ID;
         }

-        if (y < minY || (y >> 4) > column.getChunks().length - 1) {
+        if (y < minY || ((y - minY) >> 4) > column.getChunks().length - 1) {
             // Y likely goes above or below the height limit of this world
             return BlockStateValues.JAVA_AIR_ID;
         }

-        Chunk chunk = column.getChunks()[(y >> 4) - getChunkMinY()];
+        Chunk chunk = column.getChunks()[(y - minY) >> 4];
         if (chunk != null) {
             return chunk.get(x & 0xF, y & 0xF, z & 0xF);
         }
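The two ChunkCache hunks above are the heart of the crash fix: the bounds check and the array lookup now derive the section index the same way, relative to the world floor, instead of comparing an absolute section number (y >> 4) against an array that is laid out from minY upward. A rough sketch of why those two quantities diverge once the floor is negative; the values and variable names below are assumed for illustration only:

    // Minimal sketch (assumed values): with a -64 world floor the section array is
    // laid out relative to minY, so slot 0 holds the blocks at y = -64..-49.
    int minY = -64;
    int[] exampleY = { -64, -1, 0, 319 };
    for (int y : exampleY) {
        int absoluteSection = y >> 4;        // what the old bounds check compared
        int arrayIndex = (y - minY) >> 4;    // how the section array is actually indexed
        System.out.printf("y=%4d  absolute section=%3d  array index=%2d%n",
                y, absoluteSection, arrayIndex);
    }
    // y= -64  absolute section= -4  array index= 0
    // y=  -1  absolute section= -1  array index= 3
    // y=   0  absolute section=  0  array index= 4
    // y= 319  absolute section= 19  array index=23

Checking one index and reading with the other is what could run outside the array in an extended-height world; using (y - minY) >> 4 for both keeps them consistent.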
@@ -37,14 +37,16 @@ import io.netty.buffer.ByteBufAllocator;
 import io.netty.buffer.ByteBufOutputStream;
 import org.geysermc.connector.GeyserConnector;
 import org.geysermc.connector.network.session.GeyserSession;
-import org.geysermc.connector.utils.BiomeUtils;
 import org.geysermc.connector.network.translators.PacketTranslator;
 import org.geysermc.connector.network.translators.Translator;
 import org.geysermc.connector.network.translators.world.chunk.ChunkSection;
+import org.geysermc.connector.utils.BiomeUtils;
 import org.geysermc.connector.utils.ChunkUtils;

 @Translator(packet = ServerChunkDataPacket.class)
 public class JavaChunkDataTranslator extends PacketTranslator<ServerChunkDataPacket> {
+    // Caves and cliffs supports 3D biomes by implementing a very similar palette system to blocks
+    private static final boolean NEW_BIOME_WRITE = GeyserConnector.getInstance().getConfig().isExtendedWorldHeight();

     @Override
     public void translate(ServerChunkDataPacket packet, GeyserSession session) {
@@ -79,7 +81,7 @@ public class JavaChunkDataTranslator extends PacketTranslator<ServerChunkDataPac
             ChunkSection section = sections[i];
             size += (section != null ? section : session.getBlockMappings().getEmptyChunkSection()).estimateNetworkSize();
         }
-        size += 256; // Biomes
+        size += 256; // Biomes pre-1.18
         size += 1; // Border blocks
         size += 1; // Extra data length (always 0)
         size += chunkData.getBlockEntities().length * 64; // Conservative estimate of 64 bytes per tile entity
@@ -93,7 +95,18 @@ public class JavaChunkDataTranslator extends PacketTranslator<ServerChunkDataPac
             (section != null ? section : session.getBlockMappings().getEmptyChunkSection()).writeToNetwork(byteBuf);
         }

-        byteBuf.writeBytes(BiomeUtils.toBedrockBiome(column.getBiomeData())); // Biomes - 256 bytes
+        if (NEW_BIOME_WRITE) {
+            for (int i = 0; i < sectionCount; i++) {
+                BiomeUtils.toNewBedrockBiome(column.getBiomeData(), i).writeToNetwork(byteBuf);
+            }
+
+            int remainingEmptyBiomes = 32 - sectionCount;
+            for (int i = 0; i < remainingEmptyBiomes; i++) {
+                byteBuf.writeBytes(ChunkUtils.EMPTY_BIOME_DATA);
+            }
+        } else {
+            byteBuf.writeBytes(BiomeUtils.toBedrockBiome(column.getBiomeData())); // Biomes - 256 bytes
+        }
         byteBuf.writeByte(0); // Border blocks - Edu edition only
         VarInts.writeUnsignedInt(byteBuf, 0); // extra data length, 0 for now

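With the extended-height path enabled, the hunk above writes one palette-backed biome storage per sub-chunk actually sent and then pads with serialized empty palettes until 32 storages have been written, instead of the flat 256-byte biome array used before. A small sketch of that bookkeeping; the helper name is hypothetical, only the constant 32 and EMPTY_BIOME_DATA come from the diff:

    // Hypothetical helper mirroring the padding logic above: the new format always
    // ships 32 biome storages per column, and every sub-chunk not covered by a real
    // palette is filled with the precomputed empty one.
    static int emptyBiomePalettesNeeded(int sectionCount) {
        int remaining = 32 - sectionCount;
        if (remaining < 0) {
            throw new IllegalStateException("column has more than 32 sub-chunks: " + sectionCount);
        }
        return remaining;
    }

For example, a column with 24 sent sub-chunks would be followed by 8 copies of ChunkUtils.EMPTY_BIOME_DATA.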
@@ -25,6 +25,8 @@

 package org.geysermc.connector.utils;

+import org.geysermc.connector.network.translators.world.chunk.BlockStorage;
+
 import java.util.Arrays;

 // Based off of ProtocolSupport's LegacyBiomeData.java:
@@ -40,7 +42,7 @@ public class BiomeUtils {
         for (int y = 0; y < 16; y += 4) {
             for (int z = 0; z < 16; z += 4) {
                 for (int x = 0; x < 16; x += 4) {
-                    byte biomeId = biomeID(biomeData, x, y, z);
+                    byte biomeId = (byte) biomeID(biomeData, x, y, z);
                     int offset = ((z + (y / 4)) << 4) | x;
                     Arrays.fill(bedrockData, offset, offset + 4, biomeId);
                 }
@@ -49,7 +51,25 @@ public class BiomeUtils {
         return bedrockData;
     }

-    private static byte biomeID(int[] biomeData, int x, int y, int z) {
+    public static BlockStorage toNewBedrockBiome(int[] biomeData, int ySection) {
+        BlockStorage storage = new BlockStorage(0);
+        int blockY = ySection << 4;
+        int i = 0;
+        // Iterate over biomes like a chunk, grab the biome from Java, and add it to Bedrock's biome palette
+        // Might be able to be optimized by iterating over Java's biome data section?? Unsure.
+        for (int x = 0; x < 16; x++) {
+            for (int z = 0; z < 16; z++) {
+                for (int y = blockY; y < (blockY + 16); y++) {
+                    int biomeId = biomeID(biomeData, x, y, z);
+                    storage.setFullBlock(i, biomeId);
+                    i++;
+                }
+            }
+        }
+        return storage;
+    }
+
+    private static int biomeID(int[] biomeData, int x, int y, int z) {
         int biomeId = biomeData[((y >> 2) & 63) << 4 | ((z >> 2) & 3) << 2 | ((x >> 2) & 3)];
         if (biomeId == 0) {
             biomeId = 42; // Ocean
@@ -58,6 +78,6 @@ public class BiomeUtils {
         } else if (biomeId >= 170) { // Nether biomes. Dunno why it's like this :microjang:
             biomeId += 8;
         }
-        return (byte) biomeId;
+        return biomeId;
     }
 }
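The lookup formula inside biomeID() is unchanged, but it is worth spelling out since the new toNewBedrockBiome() now evaluates it once per block: Java stores biomes once per 4x4x4 cell, and the bit operations are a compact way of composing that cell index. A readable equivalent, with a hypothetical helper name; the masks and shifts are taken directly from the code above:

    // Hypothetical helper: computes the same index as the bit-twiddled expression in biomeID().
    static int javaBiomeCellIndex(int x, int y, int z) {
        int cellX = (x >> 2) & 3;   // which 4-block-wide cell in the chunk footprint, 0..3
        int cellZ = (z >> 2) & 3;
        int cellY = (y >> 2) & 63;  // vertical 4-block cell, masked to the data array's range
        return (cellY << 4) | (cellZ << 2) | cellX;
    }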
@@ -41,6 +41,8 @@ import com.nukkitx.nbt.NbtMap;
 import com.nukkitx.protocol.bedrock.packet.LevelChunkPacket;
 import com.nukkitx.protocol.bedrock.packet.NetworkChunkPublisherUpdatePacket;
 import com.nukkitx.protocol.bedrock.packet.UpdateBlockPacket;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.Unpooled;
 import it.unimi.dsi.fastutil.ints.IntArrayList;
 import it.unimi.dsi.fastutil.ints.IntList;
 import lombok.Data;
@@ -71,11 +73,41 @@ public class ChunkUtils {
     /**
      * The minimum height Bedrock Edition will accept.
      */
-    private static final int MINIMUM_ACCEPTED_HEIGHT = GeyserConnector.getInstance().getConfig().isExtendedWorldHeight() ? -64 : 0;
+    private static final int MINIMUM_ACCEPTED_HEIGHT = 0;
+    private static final int MINIMUM_ACCEPTED_HEIGHT_OVERWORLD = GeyserConnector.getInstance().getConfig().isExtendedWorldHeight() ? -64 : MINIMUM_ACCEPTED_HEIGHT;
     /**
      * The maximum chunk height Bedrock Edition will accept, from the lowest point to the highest.
      */
-    private static final int MAXIMUM_ACCEPTED_HEIGHT = GeyserConnector.getInstance().getConfig().isExtendedWorldHeight() ? 380 : 256;
+    private static final int MAXIMUM_ACCEPTED_HEIGHT = 256;
+    private static final int MAXIMUM_ACCEPTED_HEIGHT_OVERWORLD = GeyserConnector.getInstance().getConfig().isExtendedWorldHeight() ? 384 : MAXIMUM_ACCEPTED_HEIGHT;
+
+    private static final byte[] EMPTY_CHUNK_DATA;
+    public static final byte[] EMPTY_BIOME_DATA;
+
+    static {
+        ByteBuf byteBuf = Unpooled.buffer();
+        try {
+            BlockStorage blockStorage = new BlockStorage(0);
+            blockStorage.writeToNetwork(byteBuf);
+
+            EMPTY_BIOME_DATA = new byte[byteBuf.readableBytes()];
+            byteBuf.readBytes(EMPTY_BIOME_DATA);
+        } finally {
+            byteBuf.release();
+        }
+
+        byteBuf = Unpooled.buffer();
+        try {
+            for (int i = 0; i < 32; i++) {
+                byteBuf.writeBytes(EMPTY_BIOME_DATA);
+            }
+
+            EMPTY_CHUNK_DATA = new byte[byteBuf.readableBytes()];
+            byteBuf.readBytes(EMPTY_CHUNK_DATA);
+        } finally {
+            byteBuf.release();
+        }
+    }

     private static int indexYZXtoXZY(int yzx) {
         return (yzx >> 8) | (yzx & 0x0F0) | ((yzx & 0x00F) << 8);
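The static initializer above precomputes two buffers once at class load: EMPTY_BIOME_DATA is a single serialized empty BlockStorage palette, and EMPTY_CHUNK_DATA is that palette repeated 32 times, so an otherwise empty chunk payload already matches the 32-storage biome layout the translator now writes. A minimal self-check that would express that invariant; it would have to live inside ChunkUtils since EMPTY_CHUNK_DATA is private, and the method name is hypothetical:

    // Hypothetical check inside ChunkUtils: the empty-chunk payload is exactly 32
    // copies of the serialized empty biome palette built in the static block above.
    private static void verifyEmptyChunkLayout() {
        if (EMPTY_CHUNK_DATA.length != 32 * EMPTY_BIOME_DATA.length) {
            throw new AssertionError("empty chunk data does not hold 32 empty biome palettes");
        }
    }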
@@ -91,8 +123,10 @@ public class ChunkUtils {
         BitSet waterloggedPaletteIds = new BitSet();
         BitSet pistonOrFlowerPaletteIds = new BitSet();

+        boolean overworld = session.getDimension().equals(DimensionUtils.OVERWORLD);
+
         for (int sectionY = 0; sectionY < javaSections.length; sectionY++) {
-            if (yOffset < MINIMUM_ACCEPTED_HEIGHT && sectionY < -yOffset) {
+            if (yOffset < ((overworld ? MINIMUM_ACCEPTED_HEIGHT_OVERWORLD : MINIMUM_ACCEPTED_HEIGHT) >> 4) && sectionY < -yOffset) {
                 // Ignore this chunk since it goes below the accepted height limit
                 continue;
             }
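Two things change in the guard above: the minimum is now chosen per dimension (only the overworld gets the extended -64 floor), and the constant is shifted right by 4 before the comparison, which converts a height measured in blocks into section units before it is compared with yOffset. In concrete numbers, under the assumption that yOffset is the Java column's lowest section index:

    // Assumed example values for an extended-height overworld.
    int minimumAcceptedHeightOverworld = -64;                          // blocks, from the diff
    int minimumAcceptedSection = minimumAcceptedHeightOverworld >> 4;  // -4 sections
    int yOffset = -4;                                                  // column floor at y = -64
    boolean belowAcceptedLimit = yOffset < minimumAcceptedSection;     // false: the column fits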
@@ -128,7 +162,7 @@
                     ));
                 }
             }
-            sections[sectionY + yOffset] = section;
+            sections[sectionY + (yOffset - ((overworld ? MINIMUM_ACCEPTED_HEIGHT_OVERWORLD : MINIMUM_ACCEPTED_HEIGHT) >> 4))] = section;
             continue;
         }

@@ -201,7 +235,7 @@
                 layers = new BlockStorage[]{ layer0, new BlockStorage(BitArrayVersion.V1.createArray(BlockStorage.SIZE, layer1Data), layer1Palette) };
             }

-            sections[sectionY + yOffset] = new ChunkSection(layers);
+            sections[sectionY + (yOffset - ((overworld ? MINIMUM_ACCEPTED_HEIGHT_OVERWORLD : MINIMUM_ACCEPTED_HEIGHT) >> 4))] = new ChunkSection(layers);
         }

         CompoundTag[] blockEntities = column.getTileEntities();
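Both section-assignment hunks apply the same correction: the destination slot in the Bedrock section array is no longer sectionY + yOffset but is re-based against the accepted floor of the target dimension, so the section sitting at the Bedrock minimum lands in slot 0. With assumed example values (an extended-height overworld whose column floor is y = -64; only MINIMUM_ACCEPTED_HEIGHT_OVERWORLD = -64 comes from the diff):

    // Assumed example values, illustrative only.
    int yOffset = -4;                                             // Java column's lowest section
    int minimumAcceptedSection = -64 >> 4;                        // -4
    int sectionY = 0;                                             // the bottom Java section
    int oldIndex = sectionY + yOffset;                            // -4: before the start of the array
    int newIndex = sectionY + (yOffset - minimumAcceptedSection); //  0: the first Bedrock slot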
@@ -383,7 +417,7 @@
                 data.setChunkX(chunkX + x);
                 data.setChunkZ(chunkZ + z);
                 data.setSubChunksLength(0);
-                data.setData(new byte[0]);
+                data.setData(EMPTY_CHUNK_DATA);
                 data.setCachingEnabled(false);
                 session.sendUpstreamPacket(data);
