Mirror of https://github.com/GeyserMC/Geyser.git, synchronized 2024-12-26 16:12:46 +01:00

TEMPORARY fix for chunk memory leaks

This commit is contained in:
Camotoy 2023-04-24 23:48:05 -04:00
Parent abba88112a
Commit b66088e434
2 changed files with 9 additions and 3 deletions
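The commit works around the leak by no longer handing Geyser's pooled Netty ByteBuf to the outgoing LevelChunkPacket. Instead, the serialized chunk is drained into a plain byte[] and the packet receives an unpooled wrapper around that array, so the pooled buffer can be released locally regardless of what the packet pipeline does with the data. A minimal sketch of that pattern, assuming only Netty's ByteBuf API; the class and method names below are illustrative, not Geyser code:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.Unpooled;

public class ChunkPayloadSketch {
    static ByteBuf encodeChunk() {
        ByteBuf byteBuf = ByteBufAllocator.DEFAULT.buffer();
        byte[] payload;
        try {
            byteBuf.writeByte(0); // stand-in for the real chunk serialization
            // Drain everything that was written into an ordinary heap array...
            payload = new byte[byteBuf.readableBytes()];
            byteBuf.readBytes(payload);
        } finally {
            // ...so the pooled buffer can be released unconditionally here.
            byteBuf.release();
        }
        // The caller gets plain heap memory: even if this buffer is never
        // released, it is reclaimed by garbage collection instead of staying
        // checked out of Netty's buffer pool.
        return Unpooled.wrappedBuffer(payload);
    }
}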

JavaLevelChunkWithLightTranslator.java

@@ -105,6 +105,7 @@ public class JavaLevelChunkWithLightTranslator extends PacketTranslator<Clientbo
         int maxBedrockSectionY = (bedrockDimension.height() >> 4) - 1;
         int sectionCount;
+        byte[] payload;
         ByteBuf byteBuf = null;
         GeyserChunkSection[] sections = new GeyserChunkSection[javaChunks.length - (yOffset + (bedrockDimension.minY() >> 4))];
@@ -347,7 +348,8 @@ public class JavaLevelChunkWithLightTranslator extends PacketTranslator<Clientbo
             for (NbtMap blockEntity : bedrockBlockEntities) {
                 nbtStream.writeTag(blockEntity);
             }
-            byteBuf.retain();
+            payload = new byte[byteBuf.readableBytes()];
+            byteBuf.readBytes(payload);
         } catch (IOException e) {
             session.getGeyser().getLogger().error("IO error while encoding chunk", e);
             return;
@@ -362,7 +364,7 @@ public class JavaLevelChunkWithLightTranslator extends PacketTranslator<Clientbo
         levelChunkPacket.setCachingEnabled(false);
         levelChunkPacket.setChunkX(packet.getX());
         levelChunkPacket.setChunkZ(packet.getZ());
-        levelChunkPacket.setData(byteBuf);
+        levelChunkPacket.setData(Unpooled.wrappedBuffer(payload));
         session.sendUpstreamPacket(levelChunkPacket);
         if (!lecterns.isEmpty()) {
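Whether a retained buffer handed to the packet really is what leaks can be confirmed with Netty's built-in leak detector, which logs a "LEAK:" report for any pooled buffer that is garbage-collected without a matching release(). A self-contained sketch using only standard Netty API; this is a debugging aid, not part of this commit:

import io.netty.util.ResourceLeakDetector;

public class LeakDetectionSketch {
    public static void main(String[] args) {
        // Same effect as starting the JVM with -Dio.netty.leakDetection.level=PARANOID:
        // every allocation is tracked (not just sampled), and unreleased pooled
        // buffers are reported together with where they were last accessed.
        ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.PARANOID);
    }
}

PARANOID tracking is expensive, so it suits a debugging session rather than a production proxy.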

ChunkUtils.java

@@ -175,6 +175,7 @@ public class ChunkUtils {
         BedrockDimension bedrockDimension = session.getChunkCache().getBedrockDimension();
         int bedrockSubChunkCount = bedrockDimension.height() >> 4;
+        byte[] payload;
         // Allocate output buffer
         ByteBuf byteBuf = ByteBufAllocator.DEFAULT.buffer(ChunkUtils.EMPTY_BIOME_DATA.length * bedrockSubChunkCount + 1); // Consists only of biome data and border blocks
         try {
@@ -185,11 +186,14 @@ public class ChunkUtils {
             byteBuf.writeByte(0); // Border blocks - Edu edition only
+            payload = new byte[byteBuf.readableBytes()];
+            byteBuf.readBytes(payload);
             LevelChunkPacket data = new LevelChunkPacket();
             data.setChunkX(chunkX);
             data.setChunkZ(chunkZ);
             data.setSubChunksLength(0);
-            data.setData(byteBuf.retain());
+            data.setData(Unpooled.wrappedBuffer(payload));
             data.setCachingEnabled(false);
             session.sendUpstreamPacket(data);
         } finally {
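The empty-chunk path in ChunkUtils gets the same treatment: the buffer allocated from ByteBufAllocator.DEFAULT is drained into payload while it is still owned locally, and the packet is given an unpooled wrapper instead of the retained pooled buffer. The reference-counting arithmetic behind that choice, shown with plain Netty calls; illustrative only, not Geyser code:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.Unpooled;

public class RefCountSketch {
    public static void main(String[] args) {
        ByteBuf pooled = ByteBufAllocator.DEFAULT.buffer(16);
        pooled.retain();                      // refCnt 2: what the old setData(byteBuf.retain()) relied on
        pooled.release();                     // refCnt 1: one local release is not enough on its own
        System.out.println(pooled.refCnt());  // prints 1, the buffer is still checked out of the pool
        pooled.release();                     // the downstream consumer must release too, or it leaks

        ByteBuf wrapped = Unpooled.wrappedBuffer(new byte[16]);
        System.out.println(wrapped.refCnt()); // prints 1, but the backing array is ordinary heap
        // memory, so a missing release() only costs garbage-collected bytes rather
        // than buffers that never return to the pooled allocator.
    }
}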