Mirror of https://github.com/ViaVersion/ViaVersion.git, synced 2024-11-03 14:50:30 +01:00

Map biomes, fix registry sending

This commit is contained in:
Parent 3c20b79573
Commit 3ef1912267
MappingDataLoader.java

@@ -14,7 +14,6 @@ import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.net.URL;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
@@ -209,8 +208,4 @@ public class MappingDataLoader {
     public static InputStream getResource(String name) {
         return MappingDataLoader.class.getClassLoader().getResourceAsStream("assets/viaversion/data/" + name);
     }
-
-    public static URL getResourceUrl(String name) {
-        return MappingDataLoader.class.getClassLoader().getResource("assets/viaversion/data/" + name);
-    }
 }
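With getResourceUrl removed, the stream accessor is the remaining way to load bundled data. A minimal caller sketch (not part of the commit; the example class is illustrative, and readAllBytes requires Java 9+):

// Hypothetical caller: consume a bundled data file as a stream instead of a URL.
import java.io.IOException;
import java.io.InputStream;
import us.myles.ViaVersion.api.data.MappingDataLoader;

final class ResourceReadExample {
    static byte[] readMappingFile() throws IOException {
        try (InputStream in = MappingDataLoader.getResource("mapping-1.16.2.json")) {
            if (in == null) {
                throw new IOException("Bundled resource not found on the classpath");
            }
            return in.readAllBytes(); // Java 9+; buffer manually on older JVMs
        }
    }
}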
BinaryTagIO.java

@@ -23,10 +23,21 @@
  */
 package us.myles.ViaVersion.api.minecraft.nbt;
 
+import com.github.steveice10.opennbt.tag.TagRegistry;
 import com.github.steveice10.opennbt.tag.builtin.CompoundTag;
 import org.jetbrains.annotations.NotNull;
 
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.GZIPOutputStream;
 
 /**
  * See https://github.com/KyoriPowered/adventure.
@@ -35,6 +46,139 @@ public final class BinaryTagIO {
     private BinaryTagIO() {
     }
 
+    /**
+     * Reads a compound tag from {@code path}.
+     *
+     * @param path the path
+     * @return the compound tag
+     * @throws IOException if an exception was encountered while reading a compound tag
+     */
+    @NotNull
+    public static CompoundTag readPath(final @NotNull Path path) throws IOException {
+        return readInputStream(Files.newInputStream(path));
+    }
+
+    /**
+     * Reads a compound tag from an input stream.
+     *
+     * @param input the input stream
+     * @return the compound tag
+     * @throws IOException if an exception was encountered while reading a compound tag
+     */
+    @NotNull
+    public static CompoundTag readInputStream(final @NotNull InputStream input) throws IOException {
+        try (final DataInputStream dis = new DataInputStream(input)) {
+            return readDataInput(dis);
+        }
+    }
+
+    /**
+     * Reads a compound tag from {@code path} using GZIP decompression.
+     *
+     * @param path the path
+     * @return the compound tag
+     * @throws IOException if an exception was encountered while reading a compound tag
+     */
+    @NotNull
+    public static CompoundTag readCompressedPath(final @NotNull Path path) throws IOException {
+        return readCompressedInputStream(Files.newInputStream(path));
+    }
+
+    /**
+     * Reads a compound tag from an input stream using GZIP decompression.
+     *
+     * @param input the input stream
+     * @return the compound tag
+     * @throws IOException if an exception was encountered while reading a compound tag
+     */
+    @NotNull
+    public static CompoundTag readCompressedInputStream(final @NotNull InputStream input) throws IOException {
+        try (final DataInputStream dis = new DataInputStream(new GZIPInputStream(input))) {
+            return readDataInput(dis);
+        }
+    }
+
+    /**
+     * Reads a compound tag from {@code input}.
+     *
+     * @param input the input
+     * @return the compound tag
+     * @throws IOException if an exception was encountered while reading a compound tag
+     */
+    @NotNull
+    public static CompoundTag readDataInput(final @NotNull DataInput input) throws IOException {
+        byte type = input.readByte();
+        if (type != TagRegistry.getIdFor(CompoundTag.class)) {
+            throw new IOException(String.format("Expected root tag to be a CompoundTag, was %s", type));
+        }
+        input.skipBytes(input.readUnsignedShort()); // read empty name
+
+        final CompoundTag compoundTag = new CompoundTag("");
+        compoundTag.read(input);
+        return compoundTag;
+    }
+
+    /**
+     * Writes a compound tag to {@code path}.
+     *
+     * @param tag the compound tag
+     * @param path the path
+     * @throws IOException if an exception was encountered while writing the compound tag
+     */
+    public static void writePath(final @NotNull CompoundTag tag, final @NotNull Path path) throws IOException {
+        writeOutputStream(tag, Files.newOutputStream(path));
+    }
+
+    /**
+     * Writes a compound tag to an output stream.
+     *
+     * @param tag the compound tag
+     * @param output the output stream
+     * @throws IOException if an exception was encountered while writing the compound tag
+     */
+    public static void writeOutputStream(final @NotNull CompoundTag tag, final @NotNull OutputStream output) throws IOException {
+        try (final DataOutputStream dos = new DataOutputStream(output)) {
+            writeDataOutput(tag, dos);
+        }
+    }
+
+    /**
+     * Writes a compound tag to {@code path} using GZIP compression.
+     *
+     * @param tag the compound tag
+     * @param path the path
+     * @throws IOException if an exception was encountered while writing the compound tag
+     */
+    public static void writeCompressedPath(final @NotNull CompoundTag tag, final @NotNull Path path) throws IOException {
+        writeCompressedOutputStream(tag, Files.newOutputStream(path));
+    }
+
+    /**
+     * Writes a compound tag to an output stream using GZIP compression.
+     *
+     * @param tag the compound tag
+     * @param output the output stream
+     * @throws IOException if an exception was encountered while writing the compound tag
+     */
+    public static void writeCompressedOutputStream(final @NotNull CompoundTag tag, final @NotNull OutputStream output) throws IOException {
+        try (final DataOutputStream dos = new DataOutputStream(new GZIPOutputStream(output))) {
+            writeDataOutput(tag, dos);
+        }
+    }
+
+    /**
+     * Writes a compound tag to {@code output}.
+     *
+     * @param tag the compound tag
+     * @param output the output
+     * @throws IOException if an exception was encountered while writing the compound tag
+     */
+    public static void writeDataOutput(final @NotNull CompoundTag tag, final @NotNull DataOutput output) throws IOException {
+        output.writeByte(TagRegistry.getIdFor(CompoundTag.class));
+        output.writeUTF(""); // write empty name
+        tag.write(output);
+    }
+
     /**
      * Reads a compound tag from a {@link String}.
      *
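The helpers added above read and write OpenNBT CompoundTags, optionally through GZIP. A minimal usage sketch (not part of the commit; the file name, the StringTag child, and the example class are illustrative) round-tripping a tag through the compressed write/read pair:

import com.github.steveice10.opennbt.tag.builtin.CompoundTag;
import com.github.steveice10.opennbt.tag.builtin.StringTag;
import us.myles.ViaVersion.api.minecraft.nbt.BinaryTagIO;

import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

final class BinaryTagIOExample {
    public static void main(String[] args) throws IOException {
        Path file = Paths.get("example.nbt");              // hypothetical output file
        CompoundTag tag = new CompoundTag("");
        tag.put(new StringTag("name", "value"));           // OpenNBT tags carry their own key
        BinaryTagIO.writeCompressedPath(tag, file);        // GZIP-compressed write
        CompoundTag read = BinaryTagIO.readCompressedPath(file); // matching read
        System.out.println(read.get("name").getValue());
    }
}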
Protocol1_16_2To1_16_1.java

@@ -77,6 +77,7 @@ public class Protocol1_16_2To1_16_1 extends Protocol<ClientboundPackets1_16, Cli
     protected void loadMappingData() {
         MappingData.init();
 
+        tagRewriter.addTag(TagType.ITEM, "minecraft:stone_crafting_materials", 14, 962);
         tagRewriter.addEmptyTag(TagType.BLOCK, "minecraft:mushroom_grow_block");
 
         // The client now wants ALL (previous) tags to be sent, sooooo :>
@@ -87,7 +88,8 @@ public class Protocol1_16_2To1_16_1 extends Protocol<ClientboundPackets1_16, Cli
                 "minecraft:wither_summon_base_blocks", "minecraft:infiniburn_overworld", "minecraft:piglin_repellents",
                 "minecraft:hoglin_repellents", "minecraft:prevent_mob_spawning_inside", "minecraft:wart_blocks",
                 "minecraft:stone_pressure_plates", "minecraft:nylium", "minecraft:gold_ores", "minecraft:pressure_plates",
-                "minecraft:logs_that_burn", "minecraft:strider_warm_blocks", "minecraft:warped_stems", "minecraft:infiniburn_end");
+                "minecraft:logs_that_burn", "minecraft:strider_warm_blocks", "minecraft:warped_stems", "minecraft:infiniburn_end",
+                "minecraft:base_stone_nether", "minecraft:base_stone_overworld");
     }
 
     public static int getNewBlockStateId(int id) {
BiomeMappings.java (new file)

@@ -0,0 +1,48 @@
+package us.myles.ViaVersion.protocols.protocol1_16_2to1_16_1.data;
+
+import it.unimi.dsi.fastutil.ints.Int2IntMap;
+import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;
+
+public class BiomeMappings {
+
+    private static final Int2IntMap BIOMES = new Int2IntOpenHashMap();
+
+    static {
+        BIOMES.put(127, 51);
+        BIOMES.put(129, 52);
+        BIOMES.put(130, 53);
+        BIOMES.put(131, 54);
+        BIOMES.put(132, 55);
+        BIOMES.put(133, 56);
+        BIOMES.put(134, 57);
+        BIOMES.put(140, 58);
+        BIOMES.put(149, 59);
+        BIOMES.put(151, 60);
+        BIOMES.put(155, 61);
+        BIOMES.put(156, 62);
+        BIOMES.put(157, 63);
+        BIOMES.put(158, 64);
+        BIOMES.put(160, 65);
+        BIOMES.put(161, 66);
+        BIOMES.put(162, 67);
+        BIOMES.put(163, 68);
+        BIOMES.put(164, 69);
+        BIOMES.put(165, 70);
+        BIOMES.put(166, 71);
+        BIOMES.put(167, 72);
+        BIOMES.put(168, 73);
+        BIOMES.put(169, 74);
+        BIOMES.put(170, 75);
+        BIOMES.put(171, 76);
+        BIOMES.put(172, 77);
+        BIOMES.put(173, 78);
+    }
+
+    public static Int2IntMap getBiomes() {
+        return BIOMES;
+    }
+
+    public static int getNewBiomeId(int biomeId) {
+        return BIOMES.getOrDefault(biomeId, biomeId);
+    }
+}
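The table above shifts the listed variant biome IDs (127 and up) into the contiguous 51-78 range, and getNewBiomeId falls back to the input for every ID without an entry. A minimal illustration (not part of the commit):

// Sketch of the lookup behaviour defined above.
int shifted = BiomeMappings.getNewBiomeId(127);  // -> 51, remapped variant biome
int untouched = BiomeMappings.getNewBiomeId(1);  // -> 1, IDs without an entry pass through unchanged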
MappingData.java

@@ -1,14 +1,13 @@
 package us.myles.ViaVersion.protocols.protocol1_16_2to1_16_1.data;
 
-import com.github.steveice10.opennbt.NBTIO;
 import com.github.steveice10.opennbt.tag.builtin.CompoundTag;
 import com.google.gson.JsonObject;
 import us.myles.ViaVersion.api.Via;
 import us.myles.ViaVersion.api.data.MappingDataLoader;
 import us.myles.ViaVersion.api.data.Mappings;
+import us.myles.ViaVersion.api.minecraft.nbt.BinaryTagIO;
 import us.myles.ViaVersion.util.Int2IntBiMap;
 
-import java.io.File;
 import java.io.IOException;
 
 public class MappingData {
@@ -24,7 +23,7 @@ public class MappingData {
         JsonObject mapping1_16_2 = MappingDataLoader.loadData("mapping-1.16.2.json", true);
 
         try {
-            dimensionRegistry = NBTIO.readFile(new File(MappingDataLoader.getResourceUrl("dimenstion-registry-1.16.2.nbt").getFile()));
+            dimensionRegistry = BinaryTagIO.readCompressedInputStream(MappingDataLoader.getResource("dimension-registry-1.16.2.nbt"));
         } catch (IOException e) {
             Via.getPlatform().getLogger().severe("Error loading dimenstion registry:");
             e.printStackTrace();
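This is the registry-sending fix: the old code converted a classpath URL into a File, which is unreliable once the data file is packed inside the plugin jar, and it also used the misspelled resource name "dimenstion-registry-1.16.2.nbt". The replacement reads the GZIP-compressed NBT directly from the classpath stream. A standalone sketch of the new load path (not part of the commit; only calls shown in this diff are used, the wrapper class is illustrative):

import com.github.steveice10.opennbt.tag.builtin.CompoundTag;
import us.myles.ViaVersion.api.data.MappingDataLoader;
import us.myles.ViaVersion.api.minecraft.nbt.BinaryTagIO;

import java.io.IOException;

final class RegistryLoadExample {
    // Load the bundled dimension registry the same way MappingData now does.
    static CompoundTag loadDimensionRegistry() throws IOException {
        return BinaryTagIO.readCompressedInputStream(
                MappingDataLoader.getResource("dimension-registry-1.16.2.nbt"));
    }
}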
WorldPackets.java

@@ -11,6 +11,7 @@ import us.myles.ViaVersion.api.rewriters.BlockRewriter;
 import us.myles.ViaVersion.api.type.Type;
 import us.myles.ViaVersion.protocols.protocol1_16_2to1_16_1.ClientboundPackets1_16_2;
 import us.myles.ViaVersion.protocols.protocol1_16_2to1_16_1.Protocol1_16_2To1_16_1;
+import us.myles.ViaVersion.protocols.protocol1_16_2to1_16_1.data.BiomeMappings;
 import us.myles.ViaVersion.protocols.protocol1_16_2to1_16_1.types.Chunk1_16_2Type;
 import us.myles.ViaVersion.protocols.protocol1_16to1_15_2.ClientboundPackets1_16;
 import us.myles.ViaVersion.protocols.protocol1_16to1_15_2.types.Chunk1_16Type;
@@ -38,6 +39,14 @@ public class WorldPackets {
                         Chunk chunk = wrapper.read(new Chunk1_16Type(clientWorld));
                         wrapper.write(new Chunk1_16_2Type(clientWorld), chunk);
 
+                        if (chunk.isBiomeData()) {
+                            int[] biomes = chunk.getBiomeData();
+                            for (int i = 0; i < biomes.length; i++) {
+                                int biome = biomes[i];
+                                biomes[i] = BiomeMappings.getNewBiomeId(biome);
+                            }
+                        }
+
                         for (int s = 0; s < 16; s++) {
                             ChunkSection section = chunk.getSections()[s];
                             if (section == null) continue;
@@ -73,7 +82,7 @@
                         }
 
                         // Absolute y -> relative chunk section y
-                        int blockId = Protocol1_16_2To1_16_1.getNewBlockId(record.getBlockId());
+                        int blockId = Protocol1_16_2To1_16_1.getNewBlockStateId(record.getBlockId());
                         list.add(new BlockChangeRecord1_16_2(record.getSectionX(), record.getSectionY(), record.getSectionZ(), blockId));
                     }
 