-
Notifications
You must be signed in to change notification settings - Fork 420
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* Initial support for 1.18.30 * Fix typos in DimensionEnum
- Loading branch information
Showing
14 changed files
with
999 additions
and
820 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
package cn.nukkit.level; | ||
|
||
import lombok.Builder; | ||
import lombok.Data; | ||
|
||
@Data | ||
public class DimensionData { | ||
private final int dimensionId; | ||
private final int minHeight; | ||
private final int maxHeight; | ||
private final int height; | ||
|
||
public DimensionData(int dimensionId, int minHeight, int maxHeight) { | ||
this.dimensionId = dimensionId; | ||
this.minHeight = minHeight; | ||
this.maxHeight = maxHeight; | ||
|
||
int height = maxHeight - minHeight; | ||
if (minHeight <= 0 && maxHeight > 0) { | ||
height += 1; // 0 y coordinate counts too | ||
} | ||
this.height = height; | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,26 @@ | ||
package cn.nukkit.level; | ||
|
||
public enum DimensionEnum { | ||
OVERWORLD(new DimensionData(Level.DIMENSION_OVERWORLD, -64, 319)), | ||
NETHER(new DimensionData(Level.DIMENSION_NETHER, 0, 127)), | ||
END(new DimensionData(Level.DIMENSION_THE_END, 0, 255)); | ||
|
||
private final DimensionData dimensionData; | ||
|
||
DimensionEnum(DimensionData dimensionData) { | ||
this.dimensionData = dimensionData; | ||
} | ||
|
||
public DimensionData getDimensionData() { | ||
return this.dimensionData; | ||
} | ||
|
||
public static DimensionData getDataFromId(int dimension) { | ||
for (DimensionEnum value : values()) { | ||
if (value.getDimensionData().getDimensionId() == dimension) { | ||
return value.getDimensionData(); | ||
} | ||
} | ||
return null; | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
117 changes: 117 additions & 0 deletions
117
src/main/java/cn/nukkit/level/format/generic/serializer/NetworkChunkSerializer.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,117 @@ | ||
package cn.nukkit.level.format.generic.serializer; | ||
|
||
import cn.nukkit.blockentity.BlockEntity; | ||
import cn.nukkit.blockentity.BlockEntitySpawnable; | ||
import cn.nukkit.level.DimensionData; | ||
import cn.nukkit.level.Level; | ||
import cn.nukkit.level.biome.Biome; | ||
import cn.nukkit.level.format.ChunkSection; | ||
import cn.nukkit.level.format.generic.BaseChunk; | ||
import cn.nukkit.level.format.generic.BaseFullChunk; | ||
import cn.nukkit.level.util.PalettedBlockStorage; | ||
import cn.nukkit.nbt.NBTIO; | ||
import cn.nukkit.nbt.tag.CompoundTag; | ||
import cn.nukkit.utils.BinaryStream; | ||
import cn.nukkit.utils.ThreadCache; | ||
import it.unimi.dsi.fastutil.objects.ObjectArrayList; | ||
|
||
import java.io.IOException; | ||
import java.nio.ByteOrder; | ||
import java.util.List; | ||
import java.util.function.BiConsumer; | ||
|
||
/**
 * Serializes a chunk into the network (LevelChunk packet) format introduced
 * with Bedrock 1.18, including the extended negative overworld range and the
 * 3D biome palettes that version added.
 */
public class NetworkChunkSerializer {

    // The overworld's extended negative range below y=0 spans 4 sub chunks
    // (-64..-1 = 64 blocks / 16 per sub chunk) — TODO confirm against the
    // -64 minHeight used in DimensionEnum.
    private static final int EXTENDED_NEGATIVE_SUB_CHUNKS = 4;

    // Pre-built bytes for 4 empty sub chunks, prepended for the overworld
    // since the internal world format does not store negative sections.
    private static final byte[] negativeSubChunks;

    static {
        // Build up 4 SubChunks for the extended negative height
        BinaryStream stream = new BinaryStream();
        for (int i = 0; i < EXTENDED_NEGATIVE_SUB_CHUNKS; i++) {
            stream.putByte((byte) 8); // SubChunk version
            stream.putByte((byte) 0); // 0 layers
        }
        negativeSubChunks = stream.getBuffer();
    }

    /**
     * Serializes the given chunk into network form and hands the resulting
     * stream plus the written sub-chunk count to {@code callback}.
     *
     * @param chunk         chunk to serialize
     * @param callback      receives the serialized payload and the number of
     *                      sub chunks written into it
     * @param dimensionData height metadata of the dimension the chunk is in
     */
    public static void serialize(BaseChunk chunk, BiConsumer<BinaryStream, Integer> callback, DimensionData dimensionData) {
        // Block entities are appended as NBT at the end of the payload;
        // skip the serialization work entirely when there are none.
        byte[] blockEntities;
        if (chunk.getBlockEntities().isEmpty()) {
            blockEntities = new byte[0];
        } else {
            blockEntities = serializeEntities(chunk);
        }

        // Find the highest non-empty section; everything above it is omitted
        // from the payload.
        int subChunkCount = 0;
        ChunkSection[] sections = chunk.getSections();
        for (int i = sections.length - 1; i >= 0; i--) {
            if (!sections[i].isEmpty()) {
                subChunkCount = i + 1;
                break;
            }
        }

        // Clamp to the dimension's maximum section count (height / 16).
        int maxDimensionSections = dimensionData.getHeight() >> 4;
        subChunkCount = Math.min(maxDimensionSections, subChunkCount);

        // In 1.18 3D biome palettes were introduced. However, current world format
        // used internally doesn't support them, so we need to convert from legacy 2D
        byte[] biomePalettes = convert2DBiomesTo3D(chunk, maxDimensionSections);
        BinaryStream stream = ThreadCache.binaryStream.get().reset();

        // Overworld has negative coordinates, but we currently do not support them
        // — prepend pre-built empty sub chunks so the client's section count
        // lines up with the -64 floor. NOTE(review): the
        // subChunkCount < maxDimensionSections guard appears to assume stored
        // sections never fill the whole extended range — confirm.
        int writtenSections = subChunkCount;
        if (dimensionData.getDimensionId() == Level.DIMENSION_OVERWORLD && subChunkCount < maxDimensionSections) {
            stream.put(negativeSubChunks);
            writtenSections += EXTENDED_NEGATIVE_SUB_CHUNKS;
        }

        // Stored (non-negative) sections, bottom-up.
        for (int i = 0; i < subChunkCount; i++) {
            sections[i].writeTo(stream);
        }

        // Payload tail: biome palettes, border-block count, then block entities.
        stream.put(biomePalettes);
        stream.putByte((byte) 0); // Border blocks
        stream.put(blockEntities);
        callback.accept(stream, writtenSections);
    }

    /**
     * Serializes the chunk's network-visible block entities to little-endian
     * NBT bytes. Only {@link BlockEntitySpawnable} entries are sent to clients.
     */
    private static byte[] serializeEntities(BaseChunk chunk) {
        List<CompoundTag> tagList = new ObjectArrayList<>();
        for (BlockEntity blockEntity : chunk.getBlockEntities().values()) {
            if (blockEntity instanceof BlockEntitySpawnable) {
                tagList.add(((BlockEntitySpawnable) blockEntity).getSpawnCompound());
            }
        }

        try {
            return NBTIO.write(tagList, ByteOrder.LITTLE_ENDIAN, true);
        } catch (IOException e) {
            // In-memory write; an IOException here indicates a programming
            // error, so surface it unchecked rather than swallowing it.
            throw new RuntimeException(e);
        }
    }

    /**
     * Expands the legacy 2D (per-column) biome map into {@code sections}
     * identical 3D biome palettes, as required by the 1.18 chunk format.
     *
     * @param chunk    chunk whose 2D biomes are read
     * @param sections number of copies of the palette to emit
     * @return concatenated serialized biome palettes, one per section
     */
    private static byte[] convert2DBiomesTo3D(BaseFullChunk chunk, int sections) {
        // Seed the palette's default state with the corner biome, then fill
        // every column with its 2D biome for all 16 Y levels of the section.
        PalettedBlockStorage palette = PalettedBlockStorage.createWithDefaultState(Biome.getBiomeIdOrCorrect(chunk.getBiomeId(0, 0)));
        for (int x = 0; x < 16; x++) {
            for (int z = 0; z < 16; z++) {
                int biomeId = Biome.getBiomeIdOrCorrect(chunk.getBiomeId(x, z));
                for (int y = 0; y < 16; y++) {
                    palette.setBlock(x, y, z, biomeId);
                }
            }
        }

        // Serialize the palette once, then repeat the same bytes for every
        // section — all sections share the column's biome.
        BinaryStream stream = ThreadCache.binaryStream.get().reset();
        palette.writeTo(stream);
        byte[] bytes = stream.getBuffer();
        stream.reset();

        for (int i = 0; i < sections; i++) {
            stream.put(bytes);
        }
        return stream.getBuffer();
    }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.