package net.minecraft.world.chunk.storage;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.annotation.Nullable;
import net.minecraft.block.Block;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityList;
import net.minecraft.nbt.CompressedStreamTools;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.datafix.DataFixer;
import net.minecraft.util.datafix.FixTypes;
import net.minecraft.util.datafix.IDataFixer;
import net.minecraft.util.datafix.IDataWalker;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.ChunkPos;
import net.minecraft.world.MinecraftException;
import net.minecraft.world.NextTickListEntry;
import net.minecraft.world.World;
import net.minecraft.world.chunk.Chunk;
import net.minecraft.world.chunk.NibbleArray;
import net.minecraft.world.storage.IThreadedFileIO;
import net.minecraft.world.storage.ThreadedFileIOBase;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class AnvilChunkLoader implements IChunkLoader, IThreadedFileIO
{
    private static final Logger LOGGER = LogManager.getLogger();

    // Removals for MC-119971
    // private final Map<ChunkPos, NBTTagCompound> chunksToRemove = new ConcurrentHashMap<ChunkPos, NBTTagCompound>();
    // private final Set<ChunkPos> pendingAnvilChunksCoordinates = Collections.newSetFromMap(new ConcurrentHashMap<ChunkPos, Boolean>());

    /* --- new data structures for MC-119971 --- */

    // The new data structures don't need to be concurrent, since we're doing our
    // own mutual exclusion; using the concurrent ones would now just be extra overhead.
    private final Map<ChunkPos, NBTTagCompound> chunksToRemove = new HashMap<ChunkPos, NBTTagCompound>();

    // Currently there will never be more than one chunk being written at a time,
    // but a map is convenient and leaves that option open for the future.
    private final Map<ChunkPos, NBTTagCompound> chunksInWrite = new HashMap<ChunkPos, NBTTagCompound>();

    /* --- new synchronized methods for MC-119971 --- */

    // Insert a new chunk into the pending queue, replacing any older one
    // at the same position.
    private synchronized void queueChunkToRemove(ChunkPos pos, NBTTagCompound data)
    {
        chunksToRemove.put(pos, data);

        // No need to check chunksInWrite. It may contain an older chunk
        // at this location, but what matters is not losing the new one.
        // This fixes Rich Crosby's bug.
    }

    // Fetch another chunk to save to disk and atomically move it into
    // the queue of chunk(s) being written.
    private synchronized Map.Entry<ChunkPos, NBTTagCompound> fetchChunkToWrite()
    {
        if (chunksToRemove.isEmpty())
        {
            return null;
        }

        // Pick an entry in chunksToRemove and remove it from the collection
        Set<Map.Entry<ChunkPos, NBTTagCompound>> entrySet = chunksToRemove.entrySet();
        Iterator<Map.Entry<ChunkPos, NBTTagCompound>> iter = entrySet.iterator();
        Map.Entry<ChunkPos, NBTTagCompound> entry = iter.next();
        iter.remove();

        // Indicate that this entry is going to be written out now
        chunksInWrite.put(entry.getKey(), entry.getValue());
        return entry;
    }

    // Once the write for a chunk is completely committed to disk,
    // this method discards it.
    private synchronized void retireChunkToWrite(ChunkPos pos, NBTTagCompound data)
    {
        chunksInWrite.remove(pos);
    }
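
    // Illustrative sketch (not called anywhere in the game code): the intended life
    // cycle of a pending chunk under the two-map scheme above, assuming a single I/O
    // thread as in vanilla.
    //
    //   queueChunkToRemove(pos, nbt);                  // save thread: NBT enters chunksToRemove
    //   Map.Entry<ChunkPos, NBTTagCompound> e = fetchChunkToWrite();
    //                                                  // I/O thread: atomically moved to chunksInWrite
    //   writeChunkData(e.getKey(), e.getValue());      // region file write happens outside the lock
    //   retireChunkToWrite(e.getKey(), e.getValue());  // dropped only once the write is on disk
    //
    // At every step of that sequence, reloadChunkFromRemoveQueues(pos) below can still
    // see the chunk's NBT, which is what closes the MC-119971 window.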

    // Check these data structures for a chunk being reloaded.
    private synchronized NBTTagCompound reloadChunkFromRemoveQueues(ChunkPos pos)
    {
        // If this chunk is queued at all, the most recent version will be in chunksToRemove.
        NBTTagCompound data = chunksToRemove.get(pos);

        // Note: the line above fetches the chunk but leaves it in the queue to be
        // saved. This is the original behavior and probably safest in terms of
        // avoiding data loss on a crash. However, if we wanted to *cancel* the
        // save, 'get' could be replaced with 'remove'.

        // If we found the chunk, return it.
        if (data != null)
        {
            return data;
        }

        // Otherwise, check chunksInWrite. This is what fixes MC-119971.
        return chunksInWrite.get(pos);
    }

    // Check whether the chunk exists at all in any pending save state.
    private synchronized boolean chunkExistInRemoveQueues(ChunkPos pos)
    {
        return chunksToRemove.containsKey(pos) || chunksInWrite.containsKey(pos);
    }

    /* --- end of new code for MC-119971 --- */

    /** Save directory for chunks using the Anvil format */
    private final File chunkSaveLocation;
    private final DataFixer dataFixer;
    private boolean savingExtraData;

    public AnvilChunkLoader(File chunkSaveLocationIn, DataFixer dataFixerIn)
    {
        this.chunkSaveLocation = chunkSaveLocationIn;
        this.dataFixer = dataFixerIn;
    }

    @Nullable

    /**
     * Loads the specified (XZ) chunk into the specified world.
     */
    public Chunk loadChunk(World worldIn, int x, int z) throws IOException
    {
        ChunkPos chunkpos = new ChunkPos(x, z);

        // Deleted line for MC-119971
        // NBTTagCompound nbttagcompound = (NBTTagCompound)this.chunksToRemove.get(chunkpos);

        NBTTagCompound nbttagcompound = reloadChunkFromRemoveQueues(chunkpos); // new for MC-119971

        if (nbttagcompound == null)
        {
            DataInputStream datainputstream = RegionFileCache.getChunkInputStream(this.chunkSaveLocation, x, z);

            if (datainputstream == null)
            {
                return null;
            }

            nbttagcompound = this.dataFixer.process(FixTypes.CHUNK, CompressedStreamTools.read(datainputstream));
        }

        return this.checkedReadChunkFromNBT(worldIn, x, z, nbttagcompound);
    }

    // This method is in the MCPBot database as "isChunkGeneratedAt".
    // More than one line of this method is modified for MC-119971.
    public boolean func_191063_a(int p_191063_1_, int p_191063_2_)
    {
        ChunkPos chunkpos = new ChunkPos(p_191063_1_, p_191063_2_);

        // NBTTagCompound nbttagcompound = (NBTTagCompound)this.chunksToRemove.get(chunkpos);
        boolean exists = chunkExistInRemoveQueues(chunkpos);

        return exists ? true : RegionFileCache.func_191064_f(this.chunkSaveLocation, p_191063_1_, p_191063_2_);
    }
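
    // Note on the read path: both loadChunk and func_191063_a above consult the
    // in-memory pending queues before falling back to RegionFileCache. Illustrative
    // scenario (hypothetical caller, not real game code):
    //
    //   loader.saveChunk(world, chunk);   // chunk NBT now sits in chunksToRemove
    //   Chunk again = loader.loadChunk(world, chunk.xPosition, chunk.zPosition);
    //                                     // served from the queues, not from disk,
    //                                     // even if the async write has not finished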

    @Nullable

    /**
     * Wraps readChunkFromNBT. Checks the coordinates and several NBT tags.
     */
    protected Chunk checkedReadChunkFromNBT(World worldIn, int x, int z, NBTTagCompound compound)
    {
        if (!compound.hasKey("Level", 10))
        {
            LOGGER.error("Chunk file at {},{} is missing level data, skipping", new Object[] {Integer.valueOf(x), Integer.valueOf(z)});
            return null;
        }
        else
        {
            NBTTagCompound nbttagcompound = compound.getCompoundTag("Level");

            if (!nbttagcompound.hasKey("Sections", 9))
            {
                LOGGER.error("Chunk file at {},{} is missing block data, skipping", new Object[] {Integer.valueOf(x), Integer.valueOf(z)});
                return null;
            }
            else
            {
                Chunk chunk = this.readChunkFromNBT(worldIn, nbttagcompound);

                if (!chunk.isAtLocation(x, z))
                {
                    LOGGER.error("Chunk file at {},{} is in the wrong location; relocating. (Expected {}, {}, got {}, {})", new Object[] {Integer.valueOf(x), Integer.valueOf(z), Integer.valueOf(x), Integer.valueOf(z), Integer.valueOf(chunk.xPosition), Integer.valueOf(chunk.zPosition)});
                    nbttagcompound.setInteger("xPos", x);
                    nbttagcompound.setInteger("zPos", z);
                    chunk = this.readChunkFromNBT(worldIn, nbttagcompound);
                }

                return chunk;
            }
        }
    }

    public void saveChunk(World worldIn, Chunk chunkIn) throws MinecraftException, IOException
    {
        worldIn.checkSessionLock();

        try
        {
            NBTTagCompound nbttagcompound = new NBTTagCompound();
            NBTTagCompound nbttagcompound1 = new NBTTagCompound();
            nbttagcompound.setTag("Level", nbttagcompound1);
            nbttagcompound.setInteger("DataVersion", 922);
            this.writeChunkToNBT(chunkIn, worldIn, nbttagcompound1);
            this.addChunkToPending(chunkIn.getChunkCoordIntPair(), nbttagcompound);
        }
        catch (Exception exception)
        {
            LOGGER.error((String)"Failed to save chunk", (Throwable)exception);
        }
    }

    protected void addChunkToPending(ChunkPos pos, NBTTagCompound compound)
    {
        // Removed for MC-119971
        // if (!this.pendingAnvilChunksCoordinates.contains(pos))
        // {
        //     this.chunksToRemove.put(pos, compound);
        // }

        // New for MC-119971
        queueChunkToRemove(pos, compound);

        ThreadedFileIOBase.getThreadedIOInstance().queueIO(this);
    }
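
    // Save flow, as reconstructed from the calls above and below (this assumes the
    // vanilla ThreadedFileIOBase behavior of invoking writeNextIO from its own I/O
    // thread): saveChunk serializes the chunk to NBT on the calling thread,
    // addChunkToPending queues that NBT and registers this loader with the I/O
    // thread, and writeNextIO below then drains one pending chunk per call until
    // the queue is empty.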

    /**
     * Returns a boolean stating if the write was unsuccessful.
     */
    public boolean writeNextIO()
    {
        // Removals for MC-119971
        // if (this.chunksToRemove.isEmpty())
        // {
        //     if (this.savingExtraData)
        //     {
        //         LOGGER.info("ThreadedAnvilChunkStorage ({}): All chunks are saved", new Object[] {this.chunkSaveLocation.getName()});
        //     }
        //
        //     return false;
        // }
        // else
        // {
        //     ChunkPos chunkpos = (ChunkPos)this.chunksToRemove.keySet().iterator().next();

        // New for MC-119971
        // Try to fetch a pending chunk.
        Map.Entry<ChunkPos, NBTTagCompound> entry = fetchChunkToWrite();

        if (entry == null)
        {
            // If none are left, here's code for a message that will never be
            // executed, since there is no "extra data."
            if (this.savingExtraData)
            {
                LOGGER.info("ThreadedAnvilChunkStorage ({}): All chunks are saved", new Object[] {this.chunkSaveLocation.getName()});
            }

            return false;
        }

        // New for MC-119971
        ChunkPos chunkpos = entry.getKey();
        NBTTagCompound nbttagcompound = entry.getValue();

        boolean lvt_3_1_;

        // Removal for MC-119971
        // Redundant try, since writeChunkData is the only thing that can
        // throw an exception here.
        // try
        // {

        // Removal for MC-119971
        // Noting that a chunk is being written out is already handled by
        // fetchChunkToWrite, and the chunk is already removed from the container.
        // this.pendingAnvilChunksCoordinates.add(chunkpos);
        // NBTTagCompound nbttagcompound = (NBTTagCompound)this.chunksToRemove.remove(chunkpos);

        // nbttagcompound can't be null
        // if (nbttagcompound != null)
        // {
        try
        {
            this.writeChunkData(chunkpos, nbttagcompound);
        }
        catch (Exception exception)
        {
            LOGGER.error((String)"Failed to save chunk", (Throwable)exception);
        }
        // }

        lvt_3_1_ = true;

        // Removal for MC-119971
        // }
        // finally
        // {
        //     this.pendingAnvilChunksCoordinates.remove(chunkpos);
        // }

        // New for MC-119971
        // Now that the chunk is fully committed to disk and any load would now get
        // it from the RegionFile, we can retire this chunk from the chunk loader's
        // data structures.
        retireChunkToWrite(chunkpos, nbttagcompound);

        return lvt_3_1_;
        // }
    }

    private void writeChunkData(ChunkPos pos, NBTTagCompound compound) throws IOException
    {
        DataOutputStream dataoutputstream = RegionFileCache.getChunkOutputStream(this.chunkSaveLocation, pos.chunkXPos, pos.chunkZPos);
        CompressedStreamTools.write(compound, dataoutputstream);
        dataoutputstream.close();
    }

    /**
     * Save extra data associated with this Chunk not normally saved during autosave, only during chunk unload.
     * Currently unused.
     */
    public void saveExtraChunkData(World worldIn, Chunk chunkIn) throws IOException
    {
    }

    /**
     * Called every World.tick()
     */
    public void chunkTick()
    {
    }

    /**
     * Save extra data not associated with any Chunk. Not saved during autosave, only during world unload. Currently
     * unused.
     */
    public void saveExtraData()
    {
        try
        {
            this.savingExtraData = true;

            while (this.writeNextIO());
        }
        finally
        {
            this.savingExtraData = false;
        }
    }

    public static void registerFixes(DataFixer fixer)
    {
        fixer.registerWalker(FixTypes.CHUNK, new IDataWalker()
        {
            public NBTTagCompound process(IDataFixer fixer, NBTTagCompound compound, int versionIn)
            {
                if (compound.hasKey("Level", 10))
                {
                    NBTTagCompound nbttagcompound = compound.getCompoundTag("Level");

                    if (nbttagcompound.hasKey("Entities", 9))
                    {
                        NBTTagList nbttaglist = nbttagcompound.getTagList("Entities", 10);

                        for (int i = 0; i < nbttaglist.tagCount(); ++i)
                        {
                            nbttaglist.set(i, fixer.process(FixTypes.ENTITY, (NBTTagCompound)nbttaglist.get(i), versionIn));
                        }
                    }

                    if (nbttagcompound.hasKey("TileEntities", 9))
                    {
                        NBTTagList nbttaglist1 = nbttagcompound.getTagList("TileEntities", 10);

                        for (int j = 0; j < nbttaglist1.tagCount(); ++j)
                        {
                            nbttaglist1.set(j, fixer.process(FixTypes.BLOCK_ENTITY, (NBTTagCompound)nbttaglist1.get(j), versionIn));
                        }
                    }
                }

                return compound;
            }
        });
    }
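
    // Layout of the "Level" compound produced by writeChunkToNBT below (and consumed
    // by readChunkFromNBT): xPos/zPos, LastUpdate, HeightMap, TerrainPopulated,
    // LightPopulated, InhabitedTime, Sections (Y, Blocks, Data, optional Add,
    // BlockLight, SkyLight per 16-block section), Biomes, Entities, TileEntities
    // and, when pending block updates exist, TileTicks.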

    /**
     * Writes the Chunk passed as an argument to the NBTTagCompound also passed, using the World argument to retrieve
     * the Chunk's last update time.
     */
    private void writeChunkToNBT(Chunk chunkIn, World worldIn, NBTTagCompound compound)
    {
        compound.setInteger("xPos", chunkIn.xPosition);
        compound.setInteger("zPos", chunkIn.zPosition);
        compound.setLong("LastUpdate", worldIn.getTotalWorldTime());
        compound.setIntArray("HeightMap", chunkIn.getHeightMap());
        compound.setBoolean("TerrainPopulated", chunkIn.isTerrainPopulated());
        compound.setBoolean("LightPopulated", chunkIn.isLightPopulated());
        compound.setLong("InhabitedTime", chunkIn.getInhabitedTime());
        ExtendedBlockStorage[] aextendedblockstorage = chunkIn.getBlockStorageArray();
        NBTTagList nbttaglist = new NBTTagList();
        boolean flag = worldIn.provider.func_191066_m();

        for (ExtendedBlockStorage extendedblockstorage : aextendedblockstorage)
        {
            if (extendedblockstorage != Chunk.NULL_BLOCK_STORAGE)
            {
                NBTTagCompound nbttagcompound = new NBTTagCompound();
                nbttagcompound.setByte("Y", (byte)(extendedblockstorage.getYLocation() >> 4 & 255));
                byte[] abyte = new byte[4096];
                NibbleArray nibblearray = new NibbleArray();
                NibbleArray nibblearray1 = extendedblockstorage.getData().getDataForNBT(abyte, nibblearray);
                nbttagcompound.setByteArray("Blocks", abyte);
                nbttagcompound.setByteArray("Data", nibblearray.getData());

                if (nibblearray1 != null)
                {
                    nbttagcompound.setByteArray("Add", nibblearray1.getData());
                }

                nbttagcompound.setByteArray("BlockLight", extendedblockstorage.getBlocklightArray().getData());

                if (flag)
                {
                    nbttagcompound.setByteArray("SkyLight", extendedblockstorage.getSkylightArray().getData());
                }
                else
                {
                    nbttagcompound.setByteArray("SkyLight", new byte[extendedblockstorage.getBlocklightArray().getData().length]);
                }

                nbttaglist.appendTag(nbttagcompound);
            }
        }

        compound.setTag("Sections", nbttaglist);
        compound.setByteArray("Biomes", chunkIn.getBiomeArray());
        chunkIn.setHasEntities(false);
        NBTTagList nbttaglist1 = new NBTTagList();

        for (int i = 0; i < chunkIn.getEntityLists().length; ++i)
        {
            for (Entity entity : chunkIn.getEntityLists()[i])
            {
                NBTTagCompound nbttagcompound2 = new NBTTagCompound();

                if (entity.writeToNBTOptional(nbttagcompound2))
                {
                    chunkIn.setHasEntities(true);
                    nbttaglist1.appendTag(nbttagcompound2);
                }
            }
        }

        compound.setTag("Entities", nbttaglist1);
        NBTTagList nbttaglist2 = new NBTTagList();

        for (TileEntity tileentity : chunkIn.getTileEntityMap().values())
        {
            NBTTagCompound nbttagcompound3 = tileentity.writeToNBT(new NBTTagCompound());
            nbttaglist2.appendTag(nbttagcompound3);
        }

        compound.setTag("TileEntities", nbttaglist2);
        List<NextTickListEntry> list = worldIn.getPendingBlockUpdates(chunkIn, false);

        if (list != null)
        {
            long j = worldIn.getTotalWorldTime();
            NBTTagList nbttaglist3 = new NBTTagList();

            for (NextTickListEntry nextticklistentry : list)
            {
                NBTTagCompound nbttagcompound1 = new NBTTagCompound();
                ResourceLocation resourcelocation = (ResourceLocation)Block.REGISTRY.getNameForObject(nextticklistentry.getBlock());
                nbttagcompound1.setString("i", resourcelocation == null ? "" : resourcelocation.toString());
                nbttagcompound1.setInteger("x", nextticklistentry.position.getX());
                nbttagcompound1.setInteger("y", nextticklistentry.position.getY());
                nbttagcompound1.setInteger("z", nextticklistentry.position.getZ());
                nbttagcompound1.setInteger("t", (int)(nextticklistentry.scheduledTime - j));
                nbttagcompound1.setInteger("p", nextticklistentry.priority);
                nbttaglist3.appendTag(nbttagcompound1);
            }

            compound.setTag("TileTicks", nbttaglist3);
        }
    }
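
    // Section encoding notes for the loop above: "Blocks" is one byte per block in a
    // 16x16x16 section (4096 entries), "Data" and "BlockLight" are 4-bit nibble
    // arrays, and "Add" is written only when getDataForNBT returns an extra nibble
    // array (the upper bits of block IDs above 255). "SkyLight" is always written;
    // dimensions without sky light (func_191066_m() returning false) get a
    // zero-filled array of the same length in its place.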

    /**
     * Reads the data stored in the passed NBTTagCompound and creates a Chunk with that data in the passed World.
     * Returns the created Chunk.
     */
    private Chunk readChunkFromNBT(World worldIn, NBTTagCompound compound)
    {
        int i = compound.getInteger("xPos");
        int j = compound.getInteger("zPos");
        Chunk chunk = new Chunk(worldIn, i, j);
        chunk.setHeightMap(compound.getIntArray("HeightMap"));
        chunk.setTerrainPopulated(compound.getBoolean("TerrainPopulated"));
        chunk.setLightPopulated(compound.getBoolean("LightPopulated"));
        chunk.setInhabitedTime(compound.getLong("InhabitedTime"));
        NBTTagList nbttaglist = compound.getTagList("Sections", 10);
        int k = 16;
        ExtendedBlockStorage[] aextendedblockstorage = new ExtendedBlockStorage[16];
        boolean flag = worldIn.provider.func_191066_m();

        for (int l = 0; l < nbttaglist.tagCount(); ++l)
        {
            NBTTagCompound nbttagcompound = nbttaglist.getCompoundTagAt(l);
            int i1 = nbttagcompound.getByte("Y");
            ExtendedBlockStorage extendedblockstorage = new ExtendedBlockStorage(i1 << 4, flag);
            byte[] abyte = nbttagcompound.getByteArray("Blocks");
            NibbleArray nibblearray = new NibbleArray(nbttagcompound.getByteArray("Data"));
            NibbleArray nibblearray1 = nbttagcompound.hasKey("Add", 7) ? new NibbleArray(nbttagcompound.getByteArray("Add")) : null;
            extendedblockstorage.getData().setDataFromNBT(abyte, nibblearray, nibblearray1);
            extendedblockstorage.setBlocklightArray(new NibbleArray(nbttagcompound.getByteArray("BlockLight")));

            if (flag)
            {
                extendedblockstorage.setSkylightArray(new NibbleArray(nbttagcompound.getByteArray("SkyLight")));
            }

            extendedblockstorage.removeInvalidBlocks();
            aextendedblockstorage[i1] = extendedblockstorage;
        }

        chunk.setStorageArrays(aextendedblockstorage);

        if (compound.hasKey("Biomes", 7))
        {
            chunk.setBiomeArray(compound.getByteArray("Biomes"));
        }

        NBTTagList nbttaglist1 = compound.getTagList("Entities", 10);

        for (int j1 = 0; j1 < nbttaglist1.tagCount(); ++j1)
        {
            NBTTagCompound nbttagcompound1 = nbttaglist1.getCompoundTagAt(j1);
            readChunkEntity(nbttagcompound1, worldIn, chunk);
            chunk.setHasEntities(true);
        }

        NBTTagList nbttaglist2 = compound.getTagList("TileEntities", 10);

        for (int k1 = 0; k1 < nbttaglist2.tagCount(); ++k1)
        {
            NBTTagCompound nbttagcompound2 = nbttaglist2.getCompoundTagAt(k1);
            TileEntity tileentity = TileEntity.create(worldIn, nbttagcompound2);

            if (tileentity != null)
            {
                chunk.addTileEntity(tileentity);
            }
        }

        if (compound.hasKey("TileTicks", 9))
        {
            NBTTagList nbttaglist3 = compound.getTagList("TileTicks", 10);

            for (int l1 = 0; l1 < nbttaglist3.tagCount(); ++l1)
            {
                NBTTagCompound nbttagcompound3 = nbttaglist3.getCompoundTagAt(l1);
                Block block;

                if (nbttagcompound3.hasKey("i", 8))
                {
                    block = Block.getBlockFromName(nbttagcompound3.getString("i"));
                }
                else
                {
                    block = Block.getBlockById(nbttagcompound3.getInteger("i"));
                }

                worldIn.scheduleBlockUpdate(new BlockPos(nbttagcompound3.getInteger("x"), nbttagcompound3.getInteger("y"), nbttagcompound3.getInteger("z")), block, nbttagcompound3.getInteger("t"), nbttagcompound3.getInteger("p"));
            }
        }

        return chunk;
    }

    @Nullable
    public static Entity readChunkEntity(NBTTagCompound compound, World worldIn, Chunk chunkIn)
    {
        Entity entity = createEntityFromNBT(compound, worldIn);

        if (entity == null)
        {
            return null;
        }
        else
        {
            chunkIn.addEntity(entity);

            if (compound.hasKey("Passengers", 9))
            {
                NBTTagList nbttaglist = compound.getTagList("Passengers", 10);

                for (int i = 0; i < nbttaglist.tagCount(); ++i)
                {
                    Entity entity1 = readChunkEntity(nbttaglist.getCompoundTagAt(i), worldIn, chunkIn);

                    if (entity1 != null)
                    {
                        entity1.startRiding(entity, true);
                    }
                }
            }

            return entity;
        }
    }
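
    // The entity readers (readChunkEntity above, readWorldEntityPos and
    // readWorldEntity below) share the same pattern: create the root entity from
    // NBT, then recurse over the "Passengers" list and re-attach each passenger
    // with startRiding(entity, true) so riding stacks survive a save/load cycle.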

    @Nullable
    public static Entity readWorldEntityPos(NBTTagCompound compound, World worldIn, double x, double y, double z, boolean attemptSpawn)
    {
        Entity entity = createEntityFromNBT(compound, worldIn);

        if (entity == null)
        {
            return null;
        }
        else
        {
            entity.setLocationAndAngles(x, y, z, entity.rotationYaw, entity.rotationPitch);

            if (attemptSpawn && !worldIn.spawnEntityInWorld(entity))
            {
                return null;
            }
            else
            {
                if (compound.hasKey("Passengers", 9))
                {
                    NBTTagList nbttaglist = compound.getTagList("Passengers", 10);

                    for (int i = 0; i < nbttaglist.tagCount(); ++i)
                    {
                        Entity entity1 = readWorldEntityPos(nbttaglist.getCompoundTagAt(i), worldIn, x, y, z, attemptSpawn);

                        if (entity1 != null)
                        {
                            entity1.startRiding(entity, true);
                        }
                    }
                }

                return entity;
            }
        }
    }

    @Nullable
    protected static Entity createEntityFromNBT(NBTTagCompound compound, World worldIn)
    {
        try
        {
            return EntityList.createEntityFromNBT(compound, worldIn);
        }
        catch (RuntimeException var3)
        {
            return null;
        }
    }

    public static void spawnEntity(Entity entityIn, World worldIn)
    {
        if (worldIn.spawnEntityInWorld(entityIn) && entityIn.isBeingRidden())
        {
            for (Entity entity : entityIn.getPassengers())
            {
                spawnEntity(entity, worldIn);
            }
        }
    }

    @Nullable
    public static Entity readWorldEntity(NBTTagCompound compound, World worldIn, boolean p_186051_2_)
    {
        Entity entity = createEntityFromNBT(compound, worldIn);

        if (entity == null)
        {
            return null;
        }
        else if (p_186051_2_ && !worldIn.spawnEntityInWorld(entity))
        {
            return null;
        }
        else
        {
            if (compound.hasKey("Passengers", 9))
            {
                NBTTagList nbttaglist = compound.getTagList("Passengers", 10);

                for (int i = 0; i < nbttaglist.tagCount(); ++i)
                {
                    Entity entity1 = readWorldEntity(nbttaglist.getCompoundTagAt(i), worldIn, p_186051_2_);

                    if (entity1 != null)
                    {
                        entity1.startRiding(entity, true);
                    }
                }
            }

            return entity;
        }
    }
}