no allocations on read/write block chunks

austin 2025-05-25 21:53:39 -04:00
parent f49e87261a
commit 8bdebb16c2
3 changed files with 156 additions and 35 deletions

View File

@@ -1996,6 +1996,7 @@ Performance improvements
- Block chunk disk map writes files without allocating a buffer
- Increase memory limit 6GB->8GB
- Server block chunk disk map writes directly to the output stream instead of an intermediate buffer
+- No-allocation block chunk read/write in disk map
Increase human move speed
LOD components re-attach physics
VectorPool->JomlPool
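
The pattern behind the no-allocation bullets above is to allocate the ByteBuffers, the Deflater, and the Inflater once, then reset them between chunks instead of constructing stream wrappers per call. A minimal, self-contained sketch of that pattern, assuming Java 11+ for the ByteBuffer overloads on Deflater/Inflater (the class and method names here are illustrative, not from the commit):

import java.nio.ByteBuffer;
import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.Inflater;

// Illustrative sketch: reuse one codec pair for every chunk instead of
// allocating streams and byte arrays per read/write.
public class ReusableCodec {
    private final Deflater deflater = new Deflater();
    private final Inflater inflater = new Inflater();

    // Compresses all of src into dst; assumes dst has room for the result.
    public void compress(ByteBuffer src, ByteBuffer dst) {
        deflater.setInput(src);
        deflater.finish();
        while (!deflater.finished()) {
            deflater.deflate(dst);
        }
        deflater.reset(); // ready for the next chunk, no new allocation
    }

    // Decompresses all of src into dst; assumes dst has room for the result.
    public void decompress(ByteBuffer src, ByteBuffer dst) throws DataFormatException {
        inflater.setInput(src);
        while (!inflater.finished() && inflater.inflate(dst) > 0) {
            // inflate() keeps filling dst until the compressed stream ends
        }
        inflater.reset();
    }
}

Reusing the pair matters because reset() keeps the natively allocated zlib state, whereas constructing a new Deflater or Inflater per chunk allocates (and later releases) that state every time.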

View File

@@ -1,16 +1,19 @@
package electrosphere.server.physics.block.diskmap;
-import java.io.ByteArrayOutputStream;
-import java.io.DataOutputStream;
import java.io.IOException;
+import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
+import java.nio.channels.Channels;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.channels.WritableByteChannel;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;
-import java.util.zip.DeflaterOutputStream;
-import java.util.zip.InflaterOutputStream;
+import java.util.zip.DataFormatException;
+import java.util.zip.Deflater;
+import java.util.zip.Inflater;
import electrosphere.client.block.BlockChunkData;
import electrosphere.engine.Globals;
@@ -50,22 +53,64 @@ public class ServerBlockChunkDiskMap {
*/
static final int HEADER_HOMOGENOUS = 1;
+/**
+ * Maximum possible size of a chunk file: the header plus the type and metadata arrays (two short arrays, two bytes per entry)
+ */
+static final int FILE_MAX_SIZE = FILE_HEADER + BlockChunkData.TOTAL_DATA_WIDTH * (2 * 2);
/**
* The map of world position+chunk type to the file that actually houses that information
*/
-Map<Long,String> worldPosFileMap;
+private Map<Long,String> worldPosFileMap;
/**
* Locks the chunk disk map for thread safety
*/
@Exclude
-ReentrantLock lock = new ReentrantLock();
+private ReentrantLock lock = new ReentrantLock();
+/**
+ * The buffer used for writing out files
+ */
+@Exclude
+private ByteBuffer outputBuffer;
+/**
+ * Buffer for compression input
+ */
+@Exclude
+private ByteBuffer compressInputBuffer;
+/**
+ * The buffer used for reading in files
+ */
+@Exclude
+private ByteBuffer inputBuffer;
+/**
+ * Deflater used for compressing outgoing files
+ */
+@Exclude
+private Deflater deflater;
+/**
+ * Inflater used for decompressing incoming files
+ */
+@Exclude
+private Inflater inflater;
/**
* Constructor
*/
private ServerBlockChunkDiskMap(){
worldPosFileMap = new HashMap<Long,String>();
+outputBuffer = ByteBuffer.allocate(FILE_MAX_SIZE);
+compressInputBuffer = ByteBuffer.allocate(FILE_MAX_SIZE);
+inputBuffer = ByteBuffer.allocate(FILE_MAX_SIZE);
+deflater = new Deflater();
+deflater.setInput(compressInputBuffer);
+inflater = new Inflater();
+inflater.setInput(compressInputBuffer);
}
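
A small note on the reset idiom repeated throughout this class: because every buffer here is allocated at exactly FILE_MAX_SIZE, the two-line position/limit reset is equivalent to ByteBuffer.clear(), which may read more clearly:

// Equivalent resets when the buffer's capacity equals FILE_MAX_SIZE:
compressInputBuffer.position(0);
compressInputBuffer.limit(FILE_MAX_SIZE);

compressInputBuffer.clear(); // position = 0, limit = capacity; contents untouched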
/**
@@ -139,30 +184,42 @@ public class ServerBlockChunkDiskMap {
if(this.containsBlocksAtPosition(worldX, worldY, worldZ)){
//read file
String fileName = worldPosFileMap.get(getBlockChunkKey(worldX, worldY, worldZ));
-byte[] rawDataCompressed = FileUtils.loadBinaryFromSavePath(Globals.serverState.currentSave.getName(), fileName);
-//decompress
-byte[] rawData = null;
-ByteArrayOutputStream out = new ByteArrayOutputStream();
-InflaterOutputStream inflaterInputStream = new InflaterOutputStream(out);
try {
-inflaterInputStream.write(rawDataCompressed);
-inflaterInputStream.flush();
-inflaterInputStream.close();
-rawData = out.toByteArray();
-} catch (IOException e) {
-LoggerInterface.loggerFileIO.ERROR(e);
-}
-//parse
-if(rawData != null){
-ByteBuffer buffer = ByteBuffer.wrap(rawData);
+//Construct the channel
+InputStream inputStream = FileUtils.getSavePathAsInputStream(Globals.serverState.currentSave.getName(), fileName);
+ReadableByteChannel channel = Channels.newChannel(inputStream);
+//setup compression input buffer
+compressInputBuffer.position(0);
+compressInputBuffer.limit(FILE_MAX_SIZE);
+//Read the file from the channel into the buffer
+channel.read(compressInputBuffer);
+compressInputBuffer.flip();
+//setup the inflater
+inflater.setInput(compressInputBuffer);
+//decompress
+inflater.inflate(inputBuffer);
+inputBuffer.flip();
+//error check
+if(!inflater.finished()){
+throw new Error("Failed to read!");
+}
+//parse
rVal = new BlockChunkData();
-int headerHomogenousType = buffer.getInt();
+int headerHomogenousType = inputBuffer.getInt();
if(headerHomogenousType == HEADER_NON_HOMOGENOUS){
//read a non-homogenous chunk
-ShortBuffer shortView = buffer.asShortBuffer();
+ShortBuffer shortView = inputBuffer.asShortBuffer();
short[] type = BlockChunkPool.getShort();
short[] metadata = BlockChunkPool.getShort();
short firstType = -1;
@@ -186,7 +243,7 @@ public class ServerBlockChunkDiskMap {
} else {
//read a homogenous chunk
-short homogenousValue = buffer.getShort();
+short homogenousValue = inputBuffer.getShort();
rVal.setHomogenousValue(homogenousValue);
}
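
For reference, the decompressed layout this reader expects (and the writer below produces), reconstructed from the parse logic:

// Decompressed chunk layout, reconstructed from the reader/writer:
//
// Non-homogenous chunk:
//   int32   header   (HEADER_NON_HOMOGENOUS)
//   int16[] type     (BlockChunkData.TOTAL_DATA_WIDTH entries)
//   int16[] metadata (BlockChunkData.TOTAL_DATA_WIDTH entries)
//
// Homogenous chunk (every block shares a single value):
//   int32   header   (HEADER_HOMOGENOUS)
//   int16   value
//
// Hence FILE_MAX_SIZE = FILE_HEADER + TOTAL_DATA_WIDTH * (2 * 2):
// two short arrays at two bytes per entry, plus the header.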
@@ -195,6 +252,32 @@ public class ServerBlockChunkDiskMap {
rVal.setWorldY(worldY);
rVal.setWorldZ(worldZ);
rVal.setLod(BlockChunkData.LOD_FULL_RES);
+//close channel
+channel.close();
+inputStream.close();
+//reset buffers
+inflater.reset();
+compressInputBuffer.position(0);
+compressInputBuffer.limit(FILE_MAX_SIZE);
+inputBuffer.position(0);
+inputBuffer.limit(FILE_MAX_SIZE);
+} catch (IOException ex){
+inflater.reset();
+compressInputBuffer.position(0);
+compressInputBuffer.limit(FILE_MAX_SIZE);
+inputBuffer.position(0);
+inputBuffer.limit(FILE_MAX_SIZE);
+LoggerInterface.loggerFileIO.ERROR(ex);
+} catch (DataFormatException e) {
+inflater.reset();
+compressInputBuffer.position(0);
+compressInputBuffer.limit(FILE_MAX_SIZE);
+inputBuffer.position(0);
+inputBuffer.limit(FILE_MAX_SIZE);
+LoggerInterface.loggerFileIO.ERROR(e);
+}
}
lock.unlock();
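
One caveat on the read path: a single ReadableByteChannel.read call is not guaranteed to deliver the whole file for every channel type. A defensive variant loops until end-of-stream (sketch; the class and helper names are illustrative):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;

final class ChannelIo {
    // Defensive variant of the single channel.read(...) above: keep reading
    // so a short read cannot truncate the chunk.
    static void readFully(ReadableByteChannel channel, ByteBuffer dst) throws IOException {
        dst.clear();
        while (channel.read(dst) > 0) {
            // read() returns 0 once dst is full and -1 at end-of-stream
        }
        dst.flip();
    }
}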
@@ -218,39 +301,65 @@ public class ServerBlockChunkDiskMap {
}
//compress
try {
-OutputStream out = FileUtils.getBinarySavePathOutputStream(Globals.serverState.currentSave.getName(), fileName);
-DeflaterOutputStream deflaterOutStream = new DeflaterOutputStream(out);
-DataOutputStream dataOut = new DataOutputStream(deflaterOutStream);
//generate binary for the file
short[] type = chunkData.getType();
short[] metadata = chunkData.getMetadata();
+//push data
+compressInputBuffer.position(0);
if(chunkData.getHomogenousValue() == BlockChunkData.NOT_HOMOGENOUS){
//put header
-dataOut.writeInt(HEADER_NON_HOMOGENOUS);
+compressInputBuffer.putInt(HEADER_NON_HOMOGENOUS);
//put data
for(int i = 0; i < BlockChunkData.TOTAL_DATA_WIDTH; i++){
-dataOut.writeShort(type[i]);
+compressInputBuffer.putShort(type[i]);
}
for(int i = 0; i < BlockChunkData.TOTAL_DATA_WIDTH; i++){
-dataOut.writeShort(metadata[i]);
+compressInputBuffer.putShort(metadata[i]);
}
} else {
//put header
-dataOut.writeInt(HEADER_HOMOGENOUS);
+compressInputBuffer.putInt(HEADER_HOMOGENOUS);
//put data
-dataOut.writeShort(chunkData.getHomogenousValue());
+compressInputBuffer.putShort(chunkData.getHomogenousValue());
}
+compressInputBuffer.flip();
+//setup deflater
+deflater.setInput(compressInputBuffer);
+deflater.finish();
+//construct channel
+OutputStream out = FileUtils.getBinarySavePathOutputStream(Globals.serverState.currentSave.getName(), fileName);
+WritableByteChannel channel = Channels.newChannel(out);
+//write
+while(!deflater.finished()){
+deflater.deflate(outputBuffer);
+outputBuffer.flip();
+channel.write(outputBuffer);
+}
//flush and close
-dataOut.flush();
-dataOut.close();
+channel.close();
out.flush();
out.close();
+//reset buffers
+deflater.reset();
+compressInputBuffer.position(0);
+compressInputBuffer.limit(FILE_MAX_SIZE);
+outputBuffer.position(0);
+outputBuffer.limit(FILE_MAX_SIZE);
//save to the map of filenames
worldPosFileMap.put(chunkKey,fileName);
} catch (IOException e) {
+deflater.reset();
+compressInputBuffer.position(0);
+compressInputBuffer.limit(FILE_MAX_SIZE);
+outputBuffer.position(0);
+outputBuffer.limit(FILE_MAX_SIZE);
LoggerInterface.loggerFileIO.ERROR(e);
}
lock.unlock();
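
The write loop above flips and writes outputBuffer but never clears it between passes, so it relies on the compressed chunk fitting into one FILE_MAX_SIZE pass. A defensive sketch that also terminates when the output needs several passes (illustrative helper, not from the commit):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import java.util.zip.Deflater;

final class DeflateIo {
    // Drains a finished deflater through a reusable scratch buffer.
    static void deflateTo(Deflater deflater, ByteBuffer scratch, WritableByteChannel channel) throws IOException {
        while (!deflater.finished()) {
            scratch.clear();                 // reset before each pass
            deflater.deflate(scratch);       // produce the next block of compressed bytes
            scratch.flip();
            while (scratch.hasRemaining()) { // write() may not drain the buffer in one call
                channel.write(scratch);
            }
        }
    }
}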

View File

@@ -385,6 +385,17 @@ public class FileUtils {
return rVal;
}
+/**
+ * Gets a save file as an input stream
+ * @param saveName The save name
+ * @param pathName The path within the save folder
+ * @return The input stream
+ */
+public static InputStream getSavePathAsInputStream(String saveName, String pathName) throws IOException {
+String sanitizedFilePath = FileUtils.sanitizeFilePath(pathName);
+return Files.newInputStream(FileUtils.getSaveFile(saveName,sanitizedFilePath).toPath());
+}
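
A typical call site for this helper might look like the following (the save and path names are made up); try-with-resources closes the underlying stream even if reading fails partway:

import java.io.IOException;
import java.io.InputStream;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;

final class Example {
    static void readChunkFile() throws IOException {
        try (InputStream in = FileUtils.getSavePathAsInputStream("save1", "blocks/chunk_0_0_0.dat");
             ReadableByteChannel channel = Channels.newChannel(in)) {
            // ... read the chunk bytes from the channel ...
        }
    }
}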
/**
* Writes a binary file to a save folder's file
* @param saveName The name of the save