more robust java audio support
All checks were successful
studiorailgun/Renderer/pipeline/head This commit looks good

This commit is contained in:
austin 2024-08-07 19:52:35 -04:00
parent 2ebeb71f86
commit 5e87dec05d
7 changed files with 139 additions and 29 deletions

Binary file not shown.

Binary file not shown.

View File

@ -182,7 +182,7 @@
"priorityCategory" : "MOVEMENT_MODIFIER"
},
"audioData" : {
"audioPath" : "Audio/movement/Equip A.wav"
"audioPath" : "Audio/movement/landing-on-the-ground-4.wav"
}
}
}

View File

@ -21,7 +21,6 @@ Things that feel bad:
Sound effect on hit
Sound effect on walk
Sound effect on jump
Sound effect on land
Sound effect on block
Allow block hitboxes to block damage
Server packet on damage collision

View File

@ -4,13 +4,13 @@ import electrosphere.logger.LoggerInterface;
import electrosphere.util.FileUtils;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
@ -34,12 +34,30 @@ public class AudioBuffer {
//the id of the buffer
private int bufferId;
//The main audio data
private ShortBuffer pcm = null;
//The number of channels for the audio
private int channels = 0;
//The sample rate of the audio
private float sampleRate = 0;
//The size of a single sample in bits
private int sampleSize = 0;
//The framerate
private float frameRate = 0;
//the length of a frame
private long frameLength = 0;
//The size of a single frame
private int frameSize = 0;
//the length of the audio source in seconds (derived from frameLength / frameRate and stb_vorbis_stream_length_in_seconds)
float length = 0;
//whether this buffer has created an al buffer object or not
boolean isBuffered = false;
/**
* Creates the audio buffer object
@ -50,7 +68,7 @@ public class AudioBuffer {
bufferId = alGenBuffers();
//read vorbis
if(fileNameSanitized.contains(".ogg")){
if(!isBuffered && fileNameSanitized.contains(".ogg")){
//create buffer to store vorbis data
try(STBVorbisInfo info = STBVorbisInfo.malloc()){
readVorbis(fileNameSanitized, bufferId, 32 * 1024, info);
@ -61,8 +79,14 @@ public class AudioBuffer {
//read wav
if(fileNameSanitized.contains(".wav")){
readWav(fileNameSanitized, 32 * 1024, bufferId);
try {
if(!isBuffered && AudioSystem.getAudioFileFormat(FileUtils.getAssetFile(fileNameSanitized)) != null){
readJava(fileNameSanitized, bufferId);
}
} catch(UnsupportedAudioFileException ex){
LoggerInterface.loggerAudio.ERROR(ex);
} catch (IOException ex){
LoggerInterface.loggerAudio.ERROR(ex);
}
@ -80,7 +104,7 @@ public class AudioBuffer {
private void readVorbis(String filepath, int bufferId, int bufferSize, STBVorbisInfo info) throws Exception {
//buffer containing vorbis metadata
ByteBuffer vorbis = null;
try (MemoryStack stack = MemoryStack.stackPush()) {
try(MemoryStack stack = MemoryStack.stackPush()){
//read the vorbis data from disk
vorbis = AudioBuffer.readFilepathToByteBuffer(filepath);
@ -94,54 +118,126 @@ public class AudioBuffer {
//creates the vorbis metadata object and grabs information about the audio file
STBVorbis.stb_vorbis_get_info(decoder, info);
int channels = info.channels();
this.channels = info.channels();
int lengthSamples = STBVorbis.stb_vorbis_stream_length_in_samples(decoder);
this.length = STBVorbis.stb_vorbis_stream_length_in_seconds(decoder) * 1;
//reads the main audio data
pcm = MemoryUtil.memAllocShort(lengthSamples);
pcm.limit(STBVorbis.stb_vorbis_get_samples_short_interleaved(decoder, channels, pcm) * channels);
ShortBuffer pcm = MemoryUtil.memAllocShort(lengthSamples);
pcm.limit(STBVorbis.stb_vorbis_get_samples_short_interleaved(decoder, this.channels, pcm) * this.channels);
//close decoder and return
STBVorbis.stb_vorbis_close(decoder);
// Copy to buffer
alBufferData(bufferId, info.channels() == 1 ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16, pcm, info.sample_rate());
alBufferData(bufferId, this.channels == 1 ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16, pcm, info.sample_rate());
isBuffered = true;
}
}
/**
* Reads a wav file
* Tries reading an audio file using java built in audio processing
* @param filepath The filepath to the audio file
* @param bufferId The id of the buffer
*/
private void readWav(String filepath, int bufferSize, int bufferId){
try {
private void readJava(String filepath, int bufferId){
try(MemoryStack stack = MemoryStack.stackPush()){
//get raw file objects
AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(FileUtils.getAssetFile(filepath));
AudioFormat format = fileFormat.getFormat();
AudioInputStream inputStream = AudioSystem.getAudioInputStream(FileUtils.getAssetFile(filepath));
AudioInputStream inputStreamRaw = AudioSystem.getAudioInputStream(FileUtils.getAssetFile(filepath));
AudioFormat format = inputStreamRaw.getFormat();
AudioFormat.Encoding encoding = format.getEncoding();
AudioInputStream inputStreamEncoded = AudioSystem.getAudioInputStream(encoding, inputStreamRaw);
//get current format values
float currentFormatSampleRate = format.getSampleRate();
int desiredSampleSizeInBits = 16;
int desiredChannels = format.getChannels() > 2 ? 2 : format.getChannels();
boolean desiredSigned = true;
boolean currentFormatIsBigEndian = format.isBigEndian();
//get the desired format
AudioFormat desiredFormat = new AudioFormat(currentFormatSampleRate, desiredSampleSizeInBits, desiredChannels, desiredSigned, currentFormatIsBigEndian);
AudioInputStream finalStream = AudioSystem.getAudioInputStream(desiredFormat, inputStreamEncoded);
//get data about specific file
int channels = format.getChannels();
float sampleRate = format.getSampleRate();
this.length = fileFormat.getFrameLength() * format.getFrameRate();
boolean isBigEndian = desiredFormat.isBigEndian();
this.channels = desiredFormat.getChannels();
this.sampleRate = desiredFormat.getSampleRate();
this.sampleSize = desiredFormat.getSampleSizeInBits();
this.frameLength = finalStream.getFrameLength();
this.frameSize = desiredFormat.getFrameSize();
this.frameRate = desiredFormat.getFrameRate();
this.length = frameLength / frameRate;
LoggerInterface.loggerAudio.INFO(
"filepath: " + filepath + "\n" +
"encoding: " + encoding + "\n" +
"isBigEndian: " + isBigEndian + "\n" +
"channels: " + this.channels + "\n" +
"sampleRate: " + this.sampleRate + "\n" +
"sampleSize: " + this.sampleSize + "\n" +
"frameLength: " + this.frameLength + "\n" +
"frameSize: " + this.frameSize + "\n" +
"frameRate: " + this.frameRate + "\n" +
"length: " + this.length + "\n"
);
//read data
byte[] dataRaw = inputStream.readAllBytes();
byte[] dataRaw = finalStream.readAllBytes();
ByteBuffer buffer = MemoryUtil.memAlloc(dataRaw.length);
buffer.order(isBigEndian ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN);
buffer.put(dataRaw);
buffer.flip();
//sanity check
if(this.length > 10 * 60 * 60 * 1000){
String message = "Audio file has length greater than 10 hours!\n" +
"filepath: " + filepath + "\n" +
"channels: " + channels + "\n" +
"sampleRate: " + sampleRate + "\n" +
"length: " + this.length + "\n" +
"frame length: " + finalStream.getFrameLength() + "\n" +
"frame rate: " + format.getFrameRate() + "\n"
;
LoggerInterface.loggerAudio.ERROR(new IllegalStateException(message));
}
if(sampleRate == AudioSystem.NOT_SPECIFIED){
String message = "Sample rate not specified!\n" +
"filepath: " + filepath + "\n"
;
LoggerInterface.loggerAudio.ERROR(new IllegalStateException(message));
}
if(format.getFrameRate() == AudioSystem.NOT_SPECIFIED){
String message = "Frame rate not specified!\n" +
"filepath: " + filepath + "\n"
;
LoggerInterface.loggerAudio.ERROR(new IllegalStateException(message));
}/* */
if(channels == AudioSystem.NOT_SPECIFIED){
String message = "Channels not specified!\n" +
"filepath: " + filepath + "\n"
;
LoggerInterface.loggerAudio.ERROR(new IllegalStateException(message));
}
if(channels > 2){
String message = "More than two channels defined in audio file! The engine will only use the first two!\n" +
"filepath: " + filepath + "\n"
;
LoggerInterface.loggerAudio.ERROR(new IllegalStateException(message));
}
//buffer to openal
AL11.alBufferData(bufferId, channels == 1 ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16, buffer, (int)sampleRate);
isBuffered = true;
//cleanup
MemoryUtil.memFree(buffer);
} catch (UnsupportedAudioFileException e) {
// TODO Auto-generated catch block
e.printStackTrace();
LoggerInterface.loggerAudio.ERROR(e);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
LoggerInterface.loggerAudio.ERROR(e);
}
}

View File

@ -6,6 +6,9 @@ import electrosphere.logger.LoggerInterface;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioSystem;
import org.joml.Vector3f;
import org.lwjgl.BufferUtils;
import org.lwjgl.openal.AL;
@ -65,6 +68,7 @@ public class AudioEngine {
public void init() {
try {
initDevice();
echoJavaAudioSupport();
} catch (Exception ex) {
LoggerInterface.loggerEngine.ERROR("Error initializing audio device", ex);
}
@ -148,6 +152,16 @@ public class AudioEngine {
return rVal;
}
/**
* Echoes the available support for different audio file types from the JRE itself
*/
private void echoJavaAudioSupport(){
LoggerInterface.loggerAudio.INFO("Check JRE-supported audio file types");
for(AudioFileFormat.Type audioType : AudioSystem.getAudioFileTypes()){
LoggerInterface.loggerAudio.INFO(audioType.getExtension() + " support: " + AudioSystem.isFileTypeSupported(audioType));
}
}
/**
* Updates the orientation of the listener based on the global player camera
*/

View File

@ -471,6 +471,7 @@ public class Globals {
"/Audio/weapons/swordUnsheath1.ogg",
"/Audio/weapons/swoosh-03.ogg",
"/Audio/movement/Equip A.wav",
"/Audio/movement/landing-on-the-ground-4.wav",
};
LoggerInterface.loggerStartup.INFO("Loading default audio resources");
for(String path : audioToInit){