audio debugging
All checks were successful
studiorailgun/Renderer/pipeline/head This commit looks good

This commit is contained in:
austin 2024-08-15 15:36:06 -04:00
parent 0484e54f08
commit e9257784c6
10 changed files with 154 additions and 23 deletions

View File

@ -579,6 +579,7 @@ Melee AI tweaks
Walk tree
Slow down strafe movement somehow
Better creature damage sfx
Audio debugging
# TODO
@ -625,6 +626,8 @@ Transvoxel implementation
Terrain Interface Positional Access Interface
- Ability to get terrain at point for interactions with game world eg placing grass/water collision
Build system to allow specifying certain audio files to load as stereo
Rework how chunks are written to disk to make them more cache friendly
- IE, write consecutively higher LOD levels the further into the file, so that you can read just the first few bytes if it's a far-away chunk

View File

@ -1,5 +1,6 @@
package electrosphere.audio;
import electrosphere.engine.Globals;
import electrosphere.logger.LoggerInterface;
import electrosphere.util.FileUtils;
import java.io.IOException;
@ -70,6 +71,7 @@ public class AudioBuffer {
String fileNameSanitized = FileUtils.sanitizeFilePath(fileNameRaw);
this.filePath = fileNameSanitized;
bufferId = alGenBuffers();
Globals.audioEngine.checkError();
//read vorbis
if(!isBuffered && fileNameSanitized.contains(".ogg")){
@ -85,7 +87,7 @@ public class AudioBuffer {
//read wav
try {
if(!isBuffered && AudioSystem.getAudioFileFormat(FileUtils.getAssetFile(fileNameSanitized)) != null){
readJava(fileNameSanitized, bufferId);
readJava(fileNameSanitized, bufferId, true);
}
} catch(UnsupportedAudioFileException ex){
LoggerInterface.loggerAudio.ERROR(ex);
@ -135,6 +137,7 @@ public class AudioBuffer {
// Copy to buffer
alBufferData(bufferId, this.channels == 1 ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16, pcm, info.sample_rate());
Globals.audioEngine.checkError();
isBuffered = true;
}
}
@ -143,8 +146,9 @@ public class AudioBuffer {
* Tries reading an audio file using java built in audio processing
* @param filepath The filepath to the wav
* @param bufferId The id of the buffer
* @param forceMono Forces the audio to load as a mono channel source
*/
private void readJava(String filepath, int bufferId){
private void readJava(String filepath, int bufferId, boolean forceMono){
try(MemoryStack stack = MemoryStack.stackPush()){
//get raw file objects
AudioInputStream inputStreamRaw = AudioSystem.getAudioInputStream(FileUtils.getAssetFile(filepath));
@ -156,6 +160,9 @@ public class AudioBuffer {
float currentFormatSampleRate = format.getSampleRate();
int desiredSampleSizeInBits = 16;
int desiredChannels = format.getChannels() > 2 ? 2 : format.getChannels();
if(forceMono){
desiredChannels = 1;
}
boolean desiredSigned = true;
boolean currentFormatIsBigEndian = format.isBigEndian();
@ -233,6 +240,7 @@ public class AudioBuffer {
//buffer to openal
AL11.alBufferData(bufferId, channels == 1 ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16, buffer, (int)sampleRate);
Globals.audioEngine.checkError();
isBuffered = true;
//cleanup
@ -295,11 +303,20 @@ public class AudioBuffer {
return this.length;
}
/**
 * Gets the channel count of this buffer's audio data
 * @return The number of audio channels
 */
public int getChannels(){
    return channels;
}
/**
 * Deletes the underlying OpenAL buffer and verifies the call succeeded
 */
public void cleanup() {
    alDeleteBuffers(this.bufferId);
    Globals.audioEngine.checkError();
}
}

View File

@ -3,6 +3,8 @@ package electrosphere.audio;
import electrosphere.engine.Globals;
import electrosphere.entity.types.camera.CameraEntityUtils;
import electrosphere.logger.LoggerInterface;
import electrosphere.util.math.MathUtils;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.Collections;
@ -13,9 +15,11 @@ import java.util.concurrent.CopyOnWriteArrayList;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioSystem;
import org.joml.Vector3d;
import org.joml.Vector3f;
import org.lwjgl.BufferUtils;
import org.lwjgl.openal.AL;
import org.lwjgl.openal.AL11;
import org.lwjgl.openal.ALC;
import org.lwjgl.openal.ALC10;
import org.lwjgl.openal.ALC11;
@ -174,11 +178,14 @@ public class AudioEngine {
*/
private void updateListener(){
//syncs the openal listener transform with the player camera
if(Globals.playerCamera != null){
//position
Vector3f cameraPos = CameraEntityUtils.getCameraCenter(Globals.playerCamera);
//NOTE(review): cameraEye/cameraUp are declared twice below (Vector3f here, Vector3d further down) — this cannot compile as-is; the Vector3f block and the setPosition/setOrientation pair look like leftover pre-refactor lines. Confirm which set is current and delete the other.
Vector3f cameraEye = new Vector3f(CameraEntityUtils.getCameraEye(Globals.playerCamera)).mul(-1);
Vector3f cameraUp = new Vector3f(0,1,0);
listener.setPosition(cameraPos);
listener.setOrientation(cameraEye, cameraUp);
//orientation
//derive eye/up by rotating the canonical origin/up vectors by the camera quaternion
Vector3d cameraEye = MathUtils.getOriginVector().rotate(CameraEntityUtils.getRotationQuat(Globals.playerCamera)).normalize();
Vector3d cameraUp = MathUtils.getUpVector().rotate(CameraEntityUtils.getRotationQuat(Globals.playerCamera)).normalize();
listener.setOrientation(new Vector3f((float)cameraEye.x,(float)cameraEye.y,(float)cameraEye.z), new Vector3f((float)cameraUp.x,(float)cameraUp.y,(float)cameraUp.z));
}
}
@ -285,6 +292,51 @@ public class AudioEngine {
public boolean initialized(){
return this.initialized;
}
/**
 * Drains the openal error queue and builds a message for the most recent error
 * @return A human-readable message for the latest pending openal error, or null if no error is pending
 */
public String getLatestErrorMessage(){
    //drain the queue, keeping only the most recently raised error code
    int latestError = AL11.alGetError();
    int error = AL11.alGetError();
    while(error != AL11.AL_NO_ERROR){
        latestError = error;
        error = AL11.alGetError();
    }
    switch(latestError){
        case AL11.AL_NO_ERROR: {
            return null;
        }
        case AL11.AL_INVALID_NAME: {
            return "Bad ID was passed to openal";
        }
        case AL11.AL_INVALID_ENUM: {
            return "Bad enum was passed to openal";
        }
        case AL11.AL_INVALID_VALUE: {
            return "Bad value was passed to openal";
        }
        case AL11.AL_INVALID_OPERATION: {
            return "Bad operation attempted by openal";
        }
        case AL11.AL_OUT_OF_MEMORY: {
            return "Openal is OOM";
        }
        default: {
            //report the retained code — the loop variable is always AL_NO_ERROR here
            return "Unhandled error code! " + latestError;
        }
    }
}
/**
 * Polls for a pending openal error and logs it if one exists
 */
public void checkError(){
    final String message = this.getLatestErrorMessage();
    if(message == null){
        return;
    }
    LoggerInterface.loggerAudio.ERROR(new IllegalStateException(message));
}

View File

@ -2,7 +2,9 @@ package electrosphere.audio;
import org.joml.Vector3d;
import org.joml.Vector3f;
import org.lwjgl.openal.AL11;
import electrosphere.engine.Globals;
import electrosphere.util.math.MathUtils;
import static org.lwjgl.openal.AL10.*;
@ -13,13 +15,19 @@ import static org.lwjgl.openal.AL10.*;
public class AudioListener {
//The position of the listener
Vector3d position;
Vector3d position = new Vector3d();
//eye vector for listener
Vector3f eye = MathUtils.getOriginVectorf();
//up vector for listener
Vector3f up = new Vector3f(0,1,0);
//buffers used to fetch values
float xB[] = new float[1];
float yB[] = new float[1];
float zB[] = new float[1];
float vecB[] = new float[6];
/**
* Constructor
@ -35,8 +43,9 @@ public class AudioListener {
protected AudioListener(Vector3d position) {
this.position = position;
alListener3f(AL_POSITION, (float)this.position.x, (float)this.position.y, (float)this.position.z);
Globals.audioEngine.checkError();
alListener3f(AL_VELOCITY, 0, 0, 0);
Globals.audioEngine.checkError();
}
/**
@ -45,6 +54,7 @@ public class AudioListener {
*/
protected void setSpeed(Vector3f speed) {
    final float vx = speed.x;
    final float vy = speed.y;
    final float vz = speed.z;
    //push the listener velocity to openal and verify the call
    alListener3f(AL_VELOCITY, vx, vy, vz);
    Globals.audioEngine.checkError();
}
/**
@ -52,8 +62,11 @@ public class AudioListener {
* @param position the position
*/
protected void setPosition(Vector3f position) {
//NOTE(review): this direct set is immediately overwritten by the read-back below — it appears to be a leftover pre-refactor line in this diff; confirm and remove
this.position.set(position.x, position.y, position.z);
alListener3f(AL_POSITION, position.x, position.y, position.z);
Globals.audioEngine.checkError();
//read the position back from openal so the cached value reflects driver state
AL11.alGetListener3f(AL11.AL_POSITION, xB, yB, zB);
Globals.audioEngine.checkError();
this.position.set(xB[0],yB[0],zB[0]);
}
/**
@ -62,8 +75,6 @@ public class AudioListener {
* @param up The up vector of the camera
*/
protected void setOrientation(Vector3f at, Vector3f up) {
//NOTE(review): these two direct sets are overwritten by the read-back at the end — they look like leftover pre-refactor lines in this diff; confirm and remove
this.eye.set(at);
this.up.set(up);
//AL_ORIENTATION takes a 6-float array: the "at" vector followed by the "up" vector
float[] data = new float[6];
data[0] = at.x;
data[1] = at.y;
data[2] = at.z;
data[3] = up.x;
data[4] = up.y;
data[5] = up.z;
alListenerfv(AL_ORIENTATION, data);
Globals.audioEngine.checkError();
//read the orientation back from openal so the cached eye/up reflect driver state
AL11.alGetListenerfv(AL11.AL_ORIENTATION, vecB);
Globals.audioEngine.checkError();
this.eye.set(vecB[0],vecB[1],vecB[2]);
this.up.set(vecB[3],vecB[4],vecB[5]);
}
/**

View File

@ -3,6 +3,7 @@ package electrosphere.audio;
import org.joml.Vector3f;
import org.lwjgl.openal.AL11;
import electrosphere.engine.Globals;
import electrosphere.logger.LoggerInterface;
import org.lwjgl.openal.AL10;
@ -18,16 +19,14 @@ public class AudioSource {
/**
* Creates an audio source object
* @param loop if true, will loop audio, otherwise will not
* @param relative if true, will make the audio source position relative to the listener based on position
*/
protected static AudioSource create(boolean loop, boolean relative){
protected static AudioSource create(boolean loop){
AudioSource rVal = new AudioSource();
rVal.sourceId = AL10.alGenSources();
if (loop) {
Globals.audioEngine.checkError();
if(loop){
AL10.alSourcei(rVal.sourceId, AL10.AL_LOOPING, AL10.AL_TRUE);
}
if (relative) {
AL10.alSourcei(rVal.sourceId, AL10.AL_SOURCE_RELATIVE, AL10.AL_TRUE);
Globals.audioEngine.checkError();
}
return rVal;
}
@ -39,6 +38,7 @@ public class AudioSource {
public void setBuffer(int bufferId) {
    //halt any current playback before swapping the attached buffer
    this.stop();
    AL10.alSourcei(this.sourceId, AL10.AL_BUFFER, bufferId);
    Globals.audioEngine.checkError();
}
/**
@ -47,6 +47,7 @@ public class AudioSource {
*/
public void setPosition(Vector3f position) {
    final float px = position.x;
    final float py = position.y;
    final float pz = position.z;
    //place the source in world space
    AL10.alSource3f(this.sourceId, AL10.AL_POSITION, px, py, pz);
    Globals.audioEngine.checkError();
}
/**
@ -55,6 +56,7 @@ public class AudioSource {
*/
public void setSpeed(Vector3f speed) {
    final float vx = speed.x;
    final float vy = speed.y;
    final float vz = speed.z;
    //push the source velocity to openal
    AL10.alSource3f(this.sourceId, AL10.AL_VELOCITY, vx, vy, vz);
    Globals.audioEngine.checkError();
}
/**
@ -63,6 +65,7 @@ public class AudioSource {
*/
public void setOffset(float time){
    //seek within the attached buffer, offset is in seconds
    AL10.alSourcef(this.sourceId, AL11.AL_SEC_OFFSET, time);
    Globals.audioEngine.checkError();
}
/**
@ -72,6 +75,7 @@ public class AudioSource {
public void setGain(float gain) {
    //trace the requested gain before applying it
    LoggerInterface.loggerAudio.DEBUG("Set Gain: " + gain);
    AL10.alSourcef(this.sourceId, AL10.AL_GAIN, gain);
    Globals.audioEngine.checkError();
}
/**
@ -81,6 +85,7 @@ public class AudioSource {
*/
public void setProperty(int param, float value) {
    //pass an arbitrary float source property straight through to openal
    AL10.alSourcef(this.sourceId, param, value);
    Globals.audioEngine.checkError();
}
/**
@ -88,6 +93,7 @@ public class AudioSource {
*/
public void play() {
    //begin (or resume) playback of the attached buffer
    AL10.alSourcePlay(this.sourceId);
    Globals.audioEngine.checkError();
}
/**
@ -95,7 +101,9 @@ public class AudioSource {
* @return True if it is playing, false otherwise
*/
public boolean isPlaying() {
    //capture the state first so the error check runs before returning
    boolean isPlaying = AL10.alGetSourcei(sourceId, AL10.AL_SOURCE_STATE) == AL10.AL_PLAYING;
    Globals.audioEngine.checkError();
    return isPlaying;
}
/**
@ -103,6 +111,7 @@ public class AudioSource {
*/
public void pause() {
    //suspend playback without resetting the playback position
    AL10.alSourcePause(this.sourceId);
    Globals.audioEngine.checkError();
}
/**
@ -110,6 +119,7 @@ public class AudioSource {
*/
public void stop() {
    //halt playback entirely
    AL10.alSourceStop(this.sourceId);
    Globals.audioEngine.checkError();
}
/**
@ -118,5 +128,6 @@ public class AudioSource {
public void cleanup() {
    //halt playback before releasing the source handle
    this.stop();
    AL10.alDeleteSources(this.sourceId);
    Globals.audioEngine.checkError();
}
}

View File

@ -23,7 +23,7 @@ public class AudioUtils {
AudioSource rVal = null;
AudioBuffer buffer = Globals.assetManager.fetchAudio(audioFile);
if(buffer != null && Globals.audioEngine.initialized()){
rVal = AudioSource.create(loops,true);
rVal = AudioSource.create(loops);
rVal.setBuffer(buffer.getBufferId());
rVal.setGain(Globals.audioEngine.getGain());
rVal.setPosition(position);
@ -55,7 +55,7 @@ public class AudioUtils {
AudioSource rVal = null;
AudioBuffer buffer = Globals.assetManager.fetchAudio(FileUtils.sanitizeFilePath(audioFile));
if(buffer != null && Globals.audioEngine.initialized()){
rVal = AudioSource.create(loops,false);
rVal = AudioSource.create(loops);
rVal.setBuffer(buffer.getBufferId());
rVal.setGain(Globals.audioEngine.getGain());
rVal.play();

View File

@ -103,7 +103,7 @@ public class FallTree implements BehaviorTree {
}
if(parent == Globals.playerEntity && !Globals.controlHandler.cameraIsThirdPerson()){
//first person
Globals.movementAudioService.playAudio(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.LAND);
Globals.movementAudioService.playAudioPositional(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.LAND, EntityUtils.getPosition(parent));
//play first person audio
} else {
//play third person audio

View File

@ -519,7 +519,7 @@ public class ClientGroundMovementTree implements BehaviorTree {
){
this.playedFootstepFirst = true;
if(parent == Globals.playerEntity && !Globals.controlHandler.cameraIsThirdPerson()){
Globals.movementAudioService.playAudio(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.STEP_BARE_REG);
Globals.movementAudioService.playAudioPositional(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.STEP_BARE_REG, position);
} else {
Globals.movementAudioService.playAudioPositional(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.STEP_BARE_REG, position);
}
@ -532,7 +532,7 @@ public class ClientGroundMovementTree implements BehaviorTree {
){
this.playedFootstepSecond = true;
if(parent == Globals.playerEntity && !Globals.controlHandler.cameraIsThirdPerson()){
Globals.movementAudioService.playAudio(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.STEP_BARE_REG);
Globals.movementAudioService.playAudioPositional(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.STEP_BARE_REG, position);
} else {
Globals.movementAudioService.playAudioPositional(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.STEP_BARE_REG, position);
}

View File

@ -310,7 +310,7 @@ public class ClientJumpTree implements BehaviorTree {
} break;
case ACTIVE: {
if(parent == Globals.playerEntity && !Globals.controlHandler.cameraIsThirdPerson()){
Globals.movementAudioService.playAudio(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.JUMP);
Globals.movementAudioService.playAudioPositional(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.JUMP, EntityUtils.getPosition(parent));
} else {
Globals.movementAudioService.playAudioPositional(ClientVoxelSampler.getVoxelTypeBeneathEntity(parent), InteractionType.JUMP, EntityUtils.getPosition(parent));
}

View File

@ -1,7 +1,11 @@
package electrosphere.menu.debug;
import org.joml.Vector3d;
import electrosphere.audio.AudioBuffer;
import electrosphere.audio.AudioListener;
import electrosphere.audio.VirtualAudioSource;
import electrosphere.audio.VirtualAudioSourceManager.VirtualAudioSourceType;
import electrosphere.engine.Globals;
import electrosphere.renderer.ui.imgui.ImGuiWindow;
import electrosphere.renderer.ui.imgui.ImGuiWindow.ImGuiWindowCallback;
@ -64,9 +68,37 @@ public class ImGuiAudio {
for(AudioBuffer buffer : Globals.assetManager.getAllAudio()){
if(buffer.getFilePath().contains(bufferListFilter.get()) && ImGui.collapsingHeader(buffer.getFilePath())){
ImGui.text("Length: " + buffer.getLength());
ImGui.text("Channels: " + buffer.getChannels());
}
}
}
//testing
if(ImGui.collapsingHeader("Spatial Testing")){
if(ImGui.button("Front")){
AudioListener listener = Globals.audioEngine.getListener();
Globals.virtualAudioSourceManager.createVirtualAudioSource("/Audio/movement/surface/dirt/Bare Step Gravel Medium A.wav", VirtualAudioSourceType.UI, false, new Vector3d(listener.getPosition()).add(listener.getEyeVector()));
}
if(ImGui.button("Back")){
AudioListener listener = Globals.audioEngine.getListener();
Globals.virtualAudioSourceManager.createVirtualAudioSource("/Audio/movement/surface/dirt/Bare Step Gravel Medium A.wav", VirtualAudioSourceType.UI, false, new Vector3d(listener.getPosition()).sub(listener.getEyeVector()));
}
if(ImGui.button("Above")){
AudioListener listener = Globals.audioEngine.getListener();
Globals.virtualAudioSourceManager.createVirtualAudioSource("/Audio/movement/surface/dirt/Bare Step Gravel Medium A.wav", VirtualAudioSourceType.UI, false, new Vector3d(listener.getPosition()).add(listener.getUpVector()));
}
if(ImGui.button("Below")){
AudioListener listener = Globals.audioEngine.getListener();
Globals.virtualAudioSourceManager.createVirtualAudioSource("/Audio/movement/surface/dirt/Bare Step Gravel Medium A.wav", VirtualAudioSourceType.UI, false, new Vector3d(listener.getPosition()).sub(listener.getUpVector()));
}
if(ImGui.button("Left")){
AudioListener listener = Globals.audioEngine.getListener();
Globals.virtualAudioSourceManager.createVirtualAudioSource("/Audio/movement/surface/dirt/Bare Step Gravel Medium A.wav", VirtualAudioSourceType.UI, false, new Vector3d(listener.getPosition()).add(listener.getEyeVector().cross(listener.getUpVector())));
}
if(ImGui.button("Right")){
AudioListener listener = Globals.audioEngine.getListener();
Globals.virtualAudioSourceManager.createVirtualAudioSource("/Audio/movement/surface/dirt/Bare Step Gravel Medium A.wav", VirtualAudioSourceType.UI, false, new Vector3d(listener.getPosition()).sub(listener.getEyeVector().cross(listener.getUpVector())));
}
}
}
});
audioDebugMenu.setOpen(false);